chore: initial commit - CloudSearch v0.0.2
This commit is contained in:
4200
packages/backend/package-lock.json
generated
Executable file
4200
packages/backend/package-lock.json
generated
Executable file
File diff suppressed because it is too large
Load Diff
43
packages/backend/package.json
Executable file
43
packages/backend/package.json
Executable file
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"name": "cloudsearch-backend",
|
||||
"version": "0.0.2",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "tsx watch src/main.ts",
|
||||
"build": "tsc",
|
||||
"start": "node dist/main.js",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest"
|
||||
},
|
||||
"dependencies": {
|
||||
"bcryptjs": "^2.4.3",
|
||||
"better-sqlite3": "^11.0.0",
|
||||
"cors": "^2.8.5",
|
||||
"express": "^4.21.0",
|
||||
"express-rate-limit": "^7.4.0",
|
||||
"helmet": "^8.0.0",
|
||||
"https-proxy-agent": "^9.0.0",
|
||||
"socks-proxy-agent": "^9.0.0",
|
||||
"ioredis": "^5.4.0",
|
||||
"jsqr": "^1.4.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"morgan": "^1.10.0",
|
||||
"multer": "^1.4.5-lts.1",
|
||||
"playwright": "^1.52.0",
|
||||
"sharp": "^0.33.0",
|
||||
"uuid": "^10.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bcryptjs": "^2.4.6",
|
||||
"@types/better-sqlite3": "^7.6.11",
|
||||
"@types/cors": "^2.8.17",
|
||||
"@types/express": "^5.0.0",
|
||||
"@types/jsonwebtoken": "^9.0.6",
|
||||
"@types/morgan": "^1.9.9",
|
||||
"@types/multer": "^1.4.12",
|
||||
"@types/uuid": "^10.0.0",
|
||||
"tsx": "^4.19.0",
|
||||
"typescript": "^5.6.0",
|
||||
"vitest": "^2.1.0"
|
||||
}
|
||||
}
|
||||
76
packages/backend/src/admin/auth.service.ts
Executable file
76
packages/backend/src/admin/auth.service.ts
Executable file
@@ -0,0 +1,76 @@
|
||||
import jwt from 'jsonwebtoken';
|
||||
import bcrypt from 'bcryptjs';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import config from '../config';
|
||||
import { getDb } from '../database/database';
|
||||
|
||||
/** Claims embedded in the admin JWT issued by login() and read back by verifyToken(). */
export interface AuthPayload {
  // Primary key of the row in the `admins` table.
  id: number;
  // Admin login name (also used by changePassword to find the row).
  username: string;
}
|
||||
|
||||
export function login(username: string, password: string): string | null {
|
||||
const db = getDb();
|
||||
const row = db.prepare('SELECT id, username, password_hash FROM admins WHERE username = ?').get(username) as any;
|
||||
|
||||
if (!row) return null;
|
||||
|
||||
const valid = bcrypt.compareSync(password, row.password_hash);
|
||||
if (!valid) return null;
|
||||
|
||||
// Update last login
|
||||
db.prepare('UPDATE admins SET last_login = datetime(\'now\') WHERE id = ?').run(row.id);
|
||||
|
||||
// Generate JWT
|
||||
const payload: AuthPayload = { id: row.id, username: row.username };
|
||||
const token = jwt.sign(payload, config.jwtSecret, { expiresIn: '24h' });
|
||||
|
||||
return token;
|
||||
}
|
||||
|
||||
export function verifyToken(token: string): AuthPayload | null {
|
||||
try {
|
||||
const decoded = jwt.verify(token, config.jwtSecret) as AuthPayload;
|
||||
return decoded;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function authMiddleware(req: Request, res: Response, next: NextFunction): void {
|
||||
const authHeader = req.headers.authorization;
|
||||
|
||||
if (!authHeader || !authHeader.startsWith('Bearer ')) {
|
||||
res.status(401).json({ error: 'Missing or invalid authorization header', code: 401 });
|
||||
return;
|
||||
}
|
||||
|
||||
const token = authHeader.split(' ')[1];
|
||||
const payload = verifyToken(token);
|
||||
|
||||
if (!payload) {
|
||||
res.status(401).json({ error: 'Invalid or expired token', code: 401 });
|
||||
return;
|
||||
}
|
||||
|
||||
(req as any).user = payload;
|
||||
next();
|
||||
}
|
||||
|
||||
export function changePassword(username: string, oldPassword: string, newPassword: string): { success: boolean; message: string } {
|
||||
const db = getDb();
|
||||
const row = db.prepare('SELECT id, password_hash FROM admins WHERE username = ?').get(username) as any;
|
||||
if (!row) {
|
||||
return { success: false, message: '用户不存在' };
|
||||
}
|
||||
|
||||
const valid = bcrypt.compareSync(oldPassword, row.password_hash);
|
||||
if (!valid) {
|
||||
return { success: false, message: '原密码错误' };
|
||||
}
|
||||
|
||||
const salt = bcrypt.genSaltSync(10);
|
||||
const hash = bcrypt.hashSync(newPassword, salt);
|
||||
db.prepare("UPDATE admins SET password_hash = ? WHERE id = ?").run(hash, row.id);
|
||||
return { success: true, message: '密码修改成功' };
|
||||
}
|
||||
161
packages/backend/src/admin/stats.service.ts
Executable file
161
packages/backend/src/admin/stats.service.ts
Executable file
@@ -0,0 +1,161 @@
|
||||
import { getDb } from '../database/database';
|
||||
import { formatLocalDate } from '../utils/time';
|
||||
|
||||
/** Aggregated dashboard statistics returned by getStats(). */
export interface AdminStats {
  // Counters for today / current month / all time, in the configured local timezone.
  todaySearches: number;
  todaySaves: number;
  monthSearches: number;
  monthSaves: number;
  totalSearches: number;
  totalSaves: number;
  // Top search keywords ordered by search_count (max 20).
  hotKeywords: Array<{ keyword: string; count: number }>;
  // Per-day series; *Delta fields are the absolute change vs. the previous day
  // (first day in the series stays 0).
  trendTrend: Array<{ date: string; searches: number; saves: number; searchDelta: number; saveDelta: number }>;
  // One entry per configured cloud drive account.
  cloudUsage: Array<{
    cloudType: string;
    nickname: string;
    storageUsed: string;
    storageTotal: string;
    isActive: boolean;
  }>;
  // Most active save-record IPs (max 10) with their latest known location.
  topIps: Array<{ ip: string; ip_location: string | null; count: number }>;
  // Save counts grouped by the first segment of ip_location (max 15).
  provinceRankings: Array<{ province: string; count: number }>;
}
|
||||
|
||||
/**
|
||||
* Get today's date string in the configured timezone (e.g. "2026-05-04").
|
||||
* Delegates to shared formatLocalDate() in utils/time.ts.
|
||||
*/
|
||||
function todayLocalDate(): string {
|
||||
return formatLocalDate();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the first day of the current month in the configured timezone.
|
||||
*/
|
||||
function monthStartLocalDate(): string {
|
||||
return todayLocalDate().slice(0, 7) + '-01';
|
||||
}
|
||||
|
||||
export function getStats(trendDays: number = 7): AdminStats {
|
||||
const db = getDb();
|
||||
|
||||
// Use local timezone date — NOT UTC via toISOString()
|
||||
const today = todayLocalDate();
|
||||
const monthStart = monthStartLocalDate();
|
||||
|
||||
// IMPORTANT: created_at is stored as "YYYY-MM-DDTHH:mm:ss+08:00" (localTimestamp)
|
||||
// SQLite's date() function would interpret the +08:00 timezone offset and
|
||||
// convert to UTC, giving wrong date. Instead, use SUBSTR to get first 10 chars.
|
||||
const todaySearchesRow = db.prepare(
|
||||
"SELECT COUNT(*) as count FROM search_stats WHERE SUBSTR(created_at, 1, 10) = ?"
|
||||
).get(today) as any;
|
||||
|
||||
const todaySavesRow = db.prepare(
|
||||
"SELECT COUNT(*) as count FROM save_records WHERE SUBSTR(created_at, 1, 10) = ?"
|
||||
).get(today) as any;
|
||||
|
||||
const monthSearchesRow = db.prepare(
|
||||
"SELECT COUNT(*) as count FROM search_stats WHERE SUBSTR(created_at, 1, 10) >= ?"
|
||||
).get(monthStart) as any;
|
||||
|
||||
const monthSavesRow = db.prepare(
|
||||
"SELECT COUNT(*) as count FROM save_records WHERE SUBSTR(created_at, 1, 10) >= ?"
|
||||
).get(monthStart) as any;
|
||||
|
||||
// Total searches
|
||||
const totalSearchesRow = db.prepare(
|
||||
"SELECT COUNT(*) as count FROM search_stats"
|
||||
).get() as any;
|
||||
|
||||
// Total saves
|
||||
const totalSavesRow = db.prepare(
|
||||
"SELECT COUNT(*) as count FROM save_records"
|
||||
).get() as any;
|
||||
|
||||
// Hot keywords
|
||||
const hotKeywords = db.prepare(
|
||||
'SELECT keyword, search_count as count FROM hot_keywords ORDER BY search_count DESC LIMIT 20'
|
||||
).all() as Array<{ keyword: string; count: number }>;
|
||||
|
||||
// Trend data (configurable days, default 7)
|
||||
const trendLen = Math.min(Math.max(trendDays, 1), 90);
|
||||
const trendTrend: Array<{ date: string; searches: number; saves: number; searchDelta: number; saveDelta: number }> = [];
|
||||
for (let i = trendLen - 1; i >= 0; i--) {
|
||||
const d = new Date();
|
||||
const target = new Date(d.getTime() - i * 86400000);
|
||||
const dateStr = formatLocalDate(target);
|
||||
const searchRow = db.prepare(
|
||||
"SELECT COUNT(*) as count FROM search_stats WHERE SUBSTR(created_at, 1, 10) = ?"
|
||||
).get(dateStr) as any;
|
||||
const saveRow = db.prepare(
|
||||
"SELECT COUNT(*) as count FROM save_records WHERE SUBSTR(created_at, 1, 10) = ?"
|
||||
).get(dateStr) as any;
|
||||
trendTrend.push({
|
||||
date: dateStr,
|
||||
searches: searchRow?.count || 0,
|
||||
saves: saveRow?.count || 0,
|
||||
searchDelta: 0,
|
||||
saveDelta: 0,
|
||||
});
|
||||
}
|
||||
// Compute day-over-day delta (absolute change from previous day)
|
||||
for (let i = trendTrend.length - 1; i > 0; i--) {
|
||||
const prev = trendTrend[i - 1];
|
||||
const curr = trendTrend[i];
|
||||
curr.searchDelta = curr.searches - prev.searches;
|
||||
curr.saveDelta = curr.saves - prev.saves;
|
||||
}
|
||||
|
||||
// Cloud usage
|
||||
const cloudUsage = db.prepare(
|
||||
'SELECT cloud_type as cloudType, nickname, storage_used as storageUsed, storage_total as storageTotal, is_active as isActive FROM cloud_configs ORDER BY id ASC'
|
||||
).all() as Array<{
|
||||
cloudType: string;
|
||||
nickname: string;
|
||||
storageUsed: string;
|
||||
storageTotal: string;
|
||||
isActive: boolean;
|
||||
}>;
|
||||
|
||||
// Top IPs from save_records — correctly count total per IP, then get latest location
|
||||
const topIps = db.prepare(
|
||||
`SELECT ip_address as ip, COUNT(*) as count,
|
||||
(SELECT ip_location FROM save_records s2
|
||||
WHERE s2.ip_address = s1.ip_address AND s2.ip_location IS NOT NULL AND s2.ip_location != ''
|
||||
ORDER BY s2.created_at DESC LIMIT 1) as ip_location
|
||||
FROM save_records s1
|
||||
WHERE ip_address IS NOT NULL AND ip_address != ''
|
||||
GROUP BY ip_address
|
||||
ORDER BY count DESC LIMIT 10`
|
||||
).all() as Array<{ ip: string; ip_location: string | null; count: number }>;
|
||||
|
||||
// Province rankings — extract province from ip_location (first segment)
|
||||
let provinceRankings: Array<{ province: string; count: number }> = [];
|
||||
const locRows = db.prepare(
|
||||
`SELECT ip_location FROM save_records WHERE ip_location IS NOT NULL AND ip_location != ''`
|
||||
).all() as Array<{ ip_location: string }>;
|
||||
const provMap = new Map<string, number>();
|
||||
for (const row of locRows) {
|
||||
const parts = row.ip_location.trim().split(/\s+/);
|
||||
const province = parts[0] || '未知';
|
||||
provMap.set(province, (provMap.get(province) || 0) + 1);
|
||||
}
|
||||
provinceRankings = Array.from(provMap.entries())
|
||||
.map(([province, count]) => ({ province, count }))
|
||||
.sort((a, b) => b.count - a.count)
|
||||
.slice(0, 15);
|
||||
|
||||
return {
|
||||
todaySearches: (todaySearchesRow as any)?.count || 0,
|
||||
todaySaves: (todaySavesRow as any)?.count || 0,
|
||||
monthSearches: (monthSearchesRow as any)?.count || 0,
|
||||
monthSaves: (monthSavesRow as any)?.count || 0,
|
||||
totalSearches: (totalSearchesRow as any)?.count || 0,
|
||||
totalSaves: (totalSavesRow as any)?.count || 0,
|
||||
hotKeywords,
|
||||
trendTrend,
|
||||
cloudUsage,
|
||||
topIps,
|
||||
provinceRankings,
|
||||
};
|
||||
}
|
||||
40
packages/backend/src/admin/system-config.service.ts
Executable file
40
packages/backend/src/admin/system-config.service.ts
Executable file
@@ -0,0 +1,40 @@
|
||||
import { getDb } from '../database/database';
|
||||
import { localTimestamp } from '../utils/time';
|
||||
|
||||
/** One row of the key/value `system_configs` table. */
export interface SystemConfigEntry {
  // Unique config key, e.g. "pansou_url".
  key: string;
  // Stored value; always a string — callers parse as needed.
  value: string;
  // Optional human-readable purpose of the key.
  description?: string;
  // Local timestamp of the last write (set by updateSystemConfig/updateSystemConfigs).
  updated_at?: string;
}
|
||||
|
||||
export function getAllSystemConfigs(): SystemConfigEntry[] {
|
||||
const db = getDb();
|
||||
return db.prepare('SELECT key, value, description, updated_at FROM system_configs ORDER BY key').all() as SystemConfigEntry[];
|
||||
}
|
||||
|
||||
export function getSystemConfig(key: string): string | null {
|
||||
const db = getDb();
|
||||
const row = db.prepare('SELECT value FROM system_configs WHERE key = ?').get(key) as { value: string } | undefined;
|
||||
return row?.value ?? null;
|
||||
}
|
||||
|
||||
export function updateSystemConfig(key: string, value: string): void {
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
"UPDATE system_configs SET value = ?, updated_at = ? WHERE key = ?"
|
||||
).run(value, localTimestamp(), key);
|
||||
}
|
||||
|
||||
export function updateSystemConfigs(entries: { key: string; value: string }[]): void {
|
||||
const db = getDb();
|
||||
const update = db.prepare(
|
||||
"UPDATE system_configs SET value = ?, updated_at = ? WHERE key = ?"
|
||||
);
|
||||
const tx = db.transaction((items: { key: string; value: string }[]) => {
|
||||
for (const item of items) {
|
||||
update.run(item.value, localTimestamp(), item.key);
|
||||
}
|
||||
});
|
||||
tx(entries);
|
||||
}
|
||||
615
packages/backend/src/cloud/admin.routes.ts
Normal file
615
packages/backend/src/cloud/admin.routes.ts
Normal file
@@ -0,0 +1,615 @@
|
||||
import { Router, Request, Response } from 'express';
|
||||
// Native fetch available in Node 20+
|
||||
import fs from "fs";
|
||||
import { execSync } from 'child_process';
|
||||
import { adminLimiter, loginLimiter } from '../middleware/rate-limit';
|
||||
import { getSaveRecords } from '../cloud/cloud.service';
|
||||
import { getCloudConfigs, getCloudConfigById, saveCloudConfig, deleteCloudConfig, getCloudConfigByType, testCloudConnection, testCloudConnectionWithCookie } from '../cloud/credential.service';
|
||||
// Note: check-in routes were removed (sign-in feature removed)
|
||||
import { getAllCloudTypes } from '../cloud/cloud-types.service';
|
||||
import { login, authMiddleware, verifyToken, changePassword } from '../admin/auth.service';
|
||||
import { getStats } from '../admin/stats.service';
|
||||
import { getAllSystemConfigs, updateSystemConfig, updateSystemConfigs, getSystemConfig } from '../admin/system-config.service';
|
||||
import { testProxyConnection } from '../utils/proxy-agent';
|
||||
import { getDb } from '../database/database';
|
||||
import { reconnectRedis, testRedisConnection } from '../middleware/cache';
|
||||
import { startQrLogin, getQrLoginStatus, cancelQrLogin } from '../cloud/qr-login.service';
|
||||
import { BaiduDriver } from '../cloud/drivers/baidu.driver';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Public routes (no auth required)
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/**
|
||||
* POST /api/admin/login
|
||||
* Admin login
|
||||
*/
|
||||
router.post('/admin/login', loginLimiter, (req: Request, res: Response) => {
|
||||
try {
|
||||
const { username, password } = req.body;
|
||||
if (!username || !password) {
|
||||
res.status(400).json({ error: 'Username and password are required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const token = login(username, password);
|
||||
if (!token) {
|
||||
res.status(401).json({ error: 'Invalid credentials' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ token });
|
||||
} catch (err: any) {
|
||||
console.error('[Login] Error:', err);
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/admin/cloud-types
|
||||
* List all cloud types (public, read-only).
|
||||
*/
|
||||
router.get('/admin/cloud-types', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const types = getAllCloudTypes();
|
||||
res.json({ types });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// QR Login routes (no auth — user not logged in yet)
|
||||
// MUST be before authMiddleware!
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
// ===== 夸克扫码登录 =====
|
||||
router.post('/admin/quark/qr-login/start', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await startQrLogin();
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/admin/quark/qr-login/:sessionId/status', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
const result = await getQrLoginStatus(sessionId);
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/admin/quark/qr-login/:sessionId/cancel', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
await cancelQrLogin(sessionId);
|
||||
res.json({ ok: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
// ===== 百度扫码登录 =====
|
||||
router.post("/admin/baidu/qr-login/start", async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await BaiduDriver.startQrLogin();
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/admin/baidu/qr-login/:sessionId/status", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
const result: any = await BaiduDriver.getQrLoginStatus(sessionId);
|
||||
// Map to frontend-expected format (frontend reads data.cookie)
|
||||
res.json({
|
||||
ok: true,
|
||||
status: result.status,
|
||||
cookie: result.cookie || result.access_token || "",
|
||||
nickname: result.nickname || "",
|
||||
storage_used: result.storage_used || "",
|
||||
storage_total: result.storage_total || "",
|
||||
});
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/admin/baidu/qr-login/:sessionId/cancel", async (req: Request, res: Response) => {
|
||||
try {
|
||||
BaiduDriver.cancelQrLogin(req.params.sessionId as string);
|
||||
} catch {}
|
||||
res.json({ ok: true });
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
// Auth wall — every /admin route registered BELOW this line requires a valid
// JWT (authMiddleware). Login and QR-login routes above remain public.
// ═══════════════════════════════════════
router.use('/admin', authMiddleware);
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Cloud Configs CRUD
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/cloud-configs — list all cloud configs */
|
||||
router.get('/admin/cloud-configs', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const configs = getCloudConfigs();
|
||||
res.json(configs);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to fetch cloud configs' });
|
||||
}
|
||||
});
|
||||
|
||||
/** POST /api/admin/cloud-configs — create or smart-replace a cloud config */
|
||||
router.post('/admin/cloud-configs', (req: Request, res: Response) => {
|
||||
try {
|
||||
const data = req.body;
|
||||
if (!data.cloud_type) {
|
||||
res.status(400).json({ error: 'cloud_type is required' });
|
||||
return;
|
||||
}
|
||||
// Normalize is_active: frontend sends boolean, SQLite needs 0/1
|
||||
if (typeof data.is_active === 'boolean') data.is_active = data.is_active ? 1 : 0;
|
||||
// Normalize is_transfer_enabled: frontend sends boolean, SQLite needs 0/1
|
||||
if (typeof data.is_transfer_enabled === 'boolean') data.is_transfer_enabled = data.is_transfer_enabled ? 1 : 0;
|
||||
const saved = saveCloudConfig(data);
|
||||
res.json(saved);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to save cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** PUT /api/admin/cloud-configs/:id — update an existing cloud config */
|
||||
router.put('/admin/cloud-configs/:id', (req: Request, res: Response) => {
|
||||
try {
|
||||
const id = parseInt(req.params.id as string);
|
||||
const existing = getCloudConfigById(id);
|
||||
if (!existing) {
|
||||
res.status(404).json({ error: 'Cloud config not found' });
|
||||
return;
|
||||
}
|
||||
const saved = saveCloudConfig({ ...req.body, id });
|
||||
res.json(saved);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to update cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** DELETE /api/admin/cloud-configs/:id */
|
||||
router.delete('/admin/cloud-configs/:id', (req: Request, res: Response) => {
|
||||
try {
|
||||
const id = parseInt(req.params.id as string);
|
||||
const ok = deleteCloudConfig(id);
|
||||
if (!ok) {
|
||||
res.status(404).json({ error: 'Cloud config not found' });
|
||||
return;
|
||||
}
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to delete cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** POST /api/admin/cloud-configs/:type/test — test cloud connection (by type or id) */
|
||||
router.post('/admin/cloud-configs/:type/test', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const type = req.params.type as string;
|
||||
const { cookie, id } = req.body;
|
||||
|
||||
// If cookie is provided directly, test with it (for new configs not yet saved)
|
||||
if (cookie) {
|
||||
const result = await testCloudConnectionWithCookie(type, cookie);
|
||||
res.json(result);
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise test by config id
|
||||
if (id) {
|
||||
const result = await testCloudConnection(parseInt(id));
|
||||
res.json(result);
|
||||
return;
|
||||
}
|
||||
|
||||
res.status(400).json({ success: false, message: 'Provide either cookie or id' });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ success: false, message: err.message || 'Connection test failed' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Stats
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/stats */
|
||||
router.get('/admin/stats', (req: Request, res: Response) => {
|
||||
try {
|
||||
const days = req.query.days ? parseInt(req.query.days as string) : 7;
|
||||
const stats = getStats(days);
|
||||
res.json(stats);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get stats' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Save Records (转存日志)
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/save-records */
|
||||
router.get('/admin/save-records', (req: Request, res: Response) => {
|
||||
try {
|
||||
const page = parseInt(req.query.page as string) || 1;
|
||||
const pageSize = parseInt(req.query.pageSize as string) || 20;
|
||||
const startDate = req.query.startDate as string | undefined;
|
||||
const endDate = req.query.endDate as string | undefined;
|
||||
const status = req.query.status as string | undefined;
|
||||
const sourceType = req.query.sourceType as string | undefined;
|
||||
const keyword = req.query.keyword as string | undefined;
|
||||
const result = getSaveRecords(page, pageSize, startDate, endDate, status, sourceType, keyword);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get save records' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// System Configs
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/system-configs */
|
||||
router.get('/admin/system-configs', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const configs = getAllSystemConfigs();
|
||||
res.json(configs);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get system configs' });
|
||||
}
|
||||
});
|
||||
|
||||
/** PUT /api/admin/system-configs — batch update */
|
||||
router.put('/admin/system-configs', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { entries } = req.body;
|
||||
if (!entries || !Array.isArray(entries)) {
|
||||
res.status(400).json({ error: 'entries array is required' });
|
||||
return;
|
||||
}
|
||||
updateSystemConfigs(entries);
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to update system configs' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Cloud Types Toggle
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** PUT /api/admin/cloud-types — toggle cloud type enabled/disabled */
|
||||
router.put('/admin/cloud-types', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { type, enabled } = req.body;
|
||||
if (!type) {
|
||||
res.status(400).json({ error: 'type is required' });
|
||||
return;
|
||||
}
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
`INSERT INTO system_configs (key, value, description) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value = excluded.value`
|
||||
).run(`cloud_type_${type}_enabled`, enabled ? '1' : '0', `Enable/disable ${type} cloud drive`);
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to toggle cloud type' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Change Password
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** POST /api/admin/change-password */
|
||||
router.post('/admin/change-password', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { oldPassword, newPassword } = req.body;
|
||||
if (!oldPassword || !newPassword) {
|
||||
res.status(400).json({ error: 'Both old and new passwords are required' });
|
||||
return;
|
||||
}
|
||||
// Get username from JWT
|
||||
const authHeader = req.headers.authorization || '';
|
||||
const token = authHeader.replace('Bearer ', '');
|
||||
const payload = verifyToken(token);
|
||||
if (!payload) {
|
||||
res.status(401).json({ error: 'Invalid token' });
|
||||
return;
|
||||
}
|
||||
const result = changePassword(payload.username, oldPassword, newPassword);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to change password' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// DB Status
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/db-status */
|
||||
router.get('/admin/db-status', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const dbFile = getSystemConfig('db_path') || '';
|
||||
let dbSize = 'N/A';
|
||||
if (dbFile) {
|
||||
try {
|
||||
const stats = fs.statSync(dbFile);
|
||||
dbSize = (stats.size / 1024 / 1024).toFixed(2) + ' MB';
|
||||
} catch {}
|
||||
}
|
||||
|
||||
const db = getDb();
|
||||
const counts = {
|
||||
save_records: (db.prepare('SELECT COUNT(*) as c FROM save_records').get() as any)?.c || 0,
|
||||
search_stats: (db.prepare('SELECT COUNT(*) as c FROM search_stats').get() as any)?.c || 0,
|
||||
system_configs: (db.prepare('SELECT COUNT(*) as c FROM system_configs').get() as any)?.c || 0,
|
||||
cloud_configs: (db.prepare('SELECT COUNT(*) as c FROM cloud_configs').get() as any)?.c || 0,
|
||||
content_cache: (db.prepare('SELECT COUNT(*) as c FROM content_cache').get() as any)?.c || 0,
|
||||
};
|
||||
|
||||
// Redis status
|
||||
let redis_status = 'disconnected';
|
||||
let redis_url = getSystemConfig('redis_url') || '';
|
||||
try {
|
||||
const testResult = await testRedisConnection(redis_url);
|
||||
redis_status = testResult.ok ? 'connected' : 'disconnected';
|
||||
} catch {
|
||||
redis_status = 'error';
|
||||
}
|
||||
|
||||
res.json({
|
||||
db_size: dbSize,
|
||||
db_path: dbFile,
|
||||
...counts,
|
||||
redis_status,
|
||||
redis_url,
|
||||
});
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get DB status' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Test Redis Connection
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** POST /api/admin/test-redis */
|
||||
router.post('/admin/test-redis', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { url } = req.body;
|
||||
if (!url) {
|
||||
res.status(400).json({ ok: false, info: 'Redis URL is required' });
|
||||
return;
|
||||
}
|
||||
const result = await testRedisConnection(url);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, info: err.message || 'Redis test failed' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Test External Service
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** POST /api/admin/test-external-service */
|
||||
router.post('/admin/test-external-service', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { type, url, token } = req.body;
|
||||
const start = Date.now();
|
||||
|
||||
switch (type) {
|
||||
case 'pansou': {
|
||||
const pansouUrl = url || getSystemConfig('pansou_url') || '';
|
||||
if (!pansouUrl) {
|
||||
res.json({ ok: false, info: 'PanSou URL not configured' });
|
||||
return;
|
||||
}
|
||||
const response = await fetch(pansouUrl + '/api/health', { signal: AbortSignal.timeout(8000) });
|
||||
const data: any = await response.json();
|
||||
const latency = Date.now() - start;
|
||||
res.json({
|
||||
ok: response.ok && data?.status === 'ok',
|
||||
latency,
|
||||
info: response.ok ? `连接成功 (${data?.channels_count || 0} 频道, ${data?.plugin_count || 0} 插件)` : '连接失败',
|
||||
});
|
||||
break;
|
||||
}
|
||||
case 'video_parser': {
|
||||
const parserUrl = url || getSystemConfig('video_parser_url') || '';
|
||||
if (!parserUrl) {
|
||||
res.json({ ok: false, info: 'Video Parser URL not configured' });
|
||||
return;
|
||||
}
|
||||
const response = await fetch(parserUrl + '/health', { signal: AbortSignal.timeout(8000) });
|
||||
const latency = Date.now() - start;
|
||||
res.json({
|
||||
ok: response.ok,
|
||||
latency,
|
||||
info: response.ok ? '连接成功' : `HTTP ${response.status}`,
|
||||
});
|
||||
break;
|
||||
}
|
||||
case 'tmdb': {
|
||||
const tmdbToken = token || getSystemConfig('tmdb_api_key') || '';
|
||||
if (!tmdbToken) {
|
||||
res.json({ ok: false, info: 'TMDB API Key not configured' });
|
||||
return;
|
||||
}
|
||||
const response = await fetch('https://api.themoviedb.org/3/configuration', {
|
||||
headers: { Authorization: `Bearer ${tmdbToken}` },
|
||||
signal: AbortSignal.timeout(8000),
|
||||
});
|
||||
const latency = Date.now() - start;
|
||||
res.json({
|
||||
ok: response.ok,
|
||||
latency,
|
||||
info: response.ok ? '连接成功' : `HTTP ${response.status}`,
|
||||
});
|
||||
break;
|
||||
}
|
||||
case 'proxy': {
|
||||
const proxyUrl = url || getSystemConfig('search_proxy_url') || '';
|
||||
if (!proxyUrl) {
|
||||
res.json({ ok: false, info: 'Proxy URL not configured' });
|
||||
return;
|
||||
}
|
||||
const result = await testProxyConnection(proxyUrl);
|
||||
res.json(result);
|
||||
break;
|
||||
}
|
||||
case 'ip_geo': {
|
||||
const geoUrl = url || getSystemConfig('ip_geo_api_url') || '';
|
||||
if (!geoUrl) {
|
||||
res.json({ ok: false, info: '请先输入 IP 归属地查询 API 地址' });
|
||||
return;
|
||||
}
|
||||
const testUrl = geoUrl.replace('{ip}', '8.8.8.8');
|
||||
const response = await fetch(testUrl, { signal: AbortSignal.timeout(8000) });
|
||||
const data: any = await response.json();
|
||||
const latency = Date.now() - start;
|
||||
const valid = !!(data?.country || data?.region || data?.city || data?.countryCode);
|
||||
res.json({ ok: valid, latency, info: valid ? '连接成功' : '响应格式不符' });
|
||||
break;
|
||||
}
|
||||
default:
|
||||
res.json({ ok: false, info: `Unknown service type: ${type}` });
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, info: err.message || 'External service test failed' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Pansou Info & Update
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/**
 * GET /api/admin/pansou-info — PanSou health + version + update check.
 *
 * Response fields: status ('connected' | 'disconnected' | 'error'),
 * channelCount, pluginCount, diskCount, version, hasUpdate, latestVersion.
 * Version strings are the first 10 chars (YYYY-MM-DD) of the OCI
 * `org.opencontainers.image.created` label, locally via `docker inspect`
 * and remotely via the GHCR registry API. Remote checks are cached on disk
 * for 3 days. All version/update failures are swallowed: the health part of
 * the response is still returned.
 */
router.get('/admin/pansou-info', async (_req: Request, res: Response) => {
  try {
    const baseUrl = getSystemConfig('pansou_url') || '';
    if (!baseUrl) {
      // No PanSou configured at all — report a clean "disconnected" shape.
      res.json({ status: 'disconnected', channelCount: 0, pluginCount: 0, diskCount: 0, version: '', hasUpdate: false, latestVersion: '' });
      return;
    }

    // Fetch PanSou health (8s timeout so the admin page never hangs).
    const healthUrl = baseUrl + '/api/health';
    const response = await fetch(healthUrl, { signal: AbortSignal.timeout(8000) });
    const healthData: any = await response.json();
    const channelCount = healthData.channels_count || 0;
    const pluginCount = healthData.plugin_count || 0;

    // Derive disk count from channel names: count distinct known drive
    // keywords appearing in any channel name (each channel contributes at
    // most one keyword thanks to the `break`).
    const driveKeywords = ['aliyun', 'baidu', 'quark', '115', 'pikpak', 'xunlei', 'uc', '123', '139', '189', 'tianyi', 'netease'];
    const drives = new Set<string>();
    for (const ch of (healthData.channels || [])) {
      for (const kw of driveKeywords) {
        if (ch.toLowerCase().includes(kw)) { drives.add(kw); break; }
      }
    }
    // Fallback of 5 when no keyword matched — presumably a cosmetic default
    // for the dashboard; confirm intent before changing.
    const diskCount = drives.size || 5;

    // Get local version from the docker image label of the running container.
    let version = '';
    let hasUpdate = false;
    let latestVersion = '';
    try {
      const created = execSync(
        `docker inspect CloudSearch_PanSou --format '{{index .Config.Labels "org.opencontainers.image.created"}}'`,
        { timeout: 5000, encoding: 'utf8' }
      ).trim();
      version = created ? created.slice(0, 10) : '';

      // Check update cache: skip the (slow) GHCR round-trips if we asked
      // within the last 3 days.
      const cacheFile = '/tmp/pansou-update-cache.json';
      let cache: any = null;
      try { cache = JSON.parse(fs.readFileSync(cacheFile, 'utf8') || 'null'); } catch {}
      const threeDays = 3 * 24 * 3600 * 1000;

      if (!cache || (Date.now() - cache.checkedAt) > threeDays) {
        // Check GHCR for latest version: anonymous pull token → multi-arch
        // manifest index → amd64 image manifest → config blob → created label.
        try {
          const tokenRes = await fetch(
            'https://ghcr.io/token?scope=repository:fish2018/pansou-web:pull&service=ghcr.io'
          );
          const ghcrToken = (await tokenRes.json() as any).token;
          const manifestRes = await fetch(
            'https://ghcr.io/v2/fish2018/pansou-web/manifests/latest',
            { headers: { Authorization: `Bearer ${ghcrToken}`, Accept: 'application/vnd.oci.image.index.v1+json, application/vnd.docker.distribution.manifest.list.v2+json' } }
          );
          const manifestList: any = await manifestRes.json();
          const amd64 = manifestList.manifests?.find((m: any) => m.platform?.architecture === 'amd64' && m.platform?.os === 'linux');
          if (amd64) {
            const blobRes = await fetch(
              `https://ghcr.io/v2/fish2018/pansou-web/manifests/${amd64.digest}`,
              { headers: { Authorization: `Bearer ${ghcrToken}`, Accept: 'application/vnd.oci.image.manifest.v1+json' } }
            );
            const blobData: any = await blobRes.json();
            const cfgDigest = blobData.config?.digest;
            if (cfgDigest) {
              const cfgRes = await fetch(
                `https://ghcr.io/v2/fish2018/pansou-web/blobs/${cfgDigest}`,
                { headers: { Authorization: `Bearer ${ghcrToken}` } }
              );
              const cfgData: any = await cfgRes.json();
              const remoteCreated = cfgData.config?.Labels?.['org.opencontainers.image.created'];
              if (remoteCreated) {
                latestVersion = remoteCreated.slice(0, 10);
                // Only flag an update when we also know the local version.
                if (version && latestVersion !== version) hasUpdate = true;
              }
            }
          }
        } catch {}
        // Cache the result (even a failed check, to avoid hammering GHCR).
        fs.writeFileSync(cacheFile, JSON.stringify({ checkedAt: Date.now(), hasUpdate, latestVersion }));
      } else {
        hasUpdate = cache.hasUpdate;
        latestVersion = cache.latestVersion;
      }
    } catch {}
    // NOTE(review): the empty catch above hides docker/cache errors entirely;
    // version fields just come back blank in that case.

    res.json({
      status: response.ok ? 'connected' : 'disconnected',
      channelCount,
      pluginCount,
      diskCount,
      version,
      hasUpdate,
      latestVersion,
    });
  } catch (err: any) {
    // Health fetch itself failed — report 'error' with zeroed counters
    // (HTTP 200 on purpose; the dashboard reads `status`).
    res.json({ status: 'error', channelCount: 0, pluginCount: 0, diskCount: 0, version: '', hasUpdate: false, latestVersion: '', error: err.message });
  }
});
|
||||
|
||||
/** POST /api/admin/update-pansou — pull latest pansou image + recreate container */
|
||||
router.post('/admin/update-pansou', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
execSync('docker pull ghcr.io/fish2018/pansou-web:latest', { timeout: 120000 });
|
||||
execSync('docker compose -p cloudsearch -f /app/docker-compose.yml up -d pansou', { timeout: 60000 });
|
||||
try { fs.unlinkSync('/tmp/pansou-update-cache.json'); } catch {}
|
||||
res.json({ success: true, message: 'PanSou 更新成功' });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ success: false, error: err.message || 'PanSou 更新失败' });
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
254
packages/backend/src/cloud/cleanup.service.ts
Executable file
254
packages/backend/src/cloud/cleanup.service.ts
Executable file
@@ -0,0 +1,254 @@
|
||||
import { getDb } from '../database/database';
|
||||
import { getSystemConfig, updateSystemConfig } from '../admin/system-config.service';
|
||||
import { formatLocalDate, formatLocalDateTime } from '../utils/time';
|
||||
import { QuarkDriver } from './drivers/quark.driver';
|
||||
import { BaiduDriver } from './drivers/baidu.driver';
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// CloudCleanupDriver — contract that each cloud driver must fulfill
|
||||
// to participate in the cleanup cycle.
|
||||
//
|
||||
// To add a new cloud type (e.g. Baidu, Aliyun), implement these three
|
||||
// methods in the driver and register it in getDriverForCleanup() below.
|
||||
// The controller (this file) handles WHEN and WITH WHAT parameters;
|
||||
// the driver handles HOW.
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/** Each cleanup operation returns { trashed: number; errors: string[] } */
interface CleanupOpResult { trashed: number; errors: string[] }

/**
 * Contract a cloud driver must implement to take part in the cleanup cycle.
 * The controller in this file decides WHEN to run and WITH WHAT parameters;
 * the driver decides HOW to act against its particular cloud API.
 */
interface CloudCleanupDriver {
  /** Trash date folders (YYYY-MM-DD) older than `days`. */
  cleanupOldDateFolders(days: number): Promise<CleanupOpResult>;
  /**
   * If used space exceeds thresholdPercent% of TOTAL capacity,
   * delete oldest date folders until totalBytes * deletePercent/100
   * of TOTAL capacity is freed.
   * @param thresholdPercent — trigger when usage >= this % of total
   * @param deletePercent — free this % of total capacity
   */
  cleanupBySpaceThreshold(thresholdPercent: number, deletePercent: number): Promise<CleanupOpResult>;
  /** Permanently empty the recycle bin. Returns true on success. */
  emptyTrash(): Promise<boolean>;
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Driver factory — create the right driver for a given cloud config.
|
||||
// When adding a new cloud type, add a case here.
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
function getDriverForCleanup(config: { cloud_type: string; cookie: string }): CloudCleanupDriver | null {
|
||||
switch (config.cloud_type) {
|
||||
case 'quark':
|
||||
return new QuarkDriver({ cookie: config.cookie });
|
||||
case 'baidu':
|
||||
return new BaiduDriver({ cookie: config.cookie });
|
||||
// case 'aliyun': return new AliyunDriver({ cookie: config.cookie });
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// Cleanup controller — reads system configs and dispatches to each
|
||||
// active cloud driver. Every driver receives the same parameters;
|
||||
// the driver decides whether/how to act.
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
/** Aggregate result of one full cleanup cycle (see runFullCleanup). */
interface CleanupStats {
  filesTrashed: number;   // date folders moved to trash across all drivers
  logsDeleted: number;    // save_records rows deleted
  trashEmptied: boolean;  // true if at least one driver's recycle bin was emptied
  errors: string[];       // per-driver error messages, tagged [cloud_type#id]
}
|
||||
|
||||
/** Get all active cloud configs (any type). Used by the orchestrator. */
|
||||
function getActiveCleanupConfigs(): Array<{ id: number; cloud_type: string; cookie: string; nickname?: string }> {
|
||||
const db = getDb();
|
||||
return db.prepare(
|
||||
`SELECT id, cloud_type, cookie, nickname FROM cloud_configs
|
||||
WHERE is_active = 1 AND cookie IS NOT NULL AND cookie != ''`
|
||||
).all() as Array<{ id: number; cloud_type: string; cookie: string; nickname?: string }>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dispatch cleanupOldDateFolders to every active driver.
|
||||
* Each driver receives the same `days` parameter.
|
||||
*/
|
||||
async function cleanupCloudFiles(days: number): Promise<CleanupOpResult> {
|
||||
const configs = getActiveCleanupConfigs();
|
||||
const errors: string[] = [];
|
||||
let totalTrashed = 0;
|
||||
|
||||
for (const cfg of configs) {
|
||||
const driver = getDriverForCleanup(cfg);
|
||||
if (!driver) {
|
||||
console.log(`[Cleanup] No driver for cloud_type="${cfg.cloud_type}", skipping config #${cfg.id}`);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const result = await driver.cleanupOldDateFolders(days);
|
||||
totalTrashed += result.trashed;
|
||||
errors.push(...result.errors.map(e => `[${cfg.cloud_type}#${cfg.id}] ${e}`));
|
||||
} catch (err: any) {
|
||||
errors.push(`[${cfg.cloud_type}#${cfg.id}] cleanupOldDateFolders: ${err.message}`);
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 1000));
|
||||
}
|
||||
|
||||
return { trashed: totalTrashed, errors };
|
||||
}
|
||||
|
||||
/**
|
||||
* Dispatch cleanupBySpaceThreshold to every active driver.
|
||||
* Each driver receives the same threshold/delete percentages.
|
||||
*/
|
||||
async function cleanupAllBySpaceThreshold(
|
||||
thresholdPercent: number,
|
||||
deletePercent: number,
|
||||
): Promise<CleanupOpResult> {
|
||||
const configs = getActiveCleanupConfigs();
|
||||
const errors: string[] = [];
|
||||
let totalTrashed = 0;
|
||||
|
||||
for (const cfg of configs) {
|
||||
const driver = getDriverForCleanup(cfg);
|
||||
if (!driver) {
|
||||
console.log(`[Cleanup] No driver for cloud_type="${cfg.cloud_type}", skipping config #${cfg.id}`);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const result = await driver.cleanupBySpaceThreshold(thresholdPercent, deletePercent);
|
||||
totalTrashed += result.trashed;
|
||||
errors.push(...result.errors.map(e => `[${cfg.cloud_type}#${cfg.id}] ${e}`));
|
||||
} catch (err: any) {
|
||||
errors.push(`[${cfg.cloud_type}#${cfg.id}] cleanupBySpaceThreshold: ${err.message}`);
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 1000));
|
||||
}
|
||||
|
||||
return { trashed: totalTrashed, errors };
|
||||
}
|
||||
|
||||
/**
|
||||
* Dispatch emptyTrash to every active driver.
|
||||
*/
|
||||
export async function emptyAllTrash(): Promise<{ emptied: boolean; errors: string[] }> {
|
||||
const configs = getActiveCleanupConfigs();
|
||||
const errors: string[] = [];
|
||||
let emptied = false;
|
||||
|
||||
for (const cfg of configs) {
|
||||
const driver = getDriverForCleanup(cfg);
|
||||
if (!driver) continue;
|
||||
try {
|
||||
const ok = await driver.emptyTrash();
|
||||
if (ok) {
|
||||
emptied = true;
|
||||
console.log(`[Cleanup] ✅ Emptied trash for [${cfg.cloud_type}#${cfg.id}]`);
|
||||
} else {
|
||||
errors.push(`[${cfg.cloud_type}#${cfg.id}] empty trash failed`);
|
||||
}
|
||||
} catch (err: any) {
|
||||
errors.push(`[${cfg.cloud_type}#${cfg.id}]: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
return { emptied, errors };
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete save_records older than the specified number of days.
|
||||
*/
|
||||
function cleanupLogs(days: number): number {
|
||||
const db = getDb();
|
||||
const cutoffStr = formatLocalDateTime(new Date(Date.now() - days * 24 * 60 * 60 * 1000));
|
||||
|
||||
const result = db.prepare('DELETE FROM save_records WHERE created_at < ?').run(cutoffStr);
|
||||
console.log(`[Cleanup] Deleted ${result.changes} save records older than ${days} days (before ${cutoffStr})`);
|
||||
return result.changes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Run full cleanup cycle:
|
||||
* 0. Force-clean by space threshold (if enabled & exceeded) — priority highest
|
||||
* 1. Delete old save_records
|
||||
* 2. Trash old date folders by retention days
|
||||
* 3. Empty recycle bin (permanently free space)
|
||||
*/
|
||||
export async function runFullCleanup(): Promise<CleanupStats> {
|
||||
const fileDays = parseInt(getSystemConfig('cleanup_file_retention_days') || '7', 10);
|
||||
const logDays = parseInt(getSystemConfig('cleanup_log_retention_days') || '30', 10);
|
||||
const emptyTrashEnabled = getSystemConfig('cleanup_empty_trash') !== 'false';
|
||||
|
||||
const stats: CleanupStats = { filesTrashed: 0, logsDeleted: 0, trashEmptied: false, errors: [] };
|
||||
|
||||
// 0. Space threshold (highest priority)
|
||||
const thresholdEnabled = getSystemConfig('cleanup_space_threshold_enabled');
|
||||
if (thresholdEnabled === 'true') {
|
||||
const thresholdPercent = parseInt(getSystemConfig('cleanup_space_threshold_percent') || '90', 10);
|
||||
const deletePercent = parseInt(getSystemConfig('cleanup_space_threshold_delete_percent') || '10', 10);
|
||||
if (thresholdPercent > 0 && thresholdPercent < 100) {
|
||||
try {
|
||||
const result = await cleanupAllBySpaceThreshold(thresholdPercent, deletePercent);
|
||||
stats.filesTrashed += result.trashed;
|
||||
stats.errors.push(...result.errors);
|
||||
} catch (err: any) {
|
||||
stats.errors.push(`空间阈值清理失败: ${err.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 1. Delete old save_records
|
||||
try {
|
||||
stats.logsDeleted = cleanupLogs(logDays);
|
||||
} catch (err: any) {
|
||||
stats.errors.push(`日志清理失败: ${err.message}`);
|
||||
}
|
||||
|
||||
// 2. Trash old files from cloud drives
|
||||
try {
|
||||
const result = await cleanupCloudFiles(fileDays);
|
||||
stats.filesTrashed += result.trashed;
|
||||
stats.errors.push(...result.errors);
|
||||
} catch (err: any) {
|
||||
stats.errors.push(`文件清理失败: ${err.message}`);
|
||||
}
|
||||
|
||||
// 3. Empty recycle bin (only if enabled, and only if we trashed something)
|
||||
if (emptyTrashEnabled && stats.filesTrashed > 0) {
|
||||
try {
|
||||
const result = await emptyAllTrash();
|
||||
stats.trashEmptied = result.emptied;
|
||||
stats.errors.push(...result.errors);
|
||||
} catch (err: any) {
|
||||
stats.errors.push(`清空回收站失败: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Save last run timestamp and stats
|
||||
updateSystemConfig('cleanup_last_run', formatLocalDateTime());
|
||||
updateSystemConfig('cleanup_last_stats',
|
||||
JSON.stringify({ filesTrashed: stats.filesTrashed, logsDeleted: stats.logsDeleted, trashEmptied: stats.trashEmptied, errors: stats.errors.length })
|
||||
);
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a daily cleanup is due and run it.
|
||||
* Called periodically by the scheduler (setInterval).
|
||||
*/
|
||||
export async function checkAndRunScheduledCleanup(): Promise<void> {
|
||||
const enabled = getSystemConfig('cleanup_enabled');
|
||||
if (enabled !== 'true') return;
|
||||
|
||||
const lastRun = getSystemConfig('cleanup_last_run');
|
||||
const todayStr = formatLocalDate();
|
||||
|
||||
if (lastRun && lastRun.startsWith(todayStr)) return;
|
||||
|
||||
console.log(`[Cleanup] Scheduled cleanup starting at ${new Date().toISOString()}...`);
|
||||
const stats = await runFullCleanup();
|
||||
console.log(`[Cleanup] Done: trashed ${stats.filesTrashed} folders, deleted ${stats.logsDeleted} logs, emptied trash: ${stats.trashEmptied}, errors: ${stats.errors.length}`);
|
||||
}
|
||||
69
packages/backend/src/cloud/cloud-types.service.ts
Executable file
69
packages/backend/src/cloud/cloud-types.service.ts
Executable file
@@ -0,0 +1,69 @@
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
|
||||
/** A cloud/link type entry as exposed to the admin configuration UI. */
export interface CloudTypeInfo {
  type: string;      // stable identifier, e.g. 'quark', 'baidu', 'magnet'
  label: string;     // human-readable display name
  icon: string;      // inline SVG data URI (see ICONS below)
  enabled: boolean;  // current on/off state resolved from system config
}
|
||||
|
||||
/**
 * Cloud-drive icons — inline SVG data URIs; no external icon files needed.
 * (Replaces an earlier approach that served bundled PNG files from
 * /app/dist/frontend/icons/ through the Express static middleware.)
 */
|
||||
function makeSvgIcon(bg: string, letter: string): string {
|
||||
const c = encodeURIComponent(bg);
|
||||
const l = encodeURIComponent(letter);
|
||||
return `data:image/svg+xml,%3Csvg%20viewBox%3D%220%200%2024%2024%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Crect%20width%3D%2224%22%20height%3D%2224%22%20rx%3D%224%22%20fill%3D%22${c}%22%2F%3E%3Ctext%20x%3D%2212%22%20y%3D%2217%22%20font-size%3D%2213%22%20font-weight%3D%22bold%22%20fill%3D%22%23fff%22%20text-anchor%3D%22middle%22%20font-family%3D%22Arial%2Csans-serif%22%3E${l}%3C%2Ftext%3E%3C%2Fsvg%3E`;
|
||||
}
|
||||
|
||||
// Icon lookup keyed by cloud type. Most entries are generated with
// makeSvgIcon (colored square + one glyph); magnet/ed2k/others use
// hand-crafted SVG paths instead of a letter.
const ICONS: Record<string, string> = {
  baidu: makeSvgIcon('#4e6ef2', '百'),
  aliyun: makeSvgIcon('#ff6a00', '阿'),
  quark: makeSvgIcon('#07c160', '夸'),
  '115': makeSvgIcon('#9b59b6', '1'),
  tianyi: makeSvgIcon('#00a1d6', '天'),
  '123pan': makeSvgIcon('#e74c3c', '1'),
  uc: makeSvgIcon('#f39c12', 'U'),
  xunlei: makeSvgIcon('#2ecc71', '迅'),
  pikpak: makeSvgIcon('#8e44ad', 'P'),
  magnet: 'data:image/svg+xml,%3Csvg%20viewBox%3D%220%200%2024%2024%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Crect%20width%3D%2224%22%20height%3D%2224%22%20rx%3D%224%22%20fill%3D%22%236366F1%22%2F%3E%3Cpath%20d%3D%22M7%2016l5-5m-5%200l5%205m5-5l-5-5m5%200l-5%205%22%20stroke%3D%22%23fff%22%20stroke-width%3D%222%22%20stroke-linecap%3D%22round%22%20fill%3D%22none%22%2F%3E%3Ccircle%20cx%3D%2212%22%20cy%3D%2211%22%20r%3D%221%22%20fill%3D%22%23fff%22%2F%3E%3C%2Fsvg%3E',
  ed2k: 'data:image/svg+xml,%3Csvg%20viewBox%3D%220%200%2024%2024%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Crect%20width%3D%2224%22%20height%3D%2224%22%20rx%3D%224%22%20fill%3D%22%238B4513%22%2F%3E%3Ctext%20x%3D%2212%22%20y%3D%2217%22%20font-size%3D%2211%22%20font-weight%3D%22bold%22%20fill%3D%22%23fff%22%20text-anchor%3D%22middle%22%20font-family%3D%22Arial%2Csans-serif%22%3EeD%3C%2Ftext%3E%3C%2Fsvg%3E',
  others: 'data:image/svg+xml,%3Csvg%20viewBox%3D%220%200%2024%2024%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%3Crect%20width%3D%2224%22%20height%3D%2224%22%20rx%3D%224%22%20fill%3D%22%239CA3AF%22%2F%3E%3Cpath%20d%3D%22M6%2013c0-2.8%202.2-5%205-5a5%205%200%200%201%204.5%202.7A4%204%200%200%201%2020%2014a4%204%200%200%201-3%203.9h-8A4%204%200%200%201%206%2013z%22%20fill%3D%22none%22%20stroke%3D%22%23fff%22%20stroke-width%3D%221.5%22%20stroke-linejoin%3D%22round%22%2F%3E%3C%2Fsvg%3E',
};

// Master list of every cloud/link type the system knows about.
// NOTE(review): order here presumably drives display order in the admin UI —
// confirm with the frontend before reordering.
const ALL_CLOUD_TYPES: { type: string; label: string; icon: string }[] = [
  { type: 'quark', label: '夸克网盘', icon: ICONS.quark },
  { type: 'baidu', label: '百度网盘', icon: ICONS.baidu },
  { type: 'aliyun', label: '阿里云盘', icon: ICONS.aliyun },
  { type: '115', label: '115 网盘', icon: ICONS['115'] },
  { type: 'tianyi', label: '天翼云盘', icon: ICONS.tianyi },
  { type: '123pan', label: '123 云盘', icon: ICONS['123pan'] },
  { type: 'uc', label: 'UC 网盘', icon: ICONS.uc },
  { type: 'xunlei', label: '迅雷网盘', icon: ICONS.xunlei },
  { type: 'pikpak', label: 'PikPak', icon: ICONS.pikpak },
  { type: 'magnet', label: '磁力链接', icon: ICONS.magnet },
  { type: 'ed2k', label: '电驴链接', icon: ICONS.ed2k },
  { type: 'others', label: '其他', icon: ICONS.others },
];
|
||||
|
||||
export function isCloudTypeEnabled(type: string): boolean {
|
||||
const val = getSystemConfig(`cloud_type_${type}_enabled`);
|
||||
if (val === null) return type !== 'others';
|
||||
return val === "true" || val === "1";
|
||||
}
|
||||
|
||||
export function getAllCloudTypes(): CloudTypeInfo[] {
|
||||
return ALL_CLOUD_TYPES.map(ct => ({ ...ct, enabled: isCloudTypeEnabled(ct.type) }));
|
||||
}
|
||||
|
||||
export function getEnabledCloudTypeSet(): Set<string> {
|
||||
const enabled = new Set<string>();
|
||||
for (const ct of ALL_CLOUD_TYPES) {
|
||||
if (isCloudTypeEnabled(ct.type)) enabled.add(ct.type);
|
||||
}
|
||||
return enabled;
|
||||
}
|
||||
323
packages/backend/src/cloud/cloud.service.ts
Normal file
323
packages/backend/src/cloud/cloud.service.ts
Normal file
@@ -0,0 +1,323 @@
|
||||
import { getDb } from '../database/database';
|
||||
import { localTimestamp, formatLocalDateTime } from '../utils/time';
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
import { QuarkDriver } from './drivers/quark.driver';
|
||||
import { BaiduDriver } from './drivers/baidu.driver';
|
||||
import { CloudConfig, getAndValidateCredential, getActiveCloudConfigs } from './credential.service';
|
||||
import { lookupIpLocation } from './ip-lookup';
|
||||
|
||||
/** In-flight save dedup: prevents concurrent saves of the same URL (race condition fix).
 *  Keyed by `${cloudType}:${shareUrl}`; see saveFromShare. */
const inFlightSaves = new Map<string, Promise<SaveResult>>();

/**
 * Result returned to callers of saveFromShare.
 * NOTE(review): shareUrl/share_url are both populated with the same value —
 * presumably to serve camelCase and snake_case consumers; confirm before
 * removing either field.
 */
export interface SaveResult {
  success: boolean;
  shareUrl?: string;     // camelCase form of the generated share link
  share_url?: string;    // snake_case duplicate of shareUrl
  sharePwd?: string;     // extraction password for the share, if any
  folderName?: string;   // name of the destination folder
  message: string;       // human-readable status/result message
  file_count?: number;   // files saved (0 on dedup/reuse shortcuts)
  folder_count?: number; // folders saved (0 on dedup/reuse shortcuts)
  duration_ms?: number;  // wall-clock duration of the save
}

/** Row shape of the save_records table. */
export interface SaveRecord {
  id: number;
  source_type: string;            // set to the cloud type by doSaveFromShare
  source_title: string | null;
  source_url: string;             // the original share link being saved
  target_cloud: string;
  share_url: string | null;       // our re-shared link, if the save succeeded
  share_pwd: string | null;
  file_size: string | null;
  file_count: number;
  folder_count: number;
  duration_ms: number;
  status: string;                 // 'success' | 'failed' | 'reused' (per doSaveFromShare)
  error_message: string | null;
  folder_name: string | null;
  original_folder_name: string | null;
  ip_address: string | null;      // requester IP, if provided
  ip_location: string | null;     // resolved via lookupIpLocation
  created_at: string;             // local timestamp string (localTimestamp())
}
|
||||
|
||||
/**
 * Core save logic, extracted so the in-flight dedup wrapper (saveFromShare)
 * can wrap it. Pipeline, in order:
 *   1. Short-term dedup: if the same source URL was saved within the last
 *      60 s, record a 'reused' row and return the existing share link.
 *   2. Share-link reuse (unless save_reuse_enabled === 'false'): if any
 *      prior successful save of this URL exists and its share link still
 *      validates, reuse it instead of re-saving.
 *   3. Credential validation, transfer-enabled check.
 *   4. Dispatch to the per-cloud driver, then record stats + a save_records
 *      row and return the driver's result.
 * Every dedup/reuse DB failure is non-fatal: it logs and falls through to a
 * normal save.
 */
async function doSaveFromShare(shareUrl: string, cloudType: string, sourceTitle?: string, ipAddress?: string): Promise<SaveResult> {
  const db = getDb();
  // Resolve requester geo info up front; reused by every record insert below.
  const ipLocation = await lookupIpLocation(ipAddress || '');

  // ── Short-term dedup: prevent duplicate saves of the same URL within 60 seconds ──
  const DEDUP_WINDOW_SEC = 60;
  let dedupCutoff = '';
  try {
    // Compute the cutoff inside SQLite so it uses the same localtime clock
    // as the created_at values being compared.
    const recentCutoff = db.prepare(
      `SELECT datetime('now','localtime', '-${DEDUP_WINDOW_SEC} seconds') as cutoff`
    ).get() as { cutoff: string };
    dedupCutoff = recentCutoff.cutoff;

    const recentRecord = db.prepare(
      `SELECT share_url, share_pwd, status, error_message, folder_name, original_folder_name FROM save_records
       WHERE source_url = ? AND created_at >= ?
       ORDER BY created_at DESC LIMIT 1`
    ).get(shareUrl, dedupCutoff) as {
      share_url: string | null; share_pwd: string | null; status: string;
      error_message: string | null; folder_name: string | null; original_folder_name: string | null;
    } | undefined;

    if (recentRecord) {
      const alreadySaved = recentRecord.status === 'success' || recentRecord.status === 'reused';
      if (alreadySaved && recentRecord.share_url) {
        console.log(`[Share] 🛡️ Dedup: ${shareUrl} was saved ${DEDUP_WINDOW_SEC}s ago (status=${recentRecord.status}), returning existing share link`);
        // Record the dedup hit itself as a 'reused' row for auditing.
        db.prepare(
          `INSERT INTO save_records (source_type, source_title, source_url, target_cloud, share_url, share_pwd, file_size, file_count, folder_count, duration_ms, status, error_message, folder_name, original_folder_name, ip_address, ip_location, created_at)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
        ).run(
          cloudType, sourceTitle || null, shareUrl, cloudType,
          recentRecord.share_url, recentRecord.share_pwd || null,
          null, 0, 0, 0, 'reused', null,
          recentRecord.folder_name || null, recentRecord.original_folder_name || null,
          ipAddress || null, ipLocation, localTimestamp(),
        );
        return {
          success: true,
          message: `🛡️ 此资源刚在 ${DEDUP_WINDOW_SEC} 秒内转存过,直接返回已有分享链接`,
          share_url: recentRecord.share_url, shareUrl: recentRecord.share_url,
          sharePwd: recentRecord.share_pwd || '', folderName: '',
          file_count: 0, folder_count: 0, duration_ms: 0,
        };
      }
    }
  } catch (err: any) {
    // Dedup is best-effort: any DB error falls through to a normal save.
    console.log(`[Share] Dedup check failed: ${err.message}, proceeding with normal save`);
  }

  // ── Share link reuse: if same source URL was already saved successfully, validate and reuse ──
  const reuseEnabled = getSystemConfig('save_reuse_enabled');
  if (reuseEnabled !== 'false') {
    try {
      const existing = db.prepare(
        `SELECT share_url, share_pwd, folder_name, original_folder_name FROM save_records
         WHERE source_url = ? AND status IN ('success', 'reused') AND share_url IS NOT NULL AND share_url != ''
         ORDER BY created_at DESC LIMIT 1`
      ).get(shareUrl) as { share_url: string; share_pwd: string; folder_name: string | null; original_folder_name: string | null } | undefined;

      if (existing?.share_url) {
        // Dynamic import keeps the validator out of the hot path when
        // reuse never triggers.
        const { LinkValidator } = await import('../validation/link-validator.service');
        const validator = new LinkValidator();
        // NOTE(review): the validator type is hardcoded to 'quark' even when
        // cloudType is e.g. 'baidu' — looks like a bug; confirm LinkValidator
        // semantics before relying on reuse for non-quark clouds.
        const validation = await validator.validate(existing.share_url, 'quark');
        if (validation.status === 'valid') {
          // First reuse within the dedup window is logged as 'success',
          // subsequent rapid repeats as 'reused' (affects stats only).
          const isFirstReuse = dedupCutoff ? !db.prepare(
            `SELECT 1 FROM save_records WHERE source_url = ? AND created_at >= ? AND status = 'reused' LIMIT 1`
          ).get(shareUrl, dedupCutoff) : true;
          const reuseStatus = isFirstReuse ? 'success' : 'reused';
          const reuseMsg = isFirstReuse
            ? `♻️ 检测到此资源之前已转存过,直接复用已存在的分享链接`
            : `♻️ 短时间内重复请求,复用已有分享链接`;

          console.log(`[Share] ♻️ Reusing existing share link for ${shareUrl}: ${existing.share_url} (firstReuse=${isFirstReuse})`);
          db.prepare(
            `INSERT INTO save_records (source_type, source_title, source_url, target_cloud, share_url, share_pwd, file_size, file_count, folder_count, duration_ms, status, error_message, folder_name, original_folder_name, ip_address, ip_location, created_at)
             VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
          ).run(
            cloudType, sourceTitle || null, shareUrl, cloudType,
            existing.share_url, existing.share_pwd || null,
            null, 0, 0, 0, reuseStatus, null,
            existing.folder_name || null, existing.original_folder_name || null,
            ipAddress || null, ipLocation, localTimestamp(),
          );
          return {
            success: true, message: reuseMsg,
            share_url: existing.share_url, shareUrl: existing.share_url,
            sharePwd: existing.share_pwd || '', folderName: '',
            file_count: 0, folder_count: 0, duration_ms: 0,
          };
        }
        console.log(`[Share] Existing share link for ${shareUrl} is invalid/expired, will re-save`);
      }
    } catch (err: any) {
      // Reuse is best-effort as well.
      console.log(`[Share] Link reuse check failed: ${err.message}, proceeding with normal save`);
    }
  }

  // ── Unified credential validation ──
  const credential = await getAndValidateCredential(cloudType);
  if (!credential.valid || !credential.config) {
    return { success: false, message: credential.message };
  }
  const config = credential.config;

  // ── Check transfer enabled ──
  if (config.is_transfer_enabled === 0) {
    return { success: false, message: `${config.nickname || cloudType} 的转存功能已关闭,请先在后台开启` };
  }

  const startTime = Date.now();

  try {
    let driverResult: { success: boolean; message: string; shareUrl?: string; sharePwd?: string; folderName?: string; fileCount?: number; folderCount?: number; originalFolderName?: string };

    // Dispatch to the cloud-specific driver.
    switch (cloudType) {
      case 'quark': {
        const driver = new QuarkDriver({ cookie: config.cookie!, nickname: config.nickname });
        driverResult = await driver.saveFromShare(shareUrl, sourceTitle);
        break;
      }
      case 'baidu': {
        const driver = new BaiduDriver({ cookie: config.cookie!, nickname: config.nickname });
        driverResult = await driver.saveFromShare(shareUrl, sourceTitle);
        break;
      }
      case 'aliyun':
        return { success: false, message: '阿里云盘保存功能暂未实现' };
      default:
        return { success: false, message: `暂不支持 ${cloudType} 的保存功能` };
    }

    const durationMs = Date.now() - startTime;

    // Update per-account health counters on the cloud config row.
    if (driverResult.success) {
      db.prepare(
        `UPDATE cloud_configs SET last_used_at = datetime('now','localtime'), total_saves = total_saves + 1, consecutive_failures = 0 WHERE id = ?`
      ).run(config.id);
    } else if ((driverResult as any).cookieExpired) {
      // Cookie expired — don't count as failure, user needs to re-login
    } else {
      db.prepare(
        `UPDATE cloud_configs SET consecutive_failures = consecutive_failures + 1 WHERE id = ?`
      ).run(config.id);
    }

    // Record the attempt (success or failure) in save_records.
    db.prepare(
      `INSERT INTO save_records (source_type, source_title, source_url, target_cloud, share_url, share_pwd, file_size, file_count, folder_count, duration_ms, status, error_message, folder_name, original_folder_name, ip_address, ip_location, created_at)
       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
    ).run(
      cloudType, sourceTitle || driverResult.folderName || null, shareUrl, cloudType,
      driverResult.shareUrl || null, driverResult.sharePwd || null,
      null, driverResult.fileCount || 0, driverResult.folderCount || 0,
      durationMs, driverResult.success ? 'success' : 'failed',
      driverResult.success ? null : driverResult.message,
      driverResult.folderName || null, driverResult.originalFolderName || null,
      ipAddress || null, ipLocation, localTimestamp(),
    );

    return {
      success: driverResult.success,
      message: driverResult.message,
      share_url: driverResult.shareUrl || '',
      shareUrl: driverResult.shareUrl,
      sharePwd: (driverResult as any).sharePwd || '',
      folderName: driverResult.folderName || '',
      file_count: driverResult.fileCount || 0,
      folder_count: driverResult.folderCount || 0,
      duration_ms: durationMs,
    };
  } catch (err: any) {
    // Driver threw: count the failure and record a minimal failed row.
    const durationMs = Date.now() - startTime;
    const errorMessage = err.message || 'Failed to save to cloud';

    db.prepare(
      `UPDATE cloud_configs SET consecutive_failures = consecutive_failures + 1 WHERE id = ?`
    ).run(config.id);

    db.prepare(
      `INSERT INTO save_records (source_type, source_url, target_cloud, duration_ms, status, error_message, ip_address, ip_location, created_at)
       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`
    ).run(cloudType, shareUrl, cloudType, durationMs, 'failed', errorMessage, ipAddress || null, ipLocation, localTimestamp());

    return { success: false, message: errorMessage };
  }
}
|
||||
|
||||
export async function saveFromShare(shareUrl: string, cloudType: string, sourceTitle?: string, ipAddress?: string): Promise<SaveResult> {
|
||||
const key = `${cloudType}:${shareUrl}`;
|
||||
|
||||
const inflight = inFlightSaves.get(key);
|
||||
if (inflight) {
|
||||
console.log(`[Share] ⏳ In-flight: ${shareUrl} — another save is already running, awaiting result`);
|
||||
return inflight;
|
||||
}
|
||||
|
||||
const promise = doSaveFromShare(shareUrl, cloudType, sourceTitle, ipAddress);
|
||||
inFlightSaves.set(key, promise);
|
||||
try {
|
||||
return await promise;
|
||||
} finally {
|
||||
inFlightSaves.delete(key);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Save Records ──────────────────────────────────────────────────
|
||||
|
||||
export function getSaveRecords(page: number = 1, pageSize: number = 20, startDate?: string, endDate?: string, status?: string, sourceType?: string, keyword?: string): { total: number; records: SaveRecord[]; summary?: { total: number; success: number; failed: number; reused: number } } {
|
||||
const db = getDb();
|
||||
const offset = (page - 1) * pageSize;
|
||||
const conditions: string[] = [];
|
||||
const params: any[] = [];
|
||||
const summaryConditions: string[] = [];
|
||||
const summaryParams: any[] = [];
|
||||
if (startDate) {
|
||||
conditions.push('created_at >= ?'); params.push(startDate);
|
||||
summaryConditions.push('created_at >= ?'); summaryParams.push(startDate);
|
||||
}
|
||||
if (endDate) {
|
||||
conditions.push('created_at < ?'); params.push(endDate);
|
||||
summaryConditions.push('created_at < ?'); summaryParams.push(endDate);
|
||||
}
|
||||
if (status) { conditions.push('status = ?'); params.push(status); }
|
||||
if (sourceType) {
|
||||
conditions.push('source_type = ?'); params.push(sourceType);
|
||||
summaryConditions.push('source_type = ?'); summaryParams.push(sourceType);
|
||||
}
|
||||
if (keyword) { conditions.push('source_title LIKE ?'); params.push(`%${keyword}%`); }
|
||||
const where = conditions.length > 0 ? 'WHERE ' + conditions.join(' AND ') : '';
|
||||
const total = (db.prepare(`SELECT COUNT(*) as count FROM save_records ${where}`).get(...params) as any).count;
|
||||
const records = db.prepare(
|
||||
`SELECT * FROM save_records ${where} ORDER BY created_at DESC LIMIT ? OFFSET ?`
|
||||
).all(...params, pageSize, offset) as SaveRecord[];
|
||||
|
||||
const summaryWhere = summaryConditions.length > 0 ? 'WHERE ' + summaryConditions.join(' AND ') : '';
|
||||
const summaryRows = db.prepare(
|
||||
`SELECT status, COUNT(*) as cnt FROM save_records ${summaryWhere} GROUP BY status`
|
||||
).all(...summaryParams) as { status: string; cnt: number }[];
|
||||
let sumTotal = 0, sumSuccess = 0, sumFailed = 0, sumReused = 0;
|
||||
for (const r of summaryRows) {
|
||||
sumTotal += r.cnt;
|
||||
if (r.status === 'success') sumSuccess = r.cnt;
|
||||
else if (r.status === 'failed') sumFailed = r.cnt;
|
||||
else if (r.status === 'reused') sumReused = r.cnt;
|
||||
}
|
||||
const summary = { total: sumTotal, success: sumSuccess, failed: sumFailed, reused: sumReused };
|
||||
|
||||
return { total, records, summary };
|
||||
}
|
||||
|
||||
export function cleanupOldSaveRecords(): void {
|
||||
const db = getDb();
|
||||
const cutoff = formatLocalDateTime(new Date(Date.now() - 60 * 24 * 60 * 60 * 1000));
|
||||
const deleted = db.prepare('DELETE FROM save_records WHERE created_at < ?').run(cutoff);
|
||||
console.log(`[Cleanup] Deleted ${deleted.changes} save records older than 60 days (before ${cutoff})`);
|
||||
}
|
||||
|
||||
// ── Storage Refresh ───────────────────────────────────────────────
|
||||
|
||||
/**
 * Refresh cached storage usage for every active Quark config that has a cookie.
 * A failure for one account is logged and does not stop the loop.
 */
export async function refreshAllStorageInfo(): Promise<void> {
  // Only Quark accounts are refreshed here; other cloud types are skipped.
  const configs = getActiveCloudConfigs().filter(c => c.cloud_type === 'quark' && c.cookie);
  if (configs.length === 0) return;

  for (const cfg of configs) {
    try {
      const { QuarkDriver } = require('./drivers/quark.driver');
      // NOTE(review): cfg.cookie comes straight from the DB, where cookies are
      // stored encrypted elsewhere in this codebase — presumably the driver (or
      // this file's config loader) decrypts it; confirm, otherwise this passes
      // ciphertext to the Quark API.
      const driver = new QuarkDriver({ cookie: cfg.cookie, nickname: cfg.nickname });
      const storage = await driver.getStorageInfo();
      // NOTE(review): the guard reads storage.totalBytes/usedBytes but the
      // UPDATE writes storage.used/storage.total — presumably getStorageInfo
      // returns both numeric and pre-formatted fields; verify against the driver.
      if (storage.totalBytes > 0 || storage.usedBytes > 0) {
        const db = getDb();
        db.prepare(
          `UPDATE cloud_configs SET storage_used = ?, storage_total = ? WHERE id = ?`
        ).run(storage.used, storage.total, cfg.id);
      }
    } catch (err: any) {
      // Best-effort refresh: log and continue with the next account.
      console.error(`[Storage] Failed to refresh quark#${cfg.id}:`, err.message);
    }
  }
}
|
||||
472
packages/backend/src/cloud/credential.service.ts
Normal file
472
packages/backend/src/cloud/credential.service.ts
Normal file
@@ -0,0 +1,472 @@
|
||||
import { getDb } from '../database/database';
|
||||
import { localTimestamp, formatLocalDate, formatLocalDateTime } from '../utils/time';
|
||||
import { encrypt, decrypt, isEncrypted } from '../utils/crypto';
|
||||
|
||||
// ── Background Used-Space Calculation ──────────────────────────
|
||||
|
||||
/**
|
||||
* Fire-and-forget: recursively calculate used space for a quark drive
|
||||
* and update the database when done.
|
||||
*/
|
||||
async function calculateUsedSpaceAsync(cookie: string, configId: number): Promise<void> {
|
||||
const { calculateUsedSpace } = require('./drivers/quark-cleanup');
|
||||
const usedBytes = await calculateUsedSpace(cookie);
|
||||
if (usedBytes > 0) {
|
||||
const usedFormatted = usedBytes >= 1024 ** 4
|
||||
? (usedBytes / 1024 ** 4).toFixed(1) + ' TB'
|
||||
: usedBytes >= 1024 ** 3
|
||||
? (usedBytes / 1024 ** 3).toFixed(1) + ' GB'
|
||||
: (usedBytes / 1024 ** 2).toFixed(1) + ' MB';
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET storage_used = ?, updated_at = ? WHERE id = ?`
|
||||
).run(usedFormatted, localTimestamp(), configId);
|
||||
console.log(`[UsedSpace] Updated config #${configId}: used=${usedFormatted}`);
|
||||
}
|
||||
}
|
||||
|
||||
/** Row shape of the cloud_configs table. */
export interface CloudConfig {
  id: number;
  // Provider key, e.g. 'quark' or 'baidu' (see saveCloudConfig / drivers).
  cloud_type: string;
  // Stored encrypted; legacy rows may still hold plaintext (see decryptCookie).
  cookie?: string;
  nickname?: string;
  // 1 = usable for saves, 0 = disabled.
  is_active: number;
  promotion_account?: string;
  is_transfer_enabled: number;
  // Human-readable sizes, e.g. "12.3 GB".
  storage_used?: string;
  storage_total?: string;
  checkin_status: string; // 'none'|'success'|'failed'|'pending'|'skipped'
  last_checkin_at?: string;
  checkin_message?: string;
  // Drives with >= 5 consecutive failures are skipped by getAndValidateCredential.
  consecutive_failures: number;
  last_used_at?: string;
  total_saves: number;
  created_at: string;
  updated_at: string;
  // 'valid' | 'invalid' | null — set by testCloudConnection / getAndValidateCredential.
  verification_status?: string;
  // Quark __uid extracted from the cookie; used to dedupe accounts.
  cloud_type_uid?: string;
}
|
||||
|
||||
// ── Cookie Encryption Helper ──────────────────────────────────────
|
||||
/** Decrypt cookie. Handles legacy plaintext data transparently. */
|
||||
function decryptCookie(encrypted: string | null | undefined): string {
|
||||
if (!encrypted) return '';
|
||||
// If already plaintext (legacy data), return as-is
|
||||
if (!isEncrypted(encrypted)) return encrypted;
|
||||
return decrypt(encrypted);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract Quark __uid from cookie string.
|
||||
* Used for dedup: same cloud_type + same __uid = same account.
|
||||
*/
|
||||
function extractQuarkUid(cookie: string): string | null {
|
||||
const match = cookie.match(/(?:^|;\s*)__uid=([^;]+)/);
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
|
||||
// ── Config CRUD ──────────────────────────────────────────────────
|
||||
|
||||
export function getCloudConfigs(): CloudConfig[] {
|
||||
const db = getDb();
|
||||
return db.prepare(
|
||||
`SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
|
||||
cloud_type_uid,
|
||||
checkin_status, last_checkin_at, checkin_message, consecutive_failures,
|
||||
last_used_at, total_saves, created_at, updated_at, verification_status
|
||||
FROM cloud_configs ORDER BY id ASC`
|
||||
).all() as CloudConfig[];
|
||||
}
|
||||
|
||||
export function getAvailableClouds(): CloudConfig[] {
|
||||
const db = getDb();
|
||||
return db.prepare(
|
||||
`SELECT id, cloud_type, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
|
||||
cloud_type_uid,
|
||||
checkin_status, last_checkin_at, checkin_message, consecutive_failures,
|
||||
last_used_at, total_saves, created_at, updated_at
|
||||
FROM cloud_configs WHERE is_active = 1 ORDER BY id ASC`
|
||||
).all() as CloudConfig[];
|
||||
}
|
||||
|
||||
/** Returns the first active config matching the given cloud type. */
|
||||
export function getCloudConfigByType(cloudType: string): CloudConfig | undefined {
|
||||
const db = getDb();
|
||||
return db.prepare(
|
||||
`SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
|
||||
cloud_type_uid,
|
||||
checkin_status, last_checkin_at, checkin_message, consecutive_failures,
|
||||
last_used_at, total_saves, created_at, updated_at, verification_status
|
||||
FROM cloud_configs WHERE cloud_type = ? AND is_active = 1
|
||||
ORDER BY id ASC LIMIT 1`
|
||||
).get(cloudType) as CloudConfig | undefined;
|
||||
}
|
||||
|
||||
export function getCloudConfigById(id: number): CloudConfig | undefined {
|
||||
const db = getDb();
|
||||
return db.prepare(
|
||||
`SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
|
||||
cloud_type_uid,
|
||||
checkin_status, last_checkin_at, checkin_message, consecutive_failures,
|
||||
last_used_at, total_saves, created_at, updated_at, verification_status
|
||||
FROM cloud_configs WHERE id = ?`
|
||||
).get(id) as CloudConfig | undefined;
|
||||
}
|
||||
|
||||
/** Returns all active cloud configs (used by save flow for cloud type switching). */
|
||||
export function getActiveCloudConfigs(): CloudConfig[] {
|
||||
const db = getDb();
|
||||
return db.prepare(
|
||||
`SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
|
||||
cloud_type_uid,
|
||||
checkin_status, last_checkin_at, checkin_message, consecutive_failures,
|
||||
last_used_at, total_saves, created_at, updated_at
|
||||
FROM cloud_configs WHERE is_active = 1
|
||||
ORDER BY cloud_type ASC, id ASC`
|
||||
).all() as CloudConfig[];
|
||||
}
|
||||
|
||||
/**
 * Upsert a cloud config. Three paths:
 *  1. data.id given            → update that row directly.
 *  2. match by (cloud_type, cloud_type_uid), falling back to cloud_type alone
 *     for legacy rows            → update the matched row and return it.
 *  3. no match                 → insert a fresh row.
 * The cookie is encrypted before storage; the Quark __uid is extracted for dedup.
 * Returns the saved row as read back from the database.
 */
export function saveCloudConfig(data: {
  id?: number;
  cloud_type: string;
  cookie?: string;
  nickname?: string;
  is_active?: number;
  promotion_account?: string;
  is_transfer_enabled?: number;
  storage_used?: string;
  storage_total?: string;
}): CloudConfig {
  const db = getDb();
  // Encrypt cookie before storing
  const encryptedCookie = data.cookie ? encrypt(data.cookie) : null;

  // Extract cloud_type_uid from cookie (Quark __uid)
  // NOTE(review): applied for every cloud_type — presumably non-Quark cookies
  // simply lack __uid and yield null; confirm.
  let cloudTypeUid: string | null = null;
  if (data.cookie) {
    cloudTypeUid = extractQuarkUid(data.cookie);
  }

  if (data.id) {
    // Update by ID — always succeeds
    // NOTE(review): is_active / promotion_account / is_transfer_enabled are
    // coalesced in SQL but the JS side substitutes defaults (?? 1, ?? ''), so
    // omitting them does NOT leave the column unchanged — e.g. an omitted
    // is_active re-activates the row. Confirm this is intended.
    db.prepare(
      `UPDATE cloud_configs SET
        cloud_type = COALESCE(?, cloud_type),
        cookie = COALESCE(?, cookie),
        nickname = COALESCE(?, nickname),
        is_active = COALESCE(?, is_active),
        promotion_account = COALESCE(?, promotion_account),
        is_transfer_enabled = COALESCE(?, is_transfer_enabled),
        storage_used = COALESCE(?, storage_used),
        storage_total = COALESCE(?, storage_total),
        cloud_type_uid = COALESCE(?, cloud_type_uid),
        consecutive_failures = 0,
        updated_at = ?
       WHERE id = ?`
    ).run(data.cloud_type, encryptedCookie || null, data.nickname || null, data.is_active ?? 1, data.promotion_account ?? '', data.is_transfer_enabled ?? 1, data.storage_used || null, data.storage_total || null, cloudTypeUid || null, localTimestamp(), data.id);
  } else {
    // Try to find existing config by cloud_type + cloud_type_uid
    let existing: any = null;
    if (cloudTypeUid) {
      existing = db.prepare(
        `SELECT id FROM cloud_configs WHERE cloud_type = ? AND cloud_type_uid = ? LIMIT 1`
      ).get(data.cloud_type, cloudTypeUid);
    }

    // Fallback: match by cloud_type alone (legacy records without cloud_type_uid)
    // NOTE(review): this can attach a *different* account's cookie to the first
    // active row of the same cloud_type — acceptable for legacy single-account
    // setups per the comment above, but verify for multi-account deployments.
    if (!existing) {
      existing = db.prepare(
        'SELECT id FROM cloud_configs WHERE cloud_type = ? AND is_active = 1 LIMIT 1'
      ).get(data.cloud_type) as any;
    }

    if (existing) {
      // Same COALESCE-vs-default caveat as the update-by-id branch above.
      db.prepare(
        `UPDATE cloud_configs SET
          cookie = COALESCE(?, cookie),
          nickname = COALESCE(?, nickname),
          is_active = COALESCE(?, is_active),
          promotion_account = COALESCE(?, promotion_account),
          is_transfer_enabled = COALESCE(?, is_transfer_enabled),
          storage_used = COALESCE(?, storage_used),
          storage_total = COALESCE(?, storage_total),
          cloud_type_uid = COALESCE(?, cloud_type_uid),
          consecutive_failures = 0,
          updated_at = ?
         WHERE id = ?`
      ).run(encryptedCookie || null, data.nickname || null, data.is_active ?? 1, data.promotion_account ?? '', data.is_transfer_enabled ?? 1, data.storage_used || null, data.storage_total || null, cloudTypeUid || null, localTimestamp(), existing.id);

      // Re-read savedId for return
      const savedId = existing.id;
      return db.prepare(
        `SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
            cloud_type_uid,
            checkin_status, last_checkin_at, checkin_message, consecutive_failures,
            last_used_at, total_saves, created_at, updated_at
         FROM cloud_configs WHERE id = ?`
      ).get(savedId) as CloudConfig;
    }

    // No existing config found — insert new
    db.prepare(
      'INSERT INTO cloud_configs (cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total, cloud_type_uid, consecutive_failures) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 0)'
    ).run(data.cloud_type, encryptedCookie || null, data.nickname || null, data.is_active ?? 1, data.promotion_account ?? '', data.is_transfer_enabled ?? 1, data.storage_used || null, data.storage_total || null, cloudTypeUid || null);
  }

  // For the insert path data.id is undefined, so last_insert_rowid() picks up
  // the freshly inserted row; the existing-row path returned earlier.
  const savedId = data.id || (db.prepare('SELECT last_insert_rowid() as id').get() as any).id;
  return db.prepare(
    `SELECT id, cloud_type, cookie, nickname, is_active, promotion_account, is_transfer_enabled, storage_used, storage_total,
        cloud_type_uid,
        checkin_status, last_checkin_at, checkin_message, consecutive_failures,
        last_used_at, total_saves, created_at, updated_at
     FROM cloud_configs WHERE id = ?`
  ).get(savedId) as CloudConfig;
}
|
||||
|
||||
export function deleteCloudConfig(id: number): boolean {
|
||||
const db = getDb();
|
||||
const result = db.prepare('DELETE FROM cloud_configs WHERE id = ?').run(id);
|
||||
return result.changes > 0;
|
||||
}
|
||||
|
||||
// ── Cookie Validation ────────────────────────────────────────────
|
||||
|
||||
async function fetchQuarkNickname(cookie: string): Promise<string | null> {
|
||||
const MAX_RETRIES = 2;
|
||||
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
|
||||
try {
|
||||
const response = await fetch('https://pan.quark.cn/account/info?fr=pc&platform=pc', {
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/3.14.2 Chrome/112.0.5615.165 Electron/24.1.3.8 Safari/537.36 Channel/pckk_other_ch',
|
||||
'Cookie': cookie,
|
||||
'Accept': 'application/json',
|
||||
'Referer': 'https://pan.quark.cn/',
|
||||
},
|
||||
signal: AbortSignal.timeout(15000),
|
||||
});
|
||||
if (!response.ok) return null;
|
||||
const data = await response.json() as any;
|
||||
if (data?.data?.nickname) return data.data.nickname;
|
||||
} catch {
|
||||
if (attempt < MAX_RETRIES) {
|
||||
await new Promise(r => setTimeout(r, 1500));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
export async function testCloudConnection(id: number): Promise<{
|
||||
success: boolean;
|
||||
message: string;
|
||||
nickname?: string;
|
||||
storage_used?: string;
|
||||
storage_total?: string;
|
||||
}> {
|
||||
const config = getCloudConfigById(id);
|
||||
if (!config) {
|
||||
return { success: false, message: 'Cloud config not found' };
|
||||
}
|
||||
|
||||
if (!config.cookie) {
|
||||
return { success: false, message: 'Cookie not configured' };
|
||||
}
|
||||
|
||||
try {
|
||||
let valid = false;
|
||||
let nickname = '';
|
||||
let storageUsed = config.storage_used || '';
|
||||
let storageTotal = config.storage_total || '';
|
||||
|
||||
if (config.cloud_type === 'baidu') {
|
||||
const { BaiduDriver } = require('./drivers/baidu.driver');
|
||||
const driver = new BaiduDriver({ cookie: config.cookie, nickname: config.nickname });
|
||||
valid = await driver.validate();
|
||||
if (valid) {
|
||||
const info = await driver.getUserInfo();
|
||||
if (info) {
|
||||
nickname = config.nickname || info.nickname || '百度网盘';
|
||||
const fmt = (b: number) => b >= 1024**3 ? (b/1024**3).toFixed(2)+' GB' : (b/1024**2).toFixed(2)+' MB';
|
||||
storageUsed = fmt(info.usedBytes);
|
||||
storageTotal = fmt(info.totalBytes);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const decodedCookie = decrypt(config.cookie);
|
||||
const { QuarkDriver } = require('./drivers/quark.driver');
|
||||
const driver = new QuarkDriver({ cookie: decodedCookie, nickname: config.nickname });
|
||||
valid = await driver.validate();
|
||||
if (valid) {
|
||||
nickname = config.nickname || (await fetchQuarkNickname(decodedCookie)) || '夸克网盘';
|
||||
const storage = await driver.getStorageInfoQuick(config.storage_total);
|
||||
storageTotal = (storage.total !== '-' && storage.total !== '0 B') ? storage.total : (config.storage_total || '');
|
||||
storageUsed = (storage.used && storage.used !== '-' && storage.used !== '0 B') ? storage.used : (config.storage_used || '');
|
||||
}
|
||||
}
|
||||
|
||||
const db = getDb();
|
||||
if (!valid) {
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET verification_status = 'invalid', updated_at = ? WHERE id = ?`
|
||||
).run(localTimestamp(), id);
|
||||
return { success: false, message: '连接失败:Cookie 无效或已过期,或网络暂时异常' };
|
||||
}
|
||||
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET nickname = ?, storage_total = ?, storage_used = ?, is_active = 1, verification_status = 'valid', updated_at = ? WHERE id = ?`
|
||||
).run(nickname, storageTotal, storageUsed, localTimestamp(), id);
|
||||
|
||||
// Fire-and-forget: recalculate used space in background (slow for big drives)
|
||||
if (config.cloud_type === 'quark') {
|
||||
calculateUsedSpaceAsync(decrypt(config.cookie), id).catch(err => console.error(`[UsedSpace] Background calc failed for #${id}:`, err.message));
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: '连接成功',
|
||||
nickname,
|
||||
storage_used: storageUsed,
|
||||
storage_total: storageTotal,
|
||||
};
|
||||
} catch (err: any) {
|
||||
try {
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET verification_status = 'invalid', updated_at = ? WHERE id = ?`
|
||||
).run(localTimestamp(), id);
|
||||
} catch {}
|
||||
return { success: false, message: `连接失败:${err.message || '未知错误'}` };
|
||||
}
|
||||
}
|
||||
|
||||
export async function testCloudConnectionWithCookie(cloudType: string, cookie: string): Promise<{
|
||||
success: boolean;
|
||||
message: string;
|
||||
nickname?: string;
|
||||
storage_used?: string;
|
||||
storage_total?: string;
|
||||
}> {
|
||||
try {
|
||||
const { QuarkDriver } = require('./drivers/quark.driver');
|
||||
const driver = new QuarkDriver({ cookie, nickname: '' });
|
||||
const valid = await driver.validate();
|
||||
if (!valid) {
|
||||
return { success: false, message: '连接失败:Cookie 无效或已过期' };
|
||||
}
|
||||
const nickname = (await fetchQuarkNickname(cookie)) || cloudType;
|
||||
// getStorageInfo may timeout from overseas servers, don't fail if it does
|
||||
let storage: { used: string; total: string } = { used: '-', total: '-' };
|
||||
try {
|
||||
const s = await driver.getStorageInfoQuick();
|
||||
if (s) {
|
||||
storage = { used: s.used || '-', total: s.total || '-' };
|
||||
}
|
||||
} catch {
|
||||
// storage info is optional
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
message: '连接成功',
|
||||
nickname,
|
||||
storage_used: storage.used,
|
||||
storage_total: storage.total,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return { success: false, message: `连接失败:${err.message || '未知错误'}` };
|
||||
}
|
||||
}
|
||||
|
||||
// ── Unified Credential Validation ─────────────────────────────────
|
||||
|
||||
/** Structured outcome of getAndValidateCredential(). */
export interface CredentialValidationResult {
  valid: boolean;
  // Present only on success; its cookie field is returned already decrypted.
  config?: CloudConfig;
  // One of: 'NO_AVAILABLE_DRIVE' | 'COOKIE_MISSING' | 'COOKIE_EXPIRED' | 'VALIDATION_ERROR'.
  errorCode?: string;
  message: string;
}
|
||||
|
||||
/**
 * Get and validate a credential for the given cloud type.
 *
 * This is the unified entry point for all save/transfer operations.
 * It handles:
 * 1. Finding an active config with < 5 consecutive failures (least recently used first)
 * 2. Validating cookie freshness via driver.validate()
 * 3. Returning structured result with error codes
 *
 * Reference: search-ucmao get_and_validate_credential() pattern.
 */
export async function getAndValidateCredential(cloudType: string): Promise<CredentialValidationResult> {
  const db = getDb();

  // Pick the least-recently-used healthy drive. Never-used rows (NULL
  // last_used_at) are preferred first.
  // NOTE(review): "NULLS FIRST" requires SQLite >= 3.30 — fine with the SQLite
  // bundled by recent better-sqlite3, but verify if the engine is ever swapped.
  const config = db.prepare(
    `SELECT * FROM cloud_configs
     WHERE cloud_type = ? AND is_active = 1
       AND consecutive_failures < 5
     ORDER BY last_used_at ASC NULLS FIRST
     LIMIT 1`
  ).get(cloudType) as CloudConfig | undefined;

  if (!config) {
    return {
      valid: false,
      errorCode: 'NO_AVAILABLE_DRIVE',
      message: `Cloud type "${cloudType}" is not configured or no available drives`,
    };
  }

  if (!config.cookie) {
    return {
      valid: false,
      errorCode: 'COOKIE_MISSING',
      message: `Cookie not configured for ${cloudType} drive #${config.id}`,
    };
  }

  try {
    // Decrypt cookie before validation
    const decryptedCookie = decryptCookie(config.cookie);
    if (!decryptedCookie) {
      return {
        valid: false,
        errorCode: 'COOKIE_MISSING',
        message: `Cookie not configured for ${cloudType} drive #${config.id}`,
      };
    }

    // Any non-baidu type is validated through the Quark driver.
    let cookieValid = false;
    if (cloudType === 'baidu') {
      const { BaiduDriver } = require('./drivers/baidu.driver');
      const driver = new BaiduDriver({ cookie: decryptedCookie, nickname: config.nickname });
      cookieValid = await driver.validate();
    } else {
      const { QuarkDriver } = require('./drivers/quark.driver');
      const driver = new QuarkDriver({ cookie: decryptedCookie, nickname: config.nickname });
      cookieValid = await driver.validate();
    }

    if (!cookieValid) {
      // Mark the row invalid; note this path does NOT touch consecutive_failures.
      db.prepare(
        `UPDATE cloud_configs SET verification_status = 'invalid', updated_at = ? WHERE id = ?`
      ).run(localTimestamp(), config.id);
      return {
        valid: false,
        errorCode: 'COOKIE_EXPIRED',
        message: `Cookie expired or invalid for ${cloudType} drive #${config.id}`,
      };
    }

    // Hand back the config with the cookie already decrypted for the caller.
    return {
      valid: true,
      config: { ...config, cookie: decryptedCookie },
      message: 'ok',
    };
  } catch (err: any) {
    // Transient validation errors are reported but not persisted to the row.
    return {
      valid: false,
      errorCode: 'VALIDATION_ERROR',
      message: `Credential validation failed: ${err.message}`,
    };
  }
}
|
||||
327
packages/backend/src/cloud/database.ts
Executable file
327
packages/backend/src/cloud/database.ts
Executable file
@@ -0,0 +1,327 @@
|
||||
import Database from 'better-sqlite3';
|
||||
import path from 'path';
|
||||
import bcrypt from 'bcryptjs';
|
||||
import config from '../config';
|
||||
import { formatLocalDateTime } from '../utils/time';
|
||||
|
||||
let db: Database.Database | null = null;
|
||||
|
||||
export function getDb(): Database.Database {
|
||||
if (db) return db;
|
||||
|
||||
const dbDir = path.dirname(config.dbPath);
|
||||
const fs = require('fs');
|
||||
if (!fs.existsSync(dbDir)) {
|
||||
fs.mkdirSync(dbDir, { recursive: true });
|
||||
}
|
||||
|
||||
db = new Database(config.dbPath);
|
||||
db.pragma('journal_mode = WAL');
|
||||
db.pragma('foreign_keys = ON');
|
||||
|
||||
runMigrations(db);
|
||||
seedAdmin(db);
|
||||
|
||||
return db;
|
||||
}
|
||||
|
||||
/**
 * Create all tables (idempotent via IF NOT EXISTS), then run the
 * column-level migrations and the startup cleanup. Timestamps are stored
 * as local-time strings throughout.
 */
function runMigrations(db: Database.Database): void {
  db.exec(`
    CREATE TABLE IF NOT EXISTS admins (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      username TEXT UNIQUE NOT NULL,
      password_hash TEXT NOT NULL,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
      last_login TEXT
    );

    CREATE TABLE IF NOT EXISTS cloud_configs (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      cloud_type TEXT NOT NULL,
      cookie TEXT,
      nickname TEXT,
      is_active INTEGER NOT NULL DEFAULT 1,
      storage_used TEXT,
      storage_total TEXT,
      checkin_status TEXT NOT NULL DEFAULT 'none',
      last_checkin_at TEXT,
      checkin_message TEXT,
      consecutive_failures INTEGER DEFAULT 0,
      last_used_at TEXT,
      total_saves INTEGER DEFAULT 0,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS promotions (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      title TEXT NOT NULL,
      description TEXT,
      image_url TEXT,
      link_url TEXT,
      position TEXT,
      sort_order INTEGER NOT NULL DEFAULT 0,
      active INTEGER NOT NULL DEFAULT 1,
      click_count INTEGER NOT NULL DEFAULT 0,
      start_time TEXT,
      end_time TEXT,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS save_records (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      source_type TEXT,
      source_title TEXT,
      source_url TEXT,
      target_cloud TEXT,
      share_url TEXT,
      share_pwd TEXT,
      file_size TEXT,
      file_count INTEGER DEFAULT 0,
      duration_ms INTEGER DEFAULT 0,
      status TEXT NOT NULL DEFAULT '',
      error_message TEXT,
      ip_address TEXT,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS search_stats (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      keyword TEXT,
      intent TEXT,
      result_count INTEGER DEFAULT 0,
      ip_address TEXT,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS hot_keywords (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      keyword TEXT UNIQUE NOT NULL,
      search_count INTEGER NOT NULL DEFAULT 1,
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS system_configs (
      key TEXT PRIMARY KEY,
      value TEXT NOT NULL DEFAULT '',
      description TEXT,
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS content_cache (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      keyword TEXT UNIQUE NOT NULL,
      title TEXT,
      description TEXT,
      tags TEXT,
      cover TEXT,
      source TEXT,
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );
  `);
  // Column-level migrations for pre-existing databases (each one is
  // idempotent — duplicate ALTER TABLE errors are swallowed inside).
  seedSystemConfigs(db);
  migrateSaveRecords(db);
  migrateContentCache(db);
  migrateCloudConfigs(db);
  cleanupOldSaveRecords(db);
}
|
||||
|
||||
/** 迁移: 给已有 save_records 表补充新列 */
|
||||
function migrateSaveRecords(db: Database.Database): void {
|
||||
const newCols: { col: string; def: string }[] = [
|
||||
{ col: 'share_pwd', def: 'TEXT' },
|
||||
{ col: 'file_count', def: 'INTEGER DEFAULT 0' },
|
||||
{ col: 'folder_count', def: 'INTEGER DEFAULT 0' },
|
||||
{ col: 'duration_ms', def: 'INTEGER DEFAULT 0' },
|
||||
{ col: 'status', def: "TEXT NOT NULL DEFAULT ''" },
|
||||
{ col: 'error_message', def: 'TEXT' },
|
||||
{ col: 'folder_name', def: 'TEXT' },
|
||||
{ col: 'request_url', def: 'TEXT' },
|
||||
{ col: 'ip_location', def: 'TEXT' },
|
||||
{ col: 'original_folder_name', def: 'TEXT' },
|
||||
];
|
||||
for (const { col, def } of newCols) {
|
||||
try {
|
||||
db.exec(`ALTER TABLE save_records ADD COLUMN ${col} ${def}`);
|
||||
} catch {
|
||||
// Column already exists — ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** 迁移: 给 content_cache 表加 douban_url 列 */
|
||||
function migrateContentCache(db: Database.Database): void {
|
||||
const columns: { col: string; def: string }[] = [
|
||||
{ col: 'douban_url', def: 'TEXT' },
|
||||
{ col: 'rating', def: 'TEXT' },
|
||||
{ col: 'rating_count', def: 'TEXT' },
|
||||
{ col: 'year', def: 'TEXT' },
|
||||
{ col: 'genres', def: 'TEXT' },
|
||||
{ col: 'directors', def: 'TEXT' },
|
||||
{ col: 'actors', def: 'TEXT' },
|
||||
{ col: 'region', def: 'TEXT' },
|
||||
{ col: 'duration', def: 'TEXT' },
|
||||
];
|
||||
for (const { col, def } of columns) {
|
||||
try {
|
||||
db.exec(`ALTER TABLE content_cache ADD COLUMN ${col} ${def}`);
|
||||
} catch {
|
||||
// Column already exists — ignore
|
||||
}
|
||||
}
|
||||
// 修复旧记录:source 为 NULL 但实际有 TMDB 数据的,标记为 tmdb
|
||||
db.exec(`UPDATE content_cache SET source = 'tmdb' WHERE source IS NULL AND title IS NOT NULL AND title != ''`);
|
||||
}
|
||||
|
||||
/**
 * Migration: drop the legacy UNIQUE constraint on cloud_configs.cloud_type
 * (multi-account support) and add check-in / rotation / verification columns.
 * Every step is idempotent so this can run on each startup.
 */
function migrateCloudConfigs(db: Database.Database): void {
  // Add new columns (duplicate-column errors mean "already migrated").
  const newCols: { col: string; def: string }[] = [
    { col: 'checkin_status', def: "TEXT NOT NULL DEFAULT 'none'" },
    { col: 'last_checkin_at', def: 'TEXT' },
    { col: 'checkin_message', def: 'TEXT' },
    { col: 'consecutive_failures', def: 'INTEGER DEFAULT 0' },
    { col: 'last_used_at', def: 'TEXT' },
    { col: 'total_saves', def: 'INTEGER DEFAULT 0' },
  ];
  for (const { col, def } of newCols) {
    try { db.exec(`ALTER TABLE cloud_configs ADD COLUMN ${col} ${def}`); } catch {}
  }
  // If the legacy table still carries the UNIQUE constraint on cloud_type,
  // rebuild it without one (SQLite cannot drop a constraint in place).
  // NOTE(review): the sqlite_master check matches the exact substring
  // 'cloud_type TEXT UNIQUE' — formatting variants would be missed; confirm
  // all deployed schemas were created with this exact DDL.
  const row = db.prepare(`SELECT sql FROM sqlite_master WHERE type='table' AND name='cloud_configs'`).get() as any;
  if (row && row.sql && row.sql.includes('cloud_type TEXT UNIQUE')) {
    db.exec(`
      CREATE TABLE IF NOT EXISTS cloud_configs_v2 (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        cloud_type TEXT NOT NULL,
        cookie TEXT,
        nickname TEXT,
        is_active INTEGER NOT NULL DEFAULT 1,
        storage_used TEXT,
        storage_total TEXT,
        checkin_status TEXT NOT NULL DEFAULT 'none',
        last_checkin_at TEXT,
        checkin_message TEXT,
        consecutive_failures INTEGER DEFAULT 0,
        last_used_at TEXT,
        total_saves INTEGER DEFAULT 0,
        created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
        updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
      );
      INSERT INTO cloud_configs_v2 (id, cloud_type, cookie, nickname, is_active, storage_used, storage_total, checkin_status, last_checkin_at, checkin_message, consecutive_failures, last_used_at, total_saves, created_at, updated_at)
        SELECT id, cloud_type, cookie, nickname, is_active, storage_used, storage_total, COALESCE(checkin_status,'none'), last_checkin_at, checkin_message, COALESCE(consecutive_failures,0), last_used_at, COALESCE(total_saves,0), created_at, updated_at FROM cloud_configs;
      DROP TABLE cloud_configs;
      ALTER TABLE cloud_configs_v2 RENAME TO cloud_configs;
    `);
    console.log('[DB] cloud_configs migration: UNIQUE constraint removed, new fields added');
  }

  // Migration 2: Add verification_status column
  const row2 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%verification_status%'").get();
  if (!row2) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN verification_status TEXT DEFAULT NULL");
    console.log('[DB] cloud_configs migration: verification_status column added');
  }

  // Migration 3: Add cloud_type_uid column (for Quark __uid dedup)
  const row3 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%cloud_type_uid%'").get();
  if (!row3) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN cloud_type_uid TEXT DEFAULT NULL");
    console.log('[DB] cloud_configs migration: cloud_type_uid column added');
  }

  // Migration 4: Add promotion_account column
  const row4 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%promotion_account%'").get();
  if (!row4) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN promotion_account TEXT DEFAULT ''");
    console.log('[DB] cloud_configs migration: promotion_account column added');
  }

  // Migration 5: Add is_transfer_enabled column
  const row5 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%is_transfer_enabled%'").get();
  if (!row5) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN is_transfer_enabled INTEGER DEFAULT 1");
    console.log('[DB] cloud_configs migration: is_transfer_enabled column added');
  }
}
|
||||
|
||||
function seedAdmin(db: Database.Database): void {
|
||||
const existing = db.prepare('SELECT id FROM admins WHERE username = ?').get(config.adminUsername);
|
||||
if (existing) return;
|
||||
|
||||
const salt = bcrypt.genSaltSync(10);
|
||||
const hash = bcrypt.hashSync(config.adminPassword, salt);
|
||||
|
||||
db.prepare(
|
||||
'INSERT INTO admins (username, password_hash) VALUES (?, ?)'
|
||||
).run(config.adminUsername, hash);
|
||||
|
||||
console.log(`[DB] Admin user "${config.adminUsername}" created`);
|
||||
}
|
||||
|
||||
/**
 * Insert default rows into system_configs. Uses INSERT OR IGNORE, so values
 * already customized by the operator are never overwritten.
 */
function seedSystemConfigs(db: Database.Database): void {
  const defaults: { key: string; value: string; description: string }[] = [
    // Upstream service endpoints
    { key: 'pansou_url', value: config.pansouUrl, description: 'PanSou 搜索引擎服务地址' },
    { key: 'video_parser_url', value: config.videoParserUrl, description: '视频解析服务地址' },
    // Link-validation tuning
    { key: 'validation_concurrency', value: String(config.validation.concurrency), description: '链接验证并发数' },
    { key: 'validation_timeout', value: String(config.validation.timeout), description: '链接验证超时(ms)' },
    { key: 'validation_cache_ttl_valid', value: String(config.validation.cacheTtlValid), description: '有效链接缓存时间(s)' },
    { key: 'validation_cache_ttl_invalid', value: String(config.validation.cacheTtlInvalid), description: '无效链接缓存时间(s)' },
    // Search behaviour
    { key: 'search_proxy_enabled', value: 'false', description: '搜索代理开关(true/false)' },
    { key: 'search_proxy_url', value: '', description: '搜索代理地址 (如 http://127.0.0.1:7890)' },
    { key: 'search_strategy', value: 'wait_all', description: '搜索结果展示方式: wait_all=等待全部后展示, stream_channel=频道逐步展示' },
    { key: 'link_validation_enabled', value: 'true', description: '资源链接有效性检测开关(true/false)' },
    // Per-cloud-type visibility switches ('others' is off by default)
    { key: 'cloud_enabled_quark', value: 'true', description: '夸克网盘' },
    { key: 'cloud_enabled_baidu', value: 'true', description: '百度网盘' },
    { key: 'cloud_enabled_aliyun', value: 'true', description: '阿里云盘' },
    { key: 'cloud_enabled_115', value: 'true', description: '115 网盘' },
    { key: 'cloud_enabled_tianyi', value: 'true', description: '天翼云盘' },
    { key: 'cloud_enabled_123pan', value: 'true', description: '123 云盘' },
    { key: 'cloud_enabled_uc', value: 'true', description: 'UC 网盘' },
    { key: 'cloud_enabled_xunlei', value: 'true', description: '迅雷网盘' },
    { key: 'cloud_enabled_pikpak', value: 'true', description: 'PikPak 网盘' },
    { key: 'cloud_enabled_magnet', value: 'true', description: '磁力链接' },
    { key: 'cloud_enabled_ed2k', value: 'true', description: '电驴链接' },
    { key: 'cloud_enabled_others', value: 'false', description: '其他类型(默认关闭)' },
    { key: 'search_result_limit', value: '10', description: '每类网盘最多展示的有效结果数' },
    // Site appearance / branding
    { key: 'search_fallback_image', value: '', description: '无图资源的兜底封面图 URL(留空使用渐变色)' },
    { key: 'site_logo', value: '', description: '网站 LOGO 图片 URL(留空使用默认图标/文字)' },
    { key: 'site_name', value: 'CloudSearch', description: '网站名称(显示在首页标题/页脚)' },
    { key: 'site_disclaimer', value: '本站为非盈利性个人站点,所有资源仅供学习、研究使用,版权归原作者所有。请于下载后24小时内删除,切勿用于商业或非法用途。若侵犯了您的权益,请联系我们(邮箱:3337598077@qq.com),我们将及时处理。', description: '网站底部免责声明' },
    { key: 'site_marquee', value: '📢 欢迎使用CloudSearch,所有资源仅供学习交流,请于下载后24小时内删除', description: '搜索栏下方滚动通知文字(从右往左滚动显示)' },
    // External API integration
    { key: 'tmdb_api_token', value: '', description: 'TMDB API 读取令牌(用于增强豆瓣内容信息)' },
    { key: 'ip_geo_api_url', value: 'https://cn.apihz.cn/api/ip/chaapi.php?id=10014356&key=ca7ccb3b9ca044dd993c8604bc9afd93&ip={ip}&td=0', description: 'IP 归属地查询接口({ip} 会被替换为实际IP)' },
    { key: 'ip_geo_api_key', value: '', description: 'IP 归属地备用 API Key(留空使用默认)' },
    { key: 'title_filter_rules', value: '', description: '搜索结果标题过滤规则(一行一条:纯文本直接移除 / 正则用/包围/)' },
    { key: 'timezone', value: 'Asia/Shanghai', description: '系统时区(如 Asia/Shanghai、America/New_York、UTC)' },
    { key: 'redis_url', value: 'redis://redis:6379', description: 'Redis 连接地址(用于缓存优化)' },
    { key: 'pansou_auth_token', value: '', description: 'PanSou API 认证令牌(用于私有搜索服务)' },
    { key: 'pansou_web_enabled', value: 'false', description: '启用 PanSou Web 端访问(在 /pansou 路径提供 PanSou 搜索引擎管理界面)' },
    // Automatic cleanup policy
    { key: 'cleanup_enabled', value: 'true', description: '启用自动清理(每天检查一次,移入回收站+清空日志+清空回收站)' },
    { key: 'cleanup_file_retention_days', value: '7', description: '云盘文件保留天数(超过此天数的日期文件夹将被移入回收站)' },
    { key: 'cleanup_log_retention_days', value: '30', description: '转存日志保留天数' },
    { key: 'cleanup_empty_trash', value: 'true', description: '清理时是否清空回收站(永久删除释放空间)' },
    { key: 'cleanup_space_threshold_enabled', value: 'false', description: '启用空间阈值自动清理(已用空间超过XX%时按比例删除最旧的转存文件)' },
    { key: 'cleanup_space_threshold_percent', value: '90', description: '空间使用阈值百分比(超过此值时触发强制清理)' },
    { key: 'cleanup_space_threshold_delete_percent', value: '10', description: '触发阈值清理时释放总空间的百分比(如 10 表示累计删除最旧文件直到达到总空间的 10%,6TB 总空间 → 释放 ~600GB)' },
    { key: 'save_reuse_enabled', value: 'true', description: '启用分享链接复用(相同原始链接不再重复转存,直接复用之前的分享链接)' },
    // Bookkeeping written back by the cleanup job
    { key: 'cleanup_last_run', value: '', description: '上次自动清理时间' },
    { key: 'cleanup_last_stats', value: '', description: '上次清理结果统计(JSON)' },
  ];
  const insert = db.prepare(
    'INSERT OR IGNORE INTO system_configs (key, value, description) VALUES (?, ?, ?)'
  );
  for (const entry of defaults) {
    insert.run(entry.key, entry.value, entry.description);
  }
}
|
||||
|
||||
/** 清理 60 天前的转存记录 */
|
||||
function cleanupOldSaveRecords(db: Database.Database): void {
|
||||
const cutoff = formatLocalDateTime(new Date(Date.now() - 60 * 24 * 60 * 60 * 1000));
|
||||
const deleted = db.prepare('DELETE FROM save_records WHERE created_at < ?').run(cutoff);
|
||||
console.log(`[DB] Cleaned up ${deleted.changes} save records older than 60 days (before ${cutoff})`);
|
||||
}
|
||||
|
||||
export default getDb;
|
||||
623
packages/backend/src/cloud/drivers/CloudConfig.vue
Executable file
623
packages/backend/src/cloud/drivers/CloudConfig.vue
Executable file
@@ -0,0 +1,623 @@
|
||||
<template>
|
||||
<div class="cloud-config">
|
||||
<!-- 网盘类型开关 -->
|
||||
<el-card class="toggle-card" style="margin-bottom: 20px;">
|
||||
<template #header><span>📂 网盘设置及授权</span></template>
|
||||
<div class="cloud-toggle-grid">
|
||||
<div
|
||||
v-for="ct in cloudTypes"
|
||||
:key="ct.type"
|
||||
class="cloud-toggle-chip"
|
||||
>
|
||||
<img :src="ct.icon" class="cloud-icon-img" />
|
||||
<span class="cloud-label">{{ ct.label }}</span>
|
||||
<el-tag v-if="ct.type === 'others'" size="small" type="info">关</el-tag>
|
||||
<el-switch
|
||||
:model-value="ct.enabled"
|
||||
size="small"
|
||||
@change="(val: boolean) => handleCloudToggle(ct.type, val)"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-tip" style="margin-top: 12px;">
|
||||
关闭的网盘类型在搜索结果中不会展示。修改后立即生效,无需点击保存。
|
||||
</div>
|
||||
</el-card>
|
||||
|
||||
<div class="toolbar">
|
||||
<el-button type="primary" @click="openDialog(null)">新增配置</el-button>
|
||||
<el-button @click="verifyAll">全部重新验证</el-button>
|
||||
</div>
|
||||
|
||||
<el-table :data="configs" stripe style="width: 100%">
|
||||
<el-table-column label="网盘类型" width="110">
|
||||
<template #default="{ row }">
|
||||
<CloudBadge :cloud_type="row.cloud_type" />
|
||||
</template>
|
||||
</el-table-column>
|
||||
<el-table-column prop="nickname" label="昵称" width="140">
|
||||
<template #default="{ row }">
|
||||
<span v-if="row.nickname" class="nickname-text">{{ row.nickname }}</span>
|
||||
<el-text v-else type="info" size="small">未设置</el-text>
|
||||
</template>
|
||||
</el-table-column>
|
||||
<el-table-column prop="cloud_type_uid" label="标识(__uid)" width="180">
|
||||
<template #default="{ row }">
|
||||
<span v-if="row.cloud_type_uid" class="uid-cell">{{ row.cloud_type_uid }}</span>
|
||||
<el-text v-else type="info" size="small">-</el-text>
|
||||
</template>
|
||||
</el-table-column>
|
||||
<el-table-column label="验证" width="100" align="center">
|
||||
<template #default="{ row }">
|
||||
<span v-if="row._verifying" class="verifying">
|
||||
<el-icon class="is-loading"><Loading /></el-icon>
|
||||
</span>
|
||||
<el-tag v-else-if="row.verification_status === 'valid'" type="success" size="small">有效</el-tag>
|
||||
<el-tag v-else-if="row.verification_status === 'invalid'" type="danger" size="small">无效</el-tag>
|
||||
<el-tag v-else type="info" size="small">未验证</el-tag>
|
||||
</template>
|
||||
</el-table-column>
|
||||
<el-table-column label="空间" width="200">
|
||||
<template #default="{ row }">
|
||||
<div v-if="row.storage_total" class="storage-cell">
|
||||
<div class="storage-bar-wrap">
|
||||
<div
|
||||
class="storage-bar-fill"
|
||||
:style="{ width: storagePercent(row) + '%' }"
|
||||
:class="storageBarClass(row)"
|
||||
></div>
|
||||
</div>
|
||||
<div class="storage-text">
|
||||
<span class="storage-used">{{ row.storage_used || '?' }}</span>
|
||||
<span class="storage-sep">/</span>
|
||||
<span class="storage-total">{{ row.storage_total }}</span>
|
||||
<span class="storage-free">(可用 {{ storageFree(row) }})</span>
|
||||
</div>
|
||||
</div>
|
||||
<el-text v-else type="info" size="small">—</el-text>
|
||||
</template>
|
||||
</el-table-column>
|
||||
<!-- 转存统计 -->
|
||||
<el-table-column label="转存" width="80" align="center">
|
||||
<template #default="{ row }">
|
||||
<span v-if="row.total_saves > 0" class="save-count">{{ row.total_saves }}次</span>
|
||||
<el-text v-else type="info" size="small">-</el-text>
|
||||
</template>
|
||||
</el-table-column>
|
||||
<el-table-column label="操作" width="390" align="center">
|
||||
<template #default="{ row }">
|
||||
<el-button text type="primary" @click="openDialog(row)">编辑</el-button>
|
||||
<el-button text type="primary" @click="verifyOne(row)">验证</el-button>
|
||||
<el-popconfirm title="确定删除该配置?" @confirm="handleDelete(row)">
|
||||
<template #reference>
|
||||
<el-button text type="danger">删除</el-button>
|
||||
</template>
|
||||
</el-popconfirm>
|
||||
</template>
|
||||
</el-table-column>
|
||||
</el-table>
|
||||
|
||||
<!-- 新增/编辑弹窗 -->
|
||||
<el-dialog v-model="dialogVisible" :title="editingId ? '编辑配置' : '新增配置'" width="560px">
|
||||
<el-form ref="formRef" :model="form" :rules="rules" label-width="100px">
|
||||
<el-form-item label="网盘类型" prop="cloud_type">
|
||||
<el-select v-model="form.cloud_type" style="width: 100%" :disabled="!!editingId" @change="onCloudTypeChange">
|
||||
<el-option
|
||||
v-for="[key, label] in cloudTypeOptions"
|
||||
:key="key"
|
||||
:label="label"
|
||||
:value="key"
|
||||
/>
|
||||
</el-select>
|
||||
</el-form-item>
|
||||
<el-form-item label="昵称" prop="nickname">
|
||||
<el-input v-model="form.nickname" placeholder="必填,用于区分多个同类型网盘">
|
||||
<template #append>
|
||||
<el-button :loading="form._verifying" @click="verifyAndFillNickname">自动获取</el-button>
|
||||
</template>
|
||||
</el-input>
|
||||
</el-form-item>
|
||||
<el-form-item label="Cookie" prop="cookie">
|
||||
<el-input
|
||||
v-model="form.cookie"
|
||||
type="textarea"
|
||||
:autosize="{ minRows: 2, maxRows: 4 }"
|
||||
:placeholder="cookiePlaceholder"
|
||||
input-style="font-family: monospace; font-size: 12px;"
|
||||
/>
|
||||
</el-form-item>
|
||||
<!-- Cookie 获取教程(根据网盘类型切换) -->
|
||||
<el-form-item label=" " v-if="form.cloud_type && form.cloud_type !== ''" class="cookie-tips-item">
|
||||
<div class="cookie-tips" :class="`cookie-tips-${form.cloud_type}`">
|
||||
<div class="cookie-tips-header">
|
||||
<span class="cookie-tips-title">📖 {{ cloudTypeLabel }} Cookie 获取教程</span>
|
||||
</div>
|
||||
<ol class="cookie-tips-steps" v-html="cookieTutorialHtml"></ol>
|
||||
</div>
|
||||
</el-form-item>
|
||||
</el-form>
|
||||
<template #footer>
|
||||
<el-button @click="dialogVisible = false">取消</el-button>
|
||||
<el-button type="primary" :loading="saving" @click="handleSave">保存</el-button>
|
||||
</template>
|
||||
</el-dialog>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import { ref, reactive, computed, onMounted, nextTick, onUnmounted } from 'vue'
|
||||
import { Loading } from '@element-plus/icons-vue'
|
||||
import { CLOUD_LABELS } from '../../types'
|
||||
import type { CloudType, CloudConfig } from '../../types'
|
||||
import { ElMessage } from 'element-plus'
|
||||
import { getCloudConfigs, saveCloudConfig, updateCloudConfig, deleteCloudConfig, testCloudConnection, getCloudTypes, toggleCloudType } from '../../api'
|
||||
import CloudBadge from '../../components/CloudBadge.vue'
|
||||
import type { ElForm } from 'element-plus'
|
||||
|
||||
interface CloudTypeInfo { type: string; label: string; icon: string; enabled: boolean }
|
||||
const cloudTypes = ref<CloudTypeInfo[]>([])
|
||||
|
||||
const formRef = ref<InstanceType<typeof ElForm>>()
|
||||
const configs = ref<(CloudConfig & { _verifying?: boolean })[]>([])
|
||||
const dialogVisible = ref(false)
|
||||
const saving = ref(false)
|
||||
const editingId = ref<number | null>(null)
|
||||
|
||||
// Factory for a pristine dialog form. Fields prefixed with "_" are UI-only
// transient state (spinner flag, storage values fetched via "自动获取") and
// are not part of the persisted config shape.
const defaultForm = () => ({
  cloud_type: '' as CloudType | '',
  nickname: '',
  cookie: '',
  _verifying: false,
  _storageUsed: '',
  _storageTotal: '',
})

// Reactive state backing the add/edit dialog form.
const form = reactive<{
  cloud_type: CloudType | ''
  nickname: string
  cookie: string
  _verifying: boolean
  _storageUsed: string
  _storageTotal: string
}>(defaultForm())
|
||||
|
||||
const rules = computed(() => ({
|
||||
cloud_type: [{ required: true, message: '请选择网盘类型', trigger: 'change' }],
|
||||
nickname: [{ required: true, message: '请填写昵称(区分多个同类型网盘)', trigger: 'blur' }],
|
||||
}))
|
||||
|
||||
const cloudTypeOptions = computed(() => {
|
||||
return Object.entries(CLOUD_LABELS) as [CloudType, string][]
|
||||
})
|
||||
|
||||
const cookiePlaceholder = computed(() => {
|
||||
if (!form.cloud_type) return '请先选择网盘类型'
|
||||
const t = form.cloud_type
|
||||
if (t === 'quark' || t === 'baidu') return `请输入 ${CLOUD_LABELS[t] || t} 的完整 Cookie`
|
||||
return editingId.value ? '留空则保持原有' : '输入完整 Cookie'
|
||||
})
|
||||
|
||||
const cloudTypeLabel = computed(() => {
|
||||
return CLOUD_LABELS[form.cloud_type as CloudType] || form.cloud_type || ''
|
||||
})
|
||||
|
||||
/** Cookie 获取教程 HTML(根据不同网盘类型) */
|
||||
const cookieTutorialHtml = computed(() => {
|
||||
const t = form.cloud_type
|
||||
if (!t) return ''
|
||||
const tutorials: Record<string, string> = {
|
||||
quark: `<li>在电脑上打开 <a href="https://pan.quark.cn" target="_blank">pan.quark.cn</a> 并登录你的夸克账号</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → 切换到 <strong>网络 (Network)</strong> 选项卡</li>
|
||||
<li>刷新页面,在请求列表中点击任意一个请求(如 <code>account/info</code>)</li>
|
||||
<li>在右侧 <strong>请求头 (Request Headers)</strong> 中找到 <code>Cookie</code> 字段</li>
|
||||
<li>复制整个 Cookie 值(<b>从开头到结束的完整内容</b>),粘贴到上方输入框</li>
|
||||
<li>点击「<b>自动获取</b>」按钮验证 Cookie 是否有效</li>
|
||||
.cookie-tips-note">⚠️ 必须包含 <code>__st=s%...</code> 字段!请复制浏览器请求头的 <b>整个 Cookie</b>(F12 → Network → 请求头 → Cookie 项),不要只复制部分。</div>`,
|
||||
|
||||
baidu: `<li>在电脑上打开 <a href="https://pan.baidu.com" target="_blank">pan.baidu.com</a> 并登录你的百度账号</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → 切换到 <strong>网络 (Network)</strong> 选项卡</li>
|
||||
<li>刷新页面,在请求列表中点击任意一个请求</li>
|
||||
<li>在右侧 <strong>请求头 (Request Headers)</strong> 中找到 <code>Cookie</code> 字段</li>
|
||||
<li>复制整个 Cookie 值,粘贴到上方输入框</li>
|
||||
<li>点击「<b>自动获取</b>」按钮验证 Cookie 是否有效</li>
|
||||
<div class="cookie-tips-note">💡 需要包含 <code>BDUSS</code> 和 <code>STOKEN</code></div>`,
|
||||
|
||||
aliyun: `<li>在电脑上打开 <a href="https://www.aliyundrive.com" target="_blank">aliyundrive.com</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>
|
||||
<div class="cookie-tips-note">💡 需包含 <code>token</code> 等有效字段</div>`,
|
||||
|
||||
'115': `<li>在电脑上打开 <a href="https://115.com" target="_blank">115.com</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>
|
||||
<div class="cookie-tips-note">💡 需包含 <code>UID</code>、<code>CID</code>、<code>SEID</code> 等字段</div>`,
|
||||
|
||||
tianyi: `<li>在电脑上打开 <a href="https://cloud.189.cn" target="_blank">cloud.189.cn</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>
|
||||
<div class="cookie-tips-note">💡 需包含 <code>COOKIE_LOGIN_USER</code>、<code>SESSION</code> 等字段</div>`,
|
||||
|
||||
'123pan': `<li>在电脑上打开 <a href="https://www.123pan.com" target="_blank">123pan.com</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>`,
|
||||
|
||||
uc: `<li>在电脑上打开 <a href="https://drive.uc.cn" target="_blank">drive.uc.cn</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>`,
|
||||
|
||||
xunlei: `<li>在电脑上打开 <a href="https://pan.xunlei.com" target="_blank">pan.xunlei.com</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>`,
|
||||
|
||||
pikpak: `<li>在电脑上打开 <a href="https://www.mypikpak.com" target="_blank">mypikpak.com</a> 并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,找到任意请求 → 复制 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>`,
|
||||
}
|
||||
return tutorials[t] || `<li>在电脑上打开该网盘网站并登录</li>
|
||||
<li>按 <code>F12</code> 打开开发者工具 → <strong>网络 (Network)</strong></li>
|
||||
<li>刷新页面,复制任意请求的 <code>Cookie</code></li>
|
||||
<li>粘贴到上方输入框,点击「自动获取」验证</li>`
|
||||
})
|
||||
|
||||
// Initial data load when the view mounts.
onMounted(async () => {
  await loadConfigs()
  await loadCloudTypes()
})

// Re-verify every account automatically every 30 minutes while mounted.
let verifyTimer: ReturnType<typeof setInterval> | null = null
onMounted(() => {
  verifyTimer = setInterval(() => {
    autoVerifyAll()
  }, 30 * 60 * 1000)
})
// Stop the timer on unmount so it does not leak across route changes.
onUnmounted(() => {
  if (verifyTimer) clearInterval(verifyTimer)
})
|
||||
|
||||
async function loadCloudTypes() {
|
||||
try {
|
||||
const result = await getCloudTypes()
|
||||
cloudTypes.value = result.types
|
||||
} catch (e) { console.error('加载网盘类型失败', e) }
|
||||
}
|
||||
|
||||
async function handleCloudToggle(type: string, enabled: boolean) {
|
||||
const ct = cloudTypes.value.find(c => c.type === type)
|
||||
if (!ct) return
|
||||
try {
|
||||
await toggleCloudType(type, enabled)
|
||||
ct.enabled = enabled
|
||||
} catch (e: any) { ElMessage.error(e.message || '切换失败'); ct.enabled = !enabled }
|
||||
}
|
||||
|
||||
async function loadConfigs() {
|
||||
try {
|
||||
configs.value = await getCloudConfigs()
|
||||
} catch (e) {
|
||||
console.error('加载网盘配置失败', e)
|
||||
}
|
||||
}
|
||||
|
||||
async function autoVerifyAll() {
|
||||
for (const cfg of configs.value) {
|
||||
if (cfg.cookie_preview || cfg.nickname) {
|
||||
await verifyOne(cfg, true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function verifyAll() {
|
||||
for (const cfg of configs.value) {
|
||||
if ((cfg.cookie_preview || cfg.nickname) && !cfg._verifying) {
|
||||
await verifyOne(cfg, false)
|
||||
}
|
||||
}
|
||||
ElMessage.success('全部验证完成')
|
||||
}
|
||||
|
||||
async function verifyOne(row: CloudConfig & { _verifying?: boolean }, silent = false) {
|
||||
if (!row.cookie_preview && !row.nickname) {
|
||||
if (!silent) ElMessage.warning('该配置没有 Cookie,请先编辑保存后再验证')
|
||||
return
|
||||
}
|
||||
row._verifying = true
|
||||
try {
|
||||
const result = await testCloudConnection(row.cloud_type, undefined, row.id)
|
||||
row.verification_status = result.success ? 'valid' : 'invalid'
|
||||
if (result.success) {
|
||||
if (result.nickname && !row.nickname) row.nickname = result.nickname
|
||||
if (result.storage_used) row.storage_used = result.storage_used
|
||||
if (result.storage_total) row.storage_total = result.storage_total
|
||||
if (!silent) ElMessage.success(`${CLOUD_LABELS[row.cloud_type]}:${result.message}`)
|
||||
} else {
|
||||
if (!silent) ElMessage.error(`${CLOUD_LABELS[row.cloud_type]}:${result.message}`)
|
||||
}
|
||||
} catch (e: any) {
|
||||
row.verification_status = 'invalid'
|
||||
if (!silent) ElMessage.error(`${CLOUD_LABELS[row.cloud_type]}:验证失败`)
|
||||
} finally {
|
||||
row._verifying = false
|
||||
}
|
||||
}
|
||||
|
||||
async function verifyAndFillNickname() {
|
||||
if (!form.cookie) {
|
||||
ElMessage.warning('请先输入 Cookie')
|
||||
return
|
||||
}
|
||||
if (!form.cloud_type) {
|
||||
ElMessage.warning('请先选择网盘类型')
|
||||
return
|
||||
}
|
||||
form._verifying = true
|
||||
try {
|
||||
const result = await testCloudConnection(form.cloud_type as CloudType, form.cookie)
|
||||
if (result.success) {
|
||||
if (result.nickname) form.nickname = result.nickname
|
||||
if (result.storage_used) form._storageUsed = result.storage_used
|
||||
if (result.storage_total) form._storageTotal = result.storage_total
|
||||
ElMessage.success(`昵称:${result.nickname || '获取成功'}`)
|
||||
} else {
|
||||
ElMessage.warning(result.message || '验证失败,请检查 Cookie')
|
||||
}
|
||||
} catch (e: any) {
|
||||
ElMessage.error(e.response?.data?.error || '验证失败,请检查 Cookie')
|
||||
} finally {
|
||||
form._verifying = false
|
||||
}
|
||||
}
|
||||
|
||||
function openDialog(row: CloudConfig | null) {
|
||||
if (row) {
|
||||
editingId.value = row.id ?? null
|
||||
form.cloud_type = row.cloud_type
|
||||
form.nickname = row.nickname || ''
|
||||
form.cookie = row.cookie || ''
|
||||
form._verifying = false
|
||||
} else {
|
||||
editingId.value = null
|
||||
form.cloud_type = '' as CloudType | ''
|
||||
form.nickname = ''
|
||||
form.cookie = ''
|
||||
form._verifying = false
|
||||
}
|
||||
dialogVisible.value = true
|
||||
}
|
||||
|
||||
// Intentionally empty: the cookie placeholder and tutorial panel are computed
// properties keyed on form.cloud_type, so they refresh automatically.
function onCloudTypeChange() {
}
|
||||
|
||||
/**
 * Validate the dialog form, then create or update the cloud config.
 * On create, runs an immediate server-side connection test against the new
 * row — unless storage info was already fetched via "自动获取", which implies
 * the cookie was just verified.
 */
async function handleSave() {
  const valid = await formRef.value?.validate().catch(() => false)
  if (!valid) return

  saving.value = true
  try {
    if (editingId.value) {
      await updateCloudConfig({
        id: editingId.value,
        cloud_type: form.cloud_type as CloudType,
        nickname: form.nickname,
        // Empty cookie means "keep the existing one" (see cookiePlaceholder).
        cookie: form.cookie || undefined,
        is_active: true,
        storage_used: form._storageUsed || undefined,
        storage_total: form._storageTotal || undefined,
      })
      ElMessage.success('配置更新成功')
    } else {
      const saved = await saveCloudConfig({
        cloud_type: form.cloud_type as CloudType,
        nickname: form.nickname,
        cookie: form.cookie,
        is_active: true,
        storage_used: form._storageUsed || undefined,
        storage_total: form._storageTotal || undefined,
      })
      ElMessage.success('配置保存成功')
      // Post-save connectivity check, skipped when storage was pre-fetched.
      if (!form._storageTotal) {
        const result = await testCloudConnection(form.cloud_type as CloudType, undefined, saved.id)
        if (!result.success) {
          ElMessage.warning(`配置已保存,但连接验证失败:${result.message}`)
        }
      }
    }
    dialogVisible.value = false
    editingId.value = null
    await loadConfigs()
  } catch (e: any) {
    ElMessage.error(e.response?.data?.error || '保存失败')
  } finally {
    saving.value = false
  }
}
|
||||
|
||||
async function handleDelete(row: CloudConfig) {
|
||||
try {
|
||||
await deleteCloudConfig(row.id!)
|
||||
ElMessage.success('删除成功')
|
||||
await loadConfigs()
|
||||
} catch (e) {
|
||||
ElMessage.error('删除失败')
|
||||
}
|
||||
}
|
||||
|
||||
/** 解析字节数 → 数值 */
|
||||
function parseBytes(s: string): number {
|
||||
const m = s.match(/^([\d.]+)\s*(B|KB|MB|GB|TB)$/i)
|
||||
if (!m) return 0
|
||||
const n = parseFloat(m[1])
|
||||
const units: Record<string, number> = { B: 1, KB: 1024, MB: 1024**2, GB: 1024**3, TB: 1024**4 }
|
||||
return n * (units[m[2].toUpperCase()] || 1)
|
||||
}
|
||||
|
||||
function storagePercent(row: CloudConfig): number {
|
||||
if (!row.storage_total || !row.storage_used) return 0
|
||||
const total = parseBytes(row.storage_total)
|
||||
const used = parseBytes(row.storage_used)
|
||||
if (total === 0) return 0
|
||||
return Math.min(100, Math.round((used / total) * 100))
|
||||
}
|
||||
|
||||
function storageBarClass(row: CloudConfig): string {
|
||||
const pct = storagePercent(row)
|
||||
if (pct >= 90) return 'bar-danger'
|
||||
if (pct >= 70) return 'bar-warning'
|
||||
return 'bar-normal'
|
||||
}
|
||||
|
||||
function storageFree(row: CloudConfig): string {
|
||||
if (!row.storage_total || !row.storage_used) return '?'
|
||||
const total = parseBytes(row.storage_total)
|
||||
const used = parseBytes(row.storage_used)
|
||||
if (total === 0) return '?'
|
||||
const free = total - used
|
||||
if (free < 1024) return '小于 1 KB'
|
||||
if (free < 1024 * 1024) return (free / 1024).toFixed(1) + ' KB'
|
||||
if (free < 1024 * 1024 * 1024) return (free / (1024 * 1024)).toFixed(1) + ' MB'
|
||||
if (free < 1024 * 1024 * 1024 * 1024) return (free / (1024 * 1024 * 1024)).toFixed(1) + ' GB'
|
||||
return (free / (1024 * 1024 * 1024 * 1024)).toFixed(1) + ' TB'
|
||||
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.cloud-config {
|
||||
background: var(--bg-white);
|
||||
border-radius: var(--radius-card);
|
||||
padding: 24px;
|
||||
}
|
||||
.cloud-toggle-grid { display: flex; flex-wrap: wrap; gap: 12px; }
|
||||
.cloud-toggle-chip { display: flex; align-items: center; gap: 8px; padding: 8px 12px; border: 1px solid var(--el-border-color-light); border-radius: 8px; background: var(--el-bg-color); }
|
||||
.cloud-toggle-chip:hover { border-color: var(--el-color-primary-light-5); }
|
||||
.cloud-icon-img { width: 20px; height: 20px; object-fit: contain; }
|
||||
.cloud-label { font-size: 13px; font-weight: 500; }
|
||||
.form-tip { font-size: 12px; color: var(--el-text-color-secondary); }
|
||||
.toolbar {
|
||||
margin-bottom: 16px;
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
.sign-summary-tag {
|
||||
margin-left: 4px;
|
||||
}
|
||||
.nickname-text {
|
||||
font-weight: 600;
|
||||
color: #303133;
|
||||
}
|
||||
.uid-cell {
|
||||
font-family: 'SF Mono', Monaco, 'Cascadia Code', monospace;
|
||||
font-size: 11px;
|
||||
color: #909399;
|
||||
letter-spacing: 0.3px;
|
||||
}
|
||||
/* 空间进度条 */
|
||||
.storage-cell {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 3px;
|
||||
padding: 2px 0;
|
||||
}
|
||||
.storage-bar-wrap {
|
||||
height: 4px;
|
||||
background: #f0f2f5;
|
||||
border-radius: 2px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.storage-bar-fill {
|
||||
height: 100%;
|
||||
border-radius: 2px;
|
||||
transition: width 0.3s;
|
||||
}
|
||||
.storage-bar-fill.bar-normal { background: #67c23a; }
|
||||
.storage-bar-fill.bar-warning { background: #e6a23c; }
|
||||
.storage-bar-fill.bar-danger { background: #f56c6c; }
|
||||
.storage-text {
|
||||
font-size: 11px;
|
||||
color: #909399;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 3px;
|
||||
}
|
||||
.storage-used { color: #606266; font-weight: 600; }
|
||||
.storage-total { color: #303133; font-weight: 600; }
|
||||
.storage-free { color: #909399; }
|
||||
.save-count {
|
||||
font-size: 12px;
|
||||
color: #909399;
|
||||
}
|
||||
.verifying {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
font-size: 12px;
|
||||
color: #909399;
|
||||
}
|
||||
:deep(.el-input-group__append) {
|
||||
padding: 0;
|
||||
}
|
||||
:deep(.el-input-group__append .el-button) {
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
/* Cookie 教程卡片 */
|
||||
.cookie-tips-item :deep(.el-form-item__content) {
|
||||
margin-left: 0 !important;
|
||||
}
|
||||
.cookie-tips {
|
||||
background: #f8faff;
|
||||
border: 1px solid #e8f0fe;
|
||||
border-radius: 8px;
|
||||
padding: 14px 16px;
|
||||
font-size: 12px;
|
||||
line-height: 1.8;
|
||||
color: #606266;
|
||||
width: 100%;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
.cookie-tips-header {
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
.cookie-tips-title {
|
||||
font-weight: 700;
|
||||
color: #409eff;
|
||||
font-size: 13px;
|
||||
}
|
||||
.cookie-tips-steps {
|
||||
margin: 0;
|
||||
padding-left: 20px;
|
||||
}
|
||||
.cookie-tips-steps li {
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
.cookie-tips-steps code {
|
||||
background: #ecf5ff;
|
||||
padding: 1px 5px;
|
||||
border-radius: 3px;
|
||||
font-size: 11px;
|
||||
font-family: 'SF Mono', Monaco, 'Cascadia Code', monospace;
|
||||
}
|
||||
.cookie-tips-note {
|
||||
margin-top: 8px;
|
||||
padding: 6px 10px;
|
||||
background: #fffbe6;
|
||||
border: 1px solid #fff3c4;
|
||||
border-radius: 4px;
|
||||
color: #8a6d3b;
|
||||
font-size: 11px;
|
||||
line-height: 1.5;
|
||||
}
|
||||
.cookie-tips-note code {
|
||||
background: #f5f0e0;
|
||||
font-size: 11px;
|
||||
}
|
||||
</style>
|
||||
113
packages/backend/src/cloud/drivers/aliyun.driver.ts
Executable file
113
packages/backend/src/cloud/drivers/aliyun.driver.ts
Executable file
@@ -0,0 +1,113 @@
|
||||
// Native fetch available in Node 20+
|
||||
|
||||
/** Constructor options for AliyunDriver. */
export interface AliyunConfig {
  // Account cookie. NOTE(review): the anonymous share-validation API visible
  // here does not send it — confirm whether later methods require it.
  cookie?: string;
  // Display name for this account.
  nickname?: string;
}
|
||||
|
||||
export class AliyunDriver {
|
||||
private config: AliyunConfig;
|
||||
private baseUrl = 'https://api.aliyundrive.com';
|
||||
|
||||
  // Store per-account options; no network access happens at construction time.
  constructor(config: AliyunConfig = {}) {
    this.config = config;
  }
|
||||
|
||||
/**
|
||||
* Extract share_id from an Aliyun share URL.
|
||||
* Supports:
|
||||
* https://www.aliyundrive.com/s/XXXYYY
|
||||
* https://www.alipan.com/s/XXXYYY
|
||||
* https://api.aliyundrive.com/v2/share_link/XXXYYY
|
||||
*/
|
||||
  // Pull the share id out of a share URL; returns null for unrecognised formats.
  private extractShareId(shareUrl: string): string | null {
    try {
      const url = new URL(shareUrl);
      // e.g. https://www.aliyundrive.com/s/XXXYYY or https://www.alipan.com/s/XXXYYY
      const pathMatch = url.pathname.match(/\/s\/([a-zA-Z0-9]+)/);
      if (pathMatch) return pathMatch[1];

      // e.g. https://api.aliyundrive.com/v2/share_link/XXXYYY
      const shareMatch = url.pathname.match(/\/share_link\/([a-zA-Z0-9]+)/);
      if (shareMatch) return shareMatch[1];

      return null;
    } catch {
      // new URL() throws on malformed input — treat as "not a share link".
      return null;
    }
  }
|
||||
|
||||
/**
|
||||
* Validate a share link using Aliyun's public anonymous API.
|
||||
* No cookie or token required — this endpoint is open.
|
||||
*
|
||||
* API:
|
||||
* POST https://api.aliyundrive.com/v2/share_link/get_share_by_anonymous
|
||||
* Body: { "share_id": "XXXYYY", "share_pwd": "" }
|
||||
*
|
||||
* Success: returns share_name, file_infos, creator info
|
||||
* Failure: returns error code (ShareLinkExpired, ShareLinkCancelled, etc.)
|
||||
*/
|
||||
async validateShareLink(shareUrl: string): Promise<{
|
||||
valid: boolean;
|
||||
message: string;
|
||||
fileCount?: number;
|
||||
shareName?: string;
|
||||
}> {
|
||||
const shareId = this.extractShareId(shareUrl);
|
||||
if (!shareId) {
|
||||
return { valid: false, message: '无法解析阿里云盘链接格式' };
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(
|
||||
`${this.baseUrl}/v2/share_link/get_share_by_anonymous`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Referer': 'https://www.aliyundrive.com/',
|
||||
'Accept-Language': 'zh-CN,zh;q=0.9',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
share_id: shareId,
|
||||
share_pwd: '',
|
||||
}),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
}
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
return { valid: false, message: `HTTP ${response.status}: API 请求失败` };
|
||||
}
|
||||
|
||||
const data = await response.json() as any;
|
||||
|
||||
// Check for error codes
|
||||
if (data.code) {
|
||||
switch (data.code) {
|
||||
case 'ShareLinkExpired':
|
||||
return { valid: false, message: '分享已失效(已过期)' };
|
||||
case 'ShareLinkCancelled':
|
||||
return { valid: false, message: '分享已被取消' };
|
||||
case 'NotFound.ShareLink':
|
||||
return { valid: false, message: '分享链接不存在' };
|
||||
case 'ShareLinkPasswordIncorrect':
|
||||
return { valid: true, message: '需要提取码(链接有效)' };
|
||||
default:
|
||||
return { valid: false, message: data.message || `未知错误 (${data.code})` };
|
||||
}
|
||||
}
|
||||
|
||||
// Success — valid share link
|
||||
const fileInfos = data.file_infos || [];
|
||||
return {
|
||||
valid: true,
|
||||
message: `有效链接(${fileInfos.length} 个文件)`,
|
||||
fileCount: fileInfos.length,
|
||||
shareName: data.share_name || '',
|
||||
};
|
||||
} catch (err: any) {
|
||||
return { valid: false, message: `网络错误: ${err.message || err}` };
|
||||
}
|
||||
}
|
||||
}
|
||||
1189
packages/backend/src/cloud/drivers/baidu.driver.ts
Normal file
1189
packages/backend/src/cloud/drivers/baidu.driver.ts
Normal file
File diff suppressed because it is too large
Load Diff
289
packages/backend/src/cloud/drivers/quark-ad-cleanup.ts
Normal file
289
packages/backend/src/cloud/drivers/quark-ad-cleanup.ts
Normal file
@@ -0,0 +1,289 @@
|
||||
import { getSystemConfig } from "../../admin/system-config.service";
|
||||
import { getHeaders, makeQuery } from "./quark-api";
|
||||
import { listDir, listDirAllPages } from "./quark-api";
|
||||
import { humanDelay } from "./quark-api";
|
||||
|
||||
/**
|
||||
* 广告关键词清理模块。
|
||||
* 在转存完成后执行:
|
||||
* 1. 遍历转存的目录,删除文件名/文件夹名含广告关键词的内容
|
||||
* 2. 在转存根目录下创建警示文件夹(置顶提醒)
|
||||
*/
|
||||
|
||||
// ==================== 配置读取 ====================
|
||||
|
||||
/** 从 DB 读取广告关键词列表 */
|
||||
export function getAdKeywords(): string[] {
|
||||
const raw = getSystemConfig("quark_ad_keywords") || "";
|
||||
return raw
|
||||
.split("\n")
|
||||
.map((s) => s.trim())
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
/** 从 DB 读取警示文件夹名称列表 */
|
||||
export function getWarningFolderNames(): string[] {
|
||||
const raw = getSystemConfig("quark_warning_folder_names") || "";
|
||||
return raw
|
||||
.split("\n")
|
||||
.map((s) => s.trim())
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
/** 从 DB 读取可疑文件后缀列表 */
|
||||
export function getSusExtensions(): string[] {
|
||||
const raw = getSystemConfig("quark_sus_extensions") || "";
|
||||
if (raw.trim()) {
|
||||
return raw
|
||||
.split("\n")
|
||||
.map((s) => s.trim().toLowerCase().replace(/^\./, ""))
|
||||
.filter(Boolean);
|
||||
}
|
||||
// 默认可疑后缀
|
||||
return ["bat", "exe", "vbs", "scr", "cmd", "com", "pif", "js", "jar", "msi", "reg", "inf", "ps1"];
|
||||
}
|
||||
|
||||
// ==================== 关键词检测 ====================
|
||||
|
||||
/** 检查文件名是否包含任意广告关键词 */
|
||||
export function containsAdKeyword(
|
||||
fileName: string,
|
||||
keywords: string[],
|
||||
): boolean {
|
||||
if (!keywords.length) return false;
|
||||
const lower = fileName.toLowerCase();
|
||||
return keywords.some((kw) => kw && lower.includes(kw.toLowerCase()));
|
||||
}
|
||||
|
||||
// ==================== 删除操作 ====================
|
||||
|
||||
/**
|
||||
* 遍历指定目录(含子目录),删除匹配广告关键词的文件和文件夹。
|
||||
* 返回删除的文件数。
|
||||
*/
|
||||
export async function deleteAdFiles(
|
||||
cookie: string,
|
||||
dirFid: string,
|
||||
keywords: string[],
|
||||
): Promise<number> {
|
||||
if (!keywords.length) return 0;
|
||||
|
||||
let deletedCount = 0;
|
||||
const stack: string[] = [dirFid];
|
||||
const visited = new Set<string>();
|
||||
|
||||
while (stack.length > 0) {
|
||||
const fid = stack.pop()!;
|
||||
if (visited.has(fid)) continue;
|
||||
visited.add(fid);
|
||||
|
||||
await humanDelay();
|
||||
const files = await listDir(cookie, fid);
|
||||
if (!files || files.length === 0) continue;
|
||||
|
||||
// 先收集所有需要删除的 fid
|
||||
const toDelete: string[] = [];
|
||||
const toKeep: string[] = [];
|
||||
|
||||
const extensions = getSusExtensions();
|
||||
for (const file of files) {
|
||||
const ext = file.file_name.split(".").pop()?.toLowerCase() || "";
|
||||
const isSusExt = extensions.includes(ext);
|
||||
if (containsAdKeyword(file.file_name, keywords) || isSusExt) {
|
||||
toDelete.push(file.fid);
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 标记删除: "${file.file_name}" (fid: ${file.fid})${isSusExt ? " [可疑后缀]" : " [广告关键词]"}`,
|
||||
);
|
||||
} else {
|
||||
toKeep.push(file.fid);
|
||||
// 如果是目录且不删除,继续遍历子目录
|
||||
if (file.dir) {
|
||||
stack.push(file.fid);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 批量删除
|
||||
if (toDelete.length > 0) {
|
||||
const deleteOk = await batchDeleteFiles(cookie, toDelete);
|
||||
if (deleteOk) {
|
||||
deletedCount += toDelete.length;
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 已删除 ${toDelete.length} 个广告文件`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return deletedCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* 批量删除文件/文件夹(移入回收站)。
|
||||
*/
|
||||
async function batchDeleteFiles(
|
||||
cookie: string,
|
||||
fids: string[],
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
const resp = await fetch(
|
||||
`https://drive-pc.quark.cn/1/clouddrive/file/trash?${makeQuery()}`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
...getHeaders(cookie),
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
action_type: 2, // 2 = 移入回收站
|
||||
file_list: fids.map((fid) => ({ fid })),
|
||||
exclude_fids: [],
|
||||
}),
|
||||
signal: AbortSignal.timeout(15000),
|
||||
},
|
||||
);
|
||||
const data = (await resp.json()) as any;
|
||||
if (data.status === 200) {
|
||||
return true;
|
||||
}
|
||||
console.log(
|
||||
`[Quark-AdCleanup] batchDelete 返回非200: status=${data.status} msg=${data.message}`,
|
||||
);
|
||||
return false;
|
||||
} catch (err: any) {
|
||||
console.log(`[Quark-AdCleanup] batchDelete 错误: ${err.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== 警示文件夹创建 ====================
|
||||
|
||||
/**
|
||||
* 在转存根目录下创建警示文件夹。
|
||||
* 文件夹名前加 ⚠️ 和空格,让其按字母排序置顶。
|
||||
* 已存在的则跳过。
|
||||
*/
|
||||
export async function createWarningDirectories(
|
||||
cookie: string,
|
||||
dirNames: string[],
|
||||
): Promise<void> {
|
||||
if (!dirNames.length) return;
|
||||
|
||||
// 先获取根目录下所有文件夹,避免重复创建
|
||||
await humanDelay();
|
||||
const rootFiles = await listDirAllPages(cookie, "0");
|
||||
const existingDirs = new Set(
|
||||
rootFiles.filter((f) => f.dir).map((f) => f.file_name),
|
||||
);
|
||||
|
||||
for (const name of dirNames) {
|
||||
// 格式化名称:确保以 ⚠️ 开头
|
||||
let formattedName = name;
|
||||
if (!formattedName.startsWith("⚠️") && !formattedName.startsWith("⚠")) {
|
||||
formattedName = `⚠️ ${formattedName}`;
|
||||
}
|
||||
// 去掉多余空格
|
||||
formattedName = formattedName.replace(/\s+/g, " ").trim();
|
||||
|
||||
if (existingDirs.has(formattedName)) {
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 警示文件夹已存在,跳过: "${formattedName}"`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
await createSingleDir(cookie, formattedName);
|
||||
// 加入已存在集合,防止同名重试
|
||||
existingDirs.add(formattedName);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建单个文件夹。
|
||||
*/
|
||||
async function createSingleDir(
|
||||
cookie: string,
|
||||
dirName: string,
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
const resp = await fetch(
|
||||
`https://drive-pc.quark.cn/1/clouddrive/file?${makeQuery()}`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
...getHeaders(cookie),
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
pdir_fid: "0",
|
||||
file_name: dirName,
|
||||
dir: true,
|
||||
dir_path: "",
|
||||
}),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
},
|
||||
);
|
||||
const data = (await resp.json()) as any;
|
||||
if (data.status === 200 && data.data?.fid) {
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 已创建警示文件夹: "${dirName}" (fid: ${data.data.fid})`,
|
||||
);
|
||||
return true;
|
||||
}
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 创建文件夹失败: status=${data.status} msg=${data.message}`,
|
||||
);
|
||||
return false;
|
||||
} catch (err: any) {
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 创建文件夹错误: "${dirName}" — ${err.message}`,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== 主入口 ====================
|
||||
|
||||
/**
|
||||
* 执行广告清理 + 创建警示文件夹。
|
||||
* 在转存重命名后调用。
|
||||
*/
|
||||
export async function runAdCleanup(
|
||||
cookie: string,
|
||||
savedDirFid: string,
|
||||
): Promise<{ adDeleted: number; warningDirs: number }> {
|
||||
const keywords = getAdKeywords();
|
||||
const warningNames = getWarningFolderNames();
|
||||
|
||||
let adDeleted = 0;
|
||||
let warningDirs = 0;
|
||||
|
||||
// 1. 广告关键词清理
|
||||
if (keywords.length > 0) {
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 开始广告关键词清理: ${keywords.length} 个关键词`,
|
||||
);
|
||||
adDeleted = await deleteAdFiles(cookie, savedDirFid, keywords);
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 广告清理完成,共删除 ${adDeleted} 个文件/文件夹`,
|
||||
);
|
||||
} else {
|
||||
console.log("[Quark-AdCleanup] 无广告关键词配置,跳过清理");
|
||||
}
|
||||
|
||||
// 2. 创建警示文件夹
|
||||
if (warningNames.length > 0) {
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 开始创建警示文件夹: ${warningNames.length} 个`,
|
||||
);
|
||||
await createWarningDirectories(cookie, warningNames);
|
||||
warningDirs = warningNames.length;
|
||||
console.log(
|
||||
`[Quark-AdCleanup] 警示文件夹创建完成(共 ${warningDirs} 个)`,
|
||||
);
|
||||
} else {
|
||||
console.log("[Quark-AdCleanup] 无警示文件夹配置,跳过创建");
|
||||
}
|
||||
|
||||
return { adDeleted, warningDirs };
|
||||
}
|
||||
237
packages/backend/src/cloud/drivers/quark-api.ts
Normal file
237
packages/backend/src/cloud/drivers/quark-api.ts
Normal file
@@ -0,0 +1,237 @@
|
||||
// Native fetch available in Node 20+
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
/**
|
||||
* HTTP 封装层 — 统一处理夸克 API 的请求签名、headers、query params。
|
||||
* 所有模块共用此单例/函数集,不持有状态。
|
||||
*/
|
||||
|
||||
export interface QuarkConfig {
|
||||
cookie: string;
|
||||
nickname?: string;
|
||||
}
|
||||
|
||||
// ==================== Headers & Params ====================
|
||||
|
||||
const BASE_URL = 'https://drive-pc.quark.cn';
|
||||
|
||||
export function getHeaders(cookie: string): Record<string, string> {
|
||||
return {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Cookie': cookie,
|
||||
'Accept': 'application/json, text/plain, */*',
|
||||
'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
|
||||
'Referer': 'https://pan.quark.cn/',
|
||||
'Origin': 'https://pan.quark.cn',
|
||||
};
|
||||
}
|
||||
|
||||
export function getCommonParams(): Record<string, string> {
|
||||
return { pr: 'ucpro', fr: 'pc' };
|
||||
}
|
||||
|
||||
/** Generate query string with common params + random timing to mimic browser */
|
||||
export function makeQuery(extra: Record<string, string> = {}): string {
|
||||
const __dt = Math.floor(Math.random() * 240000 + 60000);
|
||||
const __t = Date.now() / 1000;
|
||||
return new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
uc_param_str: '',
|
||||
app: 'clouddrive',
|
||||
__dt: String(__dt),
|
||||
__t: String(__t),
|
||||
...extra,
|
||||
}).toString();
|
||||
}
|
||||
|
||||
/** Random delay to mimic human behavior (500-2000ms) */
|
||||
export async function humanDelay(): Promise<void> {
|
||||
const ms = Math.floor(Math.random() * 1500) + 500;
|
||||
await new Promise(r => setTimeout(r, ms));
|
||||
}
|
||||
|
||||
/** Generate a random password for share links */
|
||||
export function randomSharePwd(): string {
|
||||
return Math.floor(1000 + Math.random() * 9000).toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract kps/sign/vcode from cookie for API signing (bare keys, no __ prefix).
|
||||
*/
|
||||
export function getMparam(cookie: string): { kps?: string; sign?: string; vcode?: string } {
|
||||
// Match both __kps and kps (with or without __ prefix)
|
||||
const kpsMatch = cookie.match(/__?kps=([a-zA-Z0-9%+/=]+)/);
|
||||
const signMatch = cookie.match(/__?sign=([a-zA-Z0-9%+/=]+)/);
|
||||
const vcodeMatch = cookie.match(/__?vcode=([a-zA-Z0-9%+/=]+)/);
|
||||
if (kpsMatch && signMatch && vcodeMatch) {
|
||||
return {
|
||||
kps: kpsMatch[1],
|
||||
sign: signMatch[1].replace(/%25/g, '%'),
|
||||
vcode: vcodeMatch[1],
|
||||
};
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
// ==================== Shared fetch helpers ====================
|
||||
|
||||
/**
|
||||
* Raw fetch wrapper with JSON parse + status check.
|
||||
* Returns parsed JSON body on 2xx, null on network error.
|
||||
*/
|
||||
export async function apiFetch<T = any>(
|
||||
path: string,
|
||||
options: {
|
||||
method?: string;
|
||||
query?: Record<string, string>;
|
||||
body?: any;
|
||||
cookie: string;
|
||||
timeout?: number;
|
||||
},
|
||||
): Promise<T | null> {
|
||||
const { method = 'GET', query, body, cookie, timeout = 10000 } = options;
|
||||
let url = `${BASE_URL}${path}`;
|
||||
if (query) url += `?${new URLSearchParams(query).toString()}`;
|
||||
try {
|
||||
const resp = await fetch(url, {
|
||||
method,
|
||||
headers: {
|
||||
...getHeaders(cookie),
|
||||
...(body ? { 'Content-Type': 'application/json' } : {}),
|
||||
},
|
||||
body: body ? JSON.stringify(body) : undefined,
|
||||
signal: AbortSignal.timeout(timeout),
|
||||
});
|
||||
if (!resp.ok) return null;
|
||||
return (await resp.json()) as T;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== File listing (shared across modules) ====================
|
||||
|
||||
export interface QuarkFile {
|
||||
fid: string;
|
||||
file_name: string;
|
||||
share_fid_token?: string;
|
||||
dir: boolean;
|
||||
size?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* List files in a directory by FID.
|
||||
*/
|
||||
export async function listDir(cookie: string, pdirFid: string, page = 1, pageSize = 50): Promise<QuarkFile[]> {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
uc_param_str: '',
|
||||
pdir_fid: pdirFid,
|
||||
_page: String(page),
|
||||
_size: String(pageSize),
|
||||
_fetch_total: '1',
|
||||
_fetch_sub_dirs: '0',
|
||||
_sort: 'file_type:asc,updated_at:desc',
|
||||
fetch_all_file: '1',
|
||||
fetch_risk_file_name: '1',
|
||||
});
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/sort?${params.toString()}`,
|
||||
{ headers: getHeaders(cookie), signal: AbortSignal.timeout(15000) },
|
||||
);
|
||||
if (!resp.ok) return [];
|
||||
const data = await resp.json() as any;
|
||||
if (data.status !== 200) return [];
|
||||
return (data.data?.list || []).filter((f: any) => f.fid).map((f: any) => ({
|
||||
fid: f.fid,
|
||||
file_name: f.file_name,
|
||||
share_fid_token: '',
|
||||
dir: f.dir || false,
|
||||
size: f.size || 0,
|
||||
}));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List root directory (pdir_fid=0) — returns all top-level dirs/files.
|
||||
*/
|
||||
export async function listRootDir(cookie: string): Promise<QuarkFile[]> {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
pr: 'ucpro', fr: 'pc',
|
||||
pdir_fid: '0',
|
||||
_page: '1', _size: '200',
|
||||
_fetch_total: '1', _fetch_sub_dirs: '0',
|
||||
_sort: 'file_type:asc,updated_at:desc',
|
||||
fetch_all_file: '1',
|
||||
fetch_risk_file_name: '1',
|
||||
});
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/sort?${params.toString()}`,
|
||||
{ headers: getHeaders(cookie), signal: AbortSignal.timeout(15000) },
|
||||
);
|
||||
if (!resp.ok) return [];
|
||||
const data = await resp.json() as any;
|
||||
if (data.status !== 200 || !data.data?.list) return [];
|
||||
return (data.data.list || []).map((f: any) => ({
|
||||
fid: f.fid,
|
||||
file_name: f.file_name,
|
||||
dir: f.dir || false,
|
||||
size: f.size || 0,
|
||||
}));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all files in a directory, handling pagination.
|
||||
* Fetches all pages until no more results.
|
||||
*/
|
||||
export async function listDirAllPages(cookie: string, pdirFid: string): Promise<QuarkFile[]> {
|
||||
const allFiles: QuarkFile[] = [];
|
||||
let page = 1;
|
||||
const pageSize = 100;
|
||||
let total = -1;
|
||||
while (total === -1 || (page - 1) * pageSize < total) {
|
||||
const files = await listDir(cookie, pdirFid, page, pageSize);
|
||||
if (!files.length) break;
|
||||
allFiles.push(...files);
|
||||
if (total === -1) {
|
||||
total = files.length;
|
||||
}
|
||||
page++;
|
||||
}
|
||||
return allFiles;
|
||||
}
|
||||
|
||||
// ==================== Format utilities ====================
|
||||
|
||||
export function formatBytes(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
return parseFloat((bytes / Math.pow(1024, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/** Generate a daily folder name (e.g. "2026-05-03") for organizing saves */
|
||||
export function dailyFolderName(): string {
|
||||
const d = new Date();
|
||||
const y = d.getFullYear();
|
||||
const m = String(d.getMonth() + 1).padStart(2, '0');
|
||||
const day = String(d.getDate()).padStart(2, '0');
|
||||
return `${y}-${m}-${day}`;
|
||||
}
|
||||
|
||||
/** Generate a random folder name for saving (fallback) */
|
||||
export function randomFolderName(): string {
|
||||
const chars = 'abcdefghijklmnopqrstuvwxyz0123456789';
|
||||
let name = '';
|
||||
for (let i = 0; i < 12; i++) {
|
||||
name += chars[Math.floor(Math.random() * chars.length)];
|
||||
}
|
||||
return name;
|
||||
}
|
||||
60
packages/backend/src/cloud/drivers/quark-auth.ts
Normal file
60
packages/backend/src/cloud/drivers/quark-auth.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { QuarkConfig } from './quark-api';
|
||||
import { getHeaders, getMparam, apiFetch, makeQuery } from './quark-api';
|
||||
|
||||
/**
|
||||
* 认证模块 — Cookie 验证、账号信息获取、QR 登录状态检查。
|
||||
* 所有方法以 cookie 字符串为参数,不持有驱动状态。
|
||||
*/
|
||||
|
||||
// ==================== Validate ====================
|
||||
|
||||
/**
|
||||
* Validate the cookie by fetching user info.
|
||||
*/
|
||||
export async function validate(cookie: string): Promise<boolean> {
|
||||
const MAX_RETRIES = 2;
|
||||
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
|
||||
try {
|
||||
// Use account/info API (same as quark-auto-save project)
|
||||
// Only needs __uid cookie, no mparam (kps/sign/vcode) required
|
||||
const url = 'https://pan.quark.cn/account/info?fr=pc&platform=pc';
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
...getHeaders(cookie),
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/3.14.2 Chrome/112.0.5615.165 Electron/24.1.3.8 Safari/537.36 Channel/pckk_other_ch',
|
||||
},
|
||||
signal: AbortSignal.timeout(15000),
|
||||
});
|
||||
if (!response.ok) return false;
|
||||
const data = await response.json() as any;
|
||||
if (data?.data?.nickname) return true;
|
||||
} catch (err: any) {
|
||||
if (attempt < MAX_RETRIES) {
|
||||
console.log(`[Quark] validate attempt ${attempt + 1} failed: ${err.message}, retrying...`);
|
||||
await new Promise(r => setTimeout(r, 2000));
|
||||
continue;
|
||||
}
|
||||
console.log(`[Quark] validate all ${MAX_RETRIES + 1} attempts failed: ${err.message}`);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/** Fetch nickname from Quark account info (same API used by quark-auto-save) */
|
||||
export async function fetchNickname(cookie: string): Promise<string | null> {
|
||||
try {
|
||||
const url = 'https://pan.quark.cn/account/info?fr=pc&platform=pc';
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
...getHeaders(cookie),
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/3.14.2 Chrome/112.0.5615.165 Electron/24.1.3.8 Safari/537.36 Channel/pckk_other_ch',
|
||||
},
|
||||
signal: AbortSignal.timeout(15000),
|
||||
});
|
||||
if (!response.ok) return null;
|
||||
const data = await response.json() as any;
|
||||
return data?.data?.nickname || null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
315
packages/backend/src/cloud/drivers/quark-cleanup.ts
Normal file
315
packages/backend/src/cloud/drivers/quark-cleanup.ts
Normal file
@@ -0,0 +1,315 @@
|
||||
import { getHeaders, getCommonParams, getMparam, listRootDir, listDirAllPages, formatBytes, humanDelay, makeQuery, listDir, QuarkFile } from './quark-api';
|
||||
|
||||
/**
|
||||
* 容量信息 & 空间清理模块。
|
||||
*/
|
||||
const BASE_URL = 'https://drive-pc.quark.cn';
|
||||
|
||||
// ==================== Storage Info ====================
|
||||
|
||||
/** Cached used space, keyed by hour block (3h window) */
|
||||
const cachedUsedSpace: { bytes: number; hourBlock: number } | null = null;
|
||||
|
||||
// We use a function-scoped cache instead of instance field
|
||||
const storageCache: { bytes: number; hourBlock: number } = { bytes: 0, hourBlock: -1 };
|
||||
|
||||
/**
|
||||
* Get total capacity from /capacity/detail API.
|
||||
* Also does a quick used-space estimate by summing root-level file sizes + subdir sizes
|
||||
* (夸克目录的 size 字段 = 该目录内所有文件总大小,无需递归).
|
||||
* If the API fails (e.g. missing sign params), falls back to fallbackTotal if provided.
|
||||
*/
|
||||
export async function getStorageInfoQuick(cookie: string, fallbackTotal?: string): Promise<{ total: string; totalBytes: number; used: string; usedBytes: number }> {
|
||||
try {
|
||||
const mparam = getMparam(cookie);
|
||||
const params = new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
kps: mparam.kps || '',
|
||||
sign: mparam.sign || '',
|
||||
vcode: mparam.vcode || '',
|
||||
});
|
||||
const capResponse = await fetch(`${BASE_URL}/1/clouddrive/capacity/detail?${params.toString()}`, {
|
||||
headers: getHeaders(cookie),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
let totalBytes = 0;
|
||||
if (capResponse.ok) {
|
||||
const data = await capResponse.json() as any;
|
||||
if (data.status === 200 && data.data) {
|
||||
totalBytes = data.data.capacity_summary?.sum_capacity || 0;
|
||||
if (totalBytes === 0) {
|
||||
const memberships = [...(data.data.effect || []), ...(data.data.expired || [])];
|
||||
totalBytes = memberships.reduce((max: number, m: any) => Math.max(max, m.capacity || 0), 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Quick used-space estimate: sum root-level file sizes + subdir sizes
|
||||
let usedBytes = 0;
|
||||
try {
|
||||
const rootFiles = await listRootDir(cookie);
|
||||
for (const f of rootFiles) {
|
||||
usedBytes += f.size || 0;
|
||||
}
|
||||
} catch {}
|
||||
|
||||
// Cache the result (3h window)
|
||||
const currentHourBlock = Math.floor(new Date().getHours() / 3);
|
||||
storageCache.bytes = usedBytes;
|
||||
storageCache.hourBlock = currentHourBlock;
|
||||
|
||||
if (totalBytes > 0) {
|
||||
return {
|
||||
total: formatBytes(totalBytes),
|
||||
totalBytes,
|
||||
used: formatBytes(usedBytes),
|
||||
usedBytes,
|
||||
};
|
||||
}
|
||||
} catch {}
|
||||
|
||||
// Fallback: try to parse from a human-readable string like "6 TB"
|
||||
if (fallbackTotal) {
|
||||
const match = fallbackTotal.match(/^([\d.]+)\s*([KMGT]B?)/i);
|
||||
if (match) {
|
||||
const num = parseFloat(match[1]);
|
||||
const unit = match[2].toUpperCase();
|
||||
const multipliers: Record<string, number> = { B: 1, KB: 1024, MB: 1024 ** 2, GB: 1024 ** 3, TB: 1024 ** 4, PB: 1024 ** 5 };
|
||||
const multiplier = multipliers[unit] || multipliers[unit.replace('B', '') + 'B'] || 0;
|
||||
if (multiplier > 0) {
|
||||
return { total: fallbackTotal, totalBytes: Math.round(num * multiplier), used: '-', usedBytes: 0 };
|
||||
}
|
||||
}
|
||||
}
|
||||
return { total: '-', totalBytes: 0, used: '-', usedBytes: 0 };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get storage info with used space calculation.
|
||||
*/
|
||||
export async function getStorageInfo(cookie: string): Promise<{ used: string; total: string; usedBytes: number; totalBytes: number }> {
|
||||
try {
|
||||
const mparam = getMparam(cookie);
|
||||
let totalBytes = 0;
|
||||
const params = new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
kps: mparam.kps || '',
|
||||
sign: mparam.sign || '',
|
||||
vcode: mparam.vcode || '',
|
||||
});
|
||||
const response = await fetch(`${BASE_URL}/1/clouddrive/capacity/detail?${params.toString()}`, {
|
||||
headers: getHeaders(cookie),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
if (response.ok) {
|
||||
const data = await response.json() as any;
|
||||
if (data.status === 200 && data.data) {
|
||||
totalBytes = data.data.capacity_summary?.sum_capacity || 0;
|
||||
if (totalBytes === 0) {
|
||||
const memberships = [...(data.data.effect || []), ...(data.data.expired || [])];
|
||||
totalBytes = memberships.reduce((max: number, m: any) => Math.max(max, m.capacity || 0), 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const usedBytes = await calculateUsedSpace(cookie);
|
||||
|
||||
if (totalBytes > 0 || usedBytes > 0) {
|
||||
return {
|
||||
total: totalBytes > 0 ? formatBytes(totalBytes) : '-',
|
||||
used: formatBytes(usedBytes),
|
||||
usedBytes,
|
||||
totalBytes: totalBytes > 0 ? totalBytes : 0,
|
||||
};
|
||||
}
|
||||
return { used: '0 B', total: '-', usedBytes: 0, totalBytes: 0 };
|
||||
} catch {
|
||||
return { used: '-', total: '-', usedBytes: 0, totalBytes: 0 };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate total used space by recursively traversing all files
|
||||
* and summing their sizes. Uses 3-hour time window cache.
|
||||
*/
|
||||
export async function calculateUsedSpace(cookie: string): Promise<number> {
|
||||
const currentHourBlock = Math.floor(new Date().getHours() / 3);
|
||||
if (storageCache.hourBlock === currentHourBlock && storageCache.bytes > 0) {
|
||||
return storageCache.bytes;
|
||||
}
|
||||
let totalUsed = 0;
|
||||
const stack: string[] = ['0'];
|
||||
const visited = new Set<string>();
|
||||
while (stack.length > 0) {
|
||||
const fid = stack.pop()!;
|
||||
if (visited.has(fid)) continue;
|
||||
visited.add(fid);
|
||||
const files = await listDirAllPages(cookie, fid);
|
||||
if (!files.length) continue;
|
||||
for (const f of files) {
|
||||
if (f.dir) {
|
||||
stack.push(f.fid);
|
||||
} else {
|
||||
totalUsed += f.size || 0;
|
||||
}
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 50));
|
||||
}
|
||||
storageCache.bytes = totalUsed;
|
||||
storageCache.hourBlock = currentHourBlock;
|
||||
return totalUsed;
|
||||
}
|
||||
|
||||
// ==================== Cleanup ====================
|
||||
|
||||
/**
|
||||
* Trash specified files/folders (move to recycle bin).
|
||||
*/
|
||||
export async function trashFiles(cookie: string, fids: string[]): Promise<boolean> {
|
||||
if (!fids.length) return true;
|
||||
try {
|
||||
const response = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/trash?${makeQuery()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
action_type: 1, // 1 = move to trash
|
||||
filelist: fids,
|
||||
exclude_filelist: [],
|
||||
}),
|
||||
signal: AbortSignal.timeout(30000),
|
||||
},
|
||||
);
|
||||
if (!response.ok) return false;
|
||||
const data = await response.json() as any;
|
||||
if (data.status === 200) return true;
|
||||
console.error(`[Quark] trashFiles failed: ${data.message || data.status}`);
|
||||
return false;
|
||||
} catch (err: any) {
|
||||
console.error(`[Quark] trashFiles error: ${err.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Empty the recycle bin — permanently delete all files in trash.
|
||||
*/
|
||||
export async function emptyTrash(cookie: string): Promise<boolean> {
|
||||
try {
|
||||
const response = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/trash/clear?${makeQuery()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({}),
|
||||
signal: AbortSignal.timeout(60000),
|
||||
},
|
||||
);
|
||||
if (!response.ok) return false;
|
||||
const data = await response.json() as any;
|
||||
if (data.status === 200) return true;
|
||||
console.error(`[Quark] emptyTrash failed: ${data.message || data.status}`);
|
||||
return false;
|
||||
} catch (err: any) {
|
||||
console.error(`[Quark] emptyTrash error: ${err.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup: trash date-named folders (YYYY-MM-DD) older than `days`.
|
||||
*/
|
||||
export async function cleanupOldDateFolders(cookie: string, days: number): Promise<{ trashed: number; errors: string[] }> {
|
||||
const errors: string[] = [];
|
||||
const cutoff = new Date();
|
||||
cutoff.setDate(cutoff.getDate() - days);
|
||||
const cutoffStr = cutoff.toISOString().slice(0, 10);
|
||||
|
||||
try {
|
||||
const rootItems = await listRootDir(cookie);
|
||||
const oldFolders = rootItems.filter(item => {
|
||||
if (!item.dir) return false;
|
||||
if (!/^\d{4}-\d{2}-\d{2}$/.test(item.file_name)) return false;
|
||||
return item.file_name < cutoffStr;
|
||||
});
|
||||
|
||||
if (oldFolders.length === 0) {
|
||||
return { trashed: 0, errors: [] };
|
||||
}
|
||||
|
||||
const fids = oldFolders.map(f => f.fid);
|
||||
console.log(`[Quark] Trashing ${fids.length} old date folders (before ${cutoffStr}): ${oldFolders.map(f => f.file_name).join(', ')}`);
|
||||
const ok = await trashFiles(cookie, fids);
|
||||
if (ok) {
|
||||
return { trashed: fids.length, errors: [] };
|
||||
}
|
||||
return { trashed: 0, errors: [`Trash API returned failure for ${fids.length} folders`] };
|
||||
} catch (err: any) {
|
||||
return { trashed: 0, errors: [err.message] };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup: if used space exceeds thresholdPercent% of total,
|
||||
* delete the oldest date folders until totalBytes * deletePercent/100
|
||||
* of total capacity is freed.
|
||||
*/
|
||||
export async function cleanupBySpaceThreshold(
|
||||
cookie: string,
|
||||
thresholdPercent: number,
|
||||
deletePercent: number,
|
||||
): Promise<{ trashed: number; errors: string[] }> {
|
||||
const errors: string[] = [];
|
||||
|
||||
try {
|
||||
const storage = await getStorageInfo(cookie);
|
||||
if (storage.totalBytes <= 0) return { trashed: 0, errors: [] };
|
||||
|
||||
const usagePercent = (storage.usedBytes / storage.totalBytes) * 100;
|
||||
if (usagePercent < thresholdPercent) {
|
||||
console.log(`[Quark] Usage ${usagePercent.toFixed(1)}% below threshold ${thresholdPercent}%, skipping`);
|
||||
return { trashed: 0, errors: [] };
|
||||
}
|
||||
|
||||
const targetBytesToFree = Math.floor(storage.totalBytes * Math.min(deletePercent, 100) / 100);
|
||||
|
||||
const rootItems = await listRootDir(cookie);
|
||||
const dateFolders = rootItems
|
||||
.filter(item => item.dir && /^\d{4}-\d{2}-\d{2}$/.test(item.file_name))
|
||||
.sort((a, b) => a.file_name.localeCompare(b.file_name));
|
||||
|
||||
if (dateFolders.length === 0) return { trashed: 0, errors: [] };
|
||||
|
||||
const hasSizes = dateFolders.some(f => f.size && f.size > 0);
|
||||
let cumulativeSize = 0;
|
||||
const foldersToTrash: typeof dateFolders = [];
|
||||
|
||||
if (hasSizes) {
|
||||
for (const folder of dateFolders) {
|
||||
foldersToTrash.push(folder);
|
||||
cumulativeSize += folder.size || 0;
|
||||
if (cumulativeSize >= targetBytesToFree) break;
|
||||
}
|
||||
} else {
|
||||
const avgSizePerFolder = storage.usedBytes / dateFolders.length;
|
||||
const estCount = Math.max(1, Math.ceil(targetBytesToFree / avgSizePerFolder));
|
||||
foldersToTrash.push(...dateFolders.slice(0, estCount));
|
||||
cumulativeSize = estCount * avgSizePerFolder;
|
||||
}
|
||||
|
||||
const freedMB = (cumulativeSize / 1024 / 1024).toFixed(0);
|
||||
const targetMB = (targetBytesToFree / 1024 / 1024).toFixed(0);
|
||||
const fidsToTrash = foldersToTrash.map(f => f.fid);
|
||||
console.log(`[Quark] Space threshold: trashing ${foldersToTrash.length}/${dateFolders.length} oldest folders (~${freedMB} MB) to free ${targetMB} MB (${deletePercent}% of ${(storage.totalBytes/1024/1024/1024).toFixed(0)} GB total)`);
|
||||
|
||||
const ok = await trashFiles(cookie, fidsToTrash);
|
||||
if (ok) {
|
||||
console.log(`[Quark] ✅ Space-threshold trashed ${foldersToTrash.length} folders (~${freedMB} MB)`);
|
||||
return { trashed: foldersToTrash.length, errors: [] };
|
||||
}
|
||||
return { trashed: 0, errors: [`Space-threshold trash failed for ${foldersToTrash.length} folders`] };
|
||||
} catch (err: any) {
|
||||
return { trashed: 0, errors: [err.message] };
|
||||
}
|
||||
}
|
||||
259
packages/backend/src/cloud/drivers/quark-rename.ts
Normal file
259
packages/backend/src/cloud/drivers/quark-rename.ts
Normal file
@@ -0,0 +1,259 @@
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
/**
|
||||
* 防和谐重命名模块。
|
||||
* 对文件名/目录名执行谐音替换 + 可读标签保留(集数、画质、语言等)。
|
||||
*/
|
||||
|
||||
// ==================== Homophone Map ====================

// Single-character substitution table used by the anti-harmony renamers.
// Each key is replaced by a similar-sounding (homophone) character so the
// resulting name reads the same aloud but no longer matches keyword filters.
// NOTE(review): entries such as '短':'短' map a character to itself (no-op) —
// presumably deliberate pass-through placeholders; confirm intent.
const HOMOPHONE_MAP: Record<string, string> = {
  // Popular web-drive series titles — homophone substitution (same sound, different char)
  '斗':'陡','破':'坡','苍':'仓','穹':'穷',
  '完':'玩','美':'每','世':'士','界':'介',
  '凡':'烦','人':'仁','修':'休','罗':'络',
  '仙':'先','逆':'腻','遮':'折','天':'添',
  '吞':'屯','噬':'逝','大':'达','主':'嘱','宰':'崽',
  '星':'惺','辰':'晨','变':'便','一':'伊','念':'捻',
  '永':'泳','恒':'横','神':'申','墓':'暮','长':'尝','生':'甥',
  '剑':'箭','来':'莱','诡':'鬼','秘':'蜜',
  '全':'泉','职':'值','盘':'磐','龙':'笼',
  '雪':'血','鹰':'莺','莽':'蟒','荒':'慌','纪':'记',
  '珠':'株','王':'亡','座':'坐','牧':'木','记':'计',
  '沧':'舱','元':'圆','图':'涂','紫':'仔','川':'串',
  '百':'白','炼':'恋','成':'程','饶':'绕','命':'冥',
  // Generic homophone substitutions
  '的':'得','了':'啦','是':'事','不':'布','我':'窝',
  '你':'尼','他':'她','有':'友','和':'合','与':'予',
  '上':'尚','下':'夏','中':'忠','第':'弟','集':'级',
  '话':'划','季':'际','年':'念','月':'阅','日':'曰',
  '新':'心','版':'板','高':'糕','清':'青','原':'源',
  '小':'晓','片':'篇','视':'市','频':'贫','道':'到',
  '动':'洞','画':'话','声':'升','音':'因','文':'闻',
  '明':'名','暗':'黯','光':'广','影':'映','色':'瑟',
  '风':'疯','雨':'语','花':'华','国':'果','家':'佳',
  '战':'站','争':'挣','士':'仕','兵':'宾',
  '皇':'惶','帝':'谛','魔':'磨','鬼':'诡','怪':'乖',
  '精':'经','灵':'铃','妖':'夭','武':'舞','侠':'狭',
  '杀':'刹','血':'雪','刀':'叨','枪':'呛','炮':'泡',
  '时':'石','空':'孔','前':'钱','后':'厚','东':'冬',
  '南':'难','西':'夕','北':'备','开':'凯','关':'官',
  '出':'初','进':'近','去':'趣',
  '短':'短','多':'多','少':'少','真':'贞','假':'价',
  '好':'郝','坏':'怀','对':'队','错':'措','以':'已',
  '从':'从','被':'被','把':'把','将':'将','在':'在',
  '但':'但','就':'就','才':'才','也':'也','很':'狠',
  '又':'又','再':'再','更':'更','最':'最','总':'总',
  '共':'共','只':'只','各':'各','每':'每','任':'任',
  '所':'所','该':'该','本':'本',
};
|
||||
|
||||
// Pool of common/filler CJK characters; magicRenameDir picks from this string
// uniformly at random when inserting low-signal "noise" characters.
const NOISE_CJK = '的了在是不有会可对所之也同与及但或如且乃而岂乎焉兮哉亦犹尚乃其若故盖诸焉欤' +
  '么个着过把对为从以到说时要就这那和上人家下能出得发来年心开物力些长样吧啊哦嗯嚯哇咯呗哟嘿呵哈';
|
||||
|
||||
// ==================== Helpers ====================
|
||||
|
||||
/** Convert Chinese text to homophonic (substitute chars with same sound) */
|
||||
function homophonicText(text: string): string {
|
||||
let result = '';
|
||||
for (const ch of text) {
|
||||
if (/[\u4e00-\u9fff]/.test(ch)) {
|
||||
const homophone = HOMOPHONE_MAP[ch];
|
||||
result += homophone || ch;
|
||||
} else {
|
||||
result += ch;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Convert Chinese text to pinyin-initial-like string (each char → first pinyin letter or fallback) */
|
||||
function pinyinLike(text: string): string {
|
||||
let result = '';
|
||||
for (const ch of text) {
|
||||
if (/[\u4e00-\u9fff]/.test(ch)) {
|
||||
const homophone = HOMOPHONE_MAP[ch];
|
||||
if (homophone) {
|
||||
result += pinyinInitial(homophone);
|
||||
} else {
|
||||
const code = ch.charCodeAt(0);
|
||||
result += String.fromCharCode(97 + (code % 26));
|
||||
}
|
||||
} else if (/[a-zA-Z0-9]/.test(ch)) {
|
||||
result += ch;
|
||||
} else if (/[\s._-]/.test(ch)) {
|
||||
result += '_';
|
||||
}
|
||||
}
|
||||
return result.replace(/_+/g, '_').replace(/^_|_$/g, '');
|
||||
}
|
||||
|
||||
/** Get pinyin initial (first letter of pinyin) for a Chinese character */
|
||||
function pinyinInitial(ch: string): string {
|
||||
const code = ch.charCodeAt(0);
|
||||
if (code >= 0x4E00 && code <= 0x9FFF) {
|
||||
const initials = ['b','p','m','f','d','t','n','l','g','k','h','j','q','x','zh','ch','sh','r','z','c','s','y','w'];
|
||||
const idx = Math.min(Math.floor((code - 0x4E00) / 700), initials.length - 1);
|
||||
return initials[idx];
|
||||
}
|
||||
return ch.toLowerCase();
|
||||
}
|
||||
|
||||
// ==================== Public API ====================
|
||||
|
||||
/**
 * Anti-harmony rename for directories.
 * 80%: light homophonic replacement, 20%: partial pinyin.
 *
 * The result is sanitized to CJK + word characters, has underscore runs
 * collapsed, is truncated to 30 chars, and is suffixed with a 4-hex-char MD5
 * fragment for uniqueness. Output is intentionally randomized (Math.random
 * plus Date.now in the hash), so repeated calls with the same input differ.
 *
 * @param dirName original directory name (may be empty/whitespace-only)
 * @returns obfuscated name, always non-empty (`media_<hash>` fallback)
 */
export function magicRenameDir(dirName: string): string {
  // Hash salt includes the timestamp so every call yields a fresh suffix.
  const hash = crypto.createHash('md5').update(dirName + Date.now()).digest('hex').slice(0, 4);

  let cleanName = dirName.trim().replace(/\s+/g, ' ');
  if (!cleanName) {
    return `media_${hash}`;
  }

  let baseName: string;

  if (Math.random() < 0.2) {
    // Partial pinyin: 30% of CJK chars → pinyin initial, 70% stay as-is
    const chars = [...cleanName];
    const result: string[] = [];
    for (const ch of chars) {
      if (/[\u4e00-\u9fff]/.test(ch) && Math.random() < 0.3) {
        result.push(pinyinInitial(ch));
      } else {
        result.push(ch);
      }
    }
    baseName = result.join('');
  } else {
    // Light homophonic: replace each CJK char, keep everything else as-is
    const chars = [...cleanName];
    const result: string[] = [];
    for (const ch of chars) {
      if (/[\u4e00-\u9fff]/.test(ch)) {
        result.push(HOMOPHONE_MAP[ch] || ch);
      } else {
        result.push(ch);
      }
    }
    baseName = result.join('');

    // Optional: insert 0-2 light noise chars (low probability)
    const noiseCount = Math.random() < 0.3 ? (Math.random() < 0.5 ? 1 : 2) : 0;
    for (let n = 0; n < noiseCount; n++) {
      // Insert at a random position, including before the first or after the
      // last character.
      const pos = Math.floor(Math.random() * (baseName.length + 1));
      const ink = NOISE_CJK[Math.floor(Math.random() * NOISE_CJK.length)];
      baseName = baseName.slice(0, pos) + ink + baseName.slice(pos);
    }
  }

  // Keep only CJK + word chars, squeeze/trim underscores, cap length at 30.
  baseName = baseName.replace(/[^\u4e00-\u9fff\w]/g, '_');
  baseName = baseName.replace(/_+/g, '_').replace(/^_|_$/g, '');
  if (baseName.length > 30) baseName = baseName.slice(0, 30);

  return `${baseName}_${hash}`;
}
|
||||
|
||||
/**
 * Anti-harmony rename for files.
 * KEEPS: episode numbers, quality, language tags, original extension.
 * REPLACES: Chinese title with homophonic/pinyin.
 *
 * Pipeline: strip extension → extract episode/quality/language/year/season
 * tags → obfuscate the remaining title (homophones or pinyin-like, 50/50) →
 * strip known sensitive series names → reassemble as
 * `<title>_<tags…>_<hash><ext>` (max 80 chars before the extension).
 * Output is randomized (Math.random + timestamped hash), so repeated calls
 * with the same input differ.
 *
 * @param filename original file name, extension included
 * @returns obfuscated file name; extension preserved ('.bin' if none found)
 */
export function magicRename(filename: string): string {
  // 8-hex-char MD5 fragment, salted with the timestamp for uniqueness.
  const hash = crypto.createHash('md5').update(filename + Date.now()).digest('hex').slice(0, 8);

  // Detach the extension so the tail of the name can be rewritten freely.
  let ext = '';
  const extMatch = filename.match(/\.[a-zA-Z0-9]+$/);
  if (extMatch) {
    ext = extMatch[0];
    filename = filename.slice(0, -ext.length);
  }

  // Extract and REMEMBER: episode info, quality, language, year
  // First matching pattern wins; the matched text is removed from the name.
  const episodePatterns = [
    { regex: /第\s*(\d+)\s*[集话話話話话回章期]/, format: (m: string) => 'Ep' + m.replace(/[^\d]/g, '') },
    { regex: /Ep\d+|ep\d+/i, format: (m: string) => m.toUpperCase() },
    { regex: /Part\s*\d+/i, format: (m: string) => m.replace(/\s+/g, '') },
    { regex: /E\d{2,}/i, format: (m: string) => m.toUpperCase() },
  ];
  let episodeTag = '';
  for (const { regex, format } of episodePatterns) {
    const m = filename.match(regex);
    if (m) {
      episodeTag = format(m[0]);
      filename = filename.replace(m[0], '');
      break;
    }
  }

  // Extract and REMEMBER: quality tags
  // NOTE(review): no `i` flag here, so variants like "4K" or "BLURAY" in all
  // caps will not match — confirm whether that is intended.
  const qualityPattern = /\b(4k|1080p|1080P|2160p|720p|HD|BluRay|Blu-ray|HDR|WEB-DL|WEBRip|BDRip|REMUX|DV|Dovi|HEVC|x264|x265|H\.264|H\.265)\b/;
  const qualityMatch = filename.match(qualityPattern);
  const qualityTag = qualityMatch ? qualityMatch[0] : '';
  if (qualityMatch) filename = filename.replace(qualityMatch[0], '');

  // Extract and REMEMBER: language tags
  const langPattern = /\b(CHS|CHT|JP|EN|BIG5|GB|粤语|国语|日语|英语|中字|日字|英字|繁体中字)\b/;
  const langMatch = filename.match(langPattern);
  const langTag = langMatch ? langMatch[0] : '';
  if (langMatch) filename = filename.replace(langMatch[0], '');

  // Extract and REMEMBER: year (2000–2099 only)
  const yearMatch = filename.match(/\b(20\d{2})\b/);
  const yearTag = yearMatch ? yearMatch[0] : '';
  if (yearMatch) filename = filename.replace(yearMatch[0], '');

  // Extract and REMEMBER: season info
  const seasonMatch = filename.match(/第?\s*(\d+)\s*[季部期]/);
  const seasonTag = seasonMatch ? `${seasonMatch[1]}季` : '';
  if (seasonMatch) filename = filename.replace(seasonMatch[0], '');

  // Now process the remaining name (mostly Chinese title):
  // collapse separators/brackets/whitespace into single underscores.
  filename = filename.replace(/[._\-【】\[\]()()\s]+/g, '_').trim();

  // 50/50 between homophone substitution and a pinyin-like ASCII slug.
  const useHomophonic = Math.random() > 0.5;
  let titlePart: string;
  if (useHomophonic) {
    titlePart = homophonicText(filename);
    titlePart = titlePart.replace(/[^\u4e00-\u9fff\wa-zA-Z0-9]/g, '_');
    titlePart = titlePart.replace(/_+/g, '_').replace(/^_|_$/g, '');
    if (titlePart.length > 15) titlePart = titlePart.slice(0, 15);
  } else {
    titlePart = pinyinLike(filename);
    titlePart = titlePart.replace(/[^a-zA-Z0-9]/g, '_');
    titlePart = titlePart.replace(/_+/g, '_').replace(/^_|_$/g, '');
    if (titlePart.length > 15) titlePart = titlePart.slice(0, 15);
  }

  // Remove sensitive keywords from title part
  const sensitiveWords = /斗破|完美|凡人|仙逆|遮天|吞噬|大主宰|绝世|武动|星辰变|一念永恒|修罗|神墓|长生|剑来|诡秘|全职|斗罗|盘龙|雪鹰|莽荒纪|天珠变|神印王座|牧神记|沧元图|紫川|百炼成神|大王饶命|全球高考/ig;
  titlePart = titlePart.replace(sensitiveWords, '');
  titlePart = titlePart.replace(/_+/g, '_').replace(/^_|_$/g, '');

  // Build preserved tags
  const tags: string[] = [];
  if (seasonTag) tags.push(seasonTag);
  if (episodeTag) tags.push(episodeTag);
  if (qualityTag) tags.push(qualityTag.toUpperCase());
  if (langTag) tags.push(langTag);
  if (yearTag) tags.push(yearTag);
  tags.push(hash); // Always add hash for uniqueness

  const newExt = ext || '.bin';

  // filter(Boolean) drops an empty title so we never emit a leading '_'.
  const parts = [titlePart, ...tags].filter(Boolean);
  let result = parts.join('_');

  if (result.length > 80) {
    result = result.slice(0, 80);
  }

  // Very short results get a random prefix so names stay plausibly long.
  if (result.length < 10) {
    const filler = crypto.randomBytes(4).toString('hex');
    result = `${filler}_${result}`;
  }

  return result + newExt;
}
|
||||
409
packages/backend/src/cloud/drivers/quark-share.ts
Normal file
409
packages/backend/src/cloud/drivers/quark-share.ts
Normal file
@@ -0,0 +1,409 @@
|
||||
import { getHeaders, getCommonParams, makeQuery, getMparam, humanDelay, randomSharePwd, apiFetch, QuarkFile } from './quark-api';
|
||||
|
||||
/**
|
||||
* 分享模块 — 分享链接解析、转存任务、创建分享链接。
|
||||
*/
|
||||
|
||||
const BASE_URL = 'https://drive-pc.quark.cn';
|
||||
|
||||
// ==================== Acquire Stoken ====================
|
||||
|
||||
/**
|
||||
* Acquire stoken for a share link (needed for detail/save).
|
||||
*/
|
||||
export async function acquireStoken(cookie: string, pwdId: string): Promise<string | null> {
|
||||
for (let attempt = 0; attempt < 3; attempt++) {
|
||||
try {
|
||||
const params = new URLSearchParams(getCommonParams());
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/share/sharepage/token?${params.toString()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ pwd_id: pwdId, passcode: '' }),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
},
|
||||
);
|
||||
if (!resp.ok) {
|
||||
if (attempt < 2) continue;
|
||||
return null;
|
||||
}
|
||||
const data = await resp.json() as any;
|
||||
if (data.status === 200 && data.data?.stoken) {
|
||||
return data.data.stoken;
|
||||
}
|
||||
return null;
|
||||
} catch {
|
||||
if (attempt >= 2) return null;
|
||||
await new Promise(r => setTimeout(r, 500 * (attempt + 1)));
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// ==================== Get Share Files ====================
|
||||
|
||||
/**
|
||||
* Fetch detail at a given pdir_fid within a share.
|
||||
*/
|
||||
export async function getDetailAt(
|
||||
cookie: string,
|
||||
pwdId: string,
|
||||
stoken: string,
|
||||
pdirFid: string,
|
||||
): Promise<QuarkFile[]> {
|
||||
const params = new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
pwd_id: pwdId,
|
||||
stoken,
|
||||
pdir_fid: pdirFid,
|
||||
force: '0',
|
||||
_page: '1',
|
||||
_size: '50',
|
||||
_fetch_banner: '0',
|
||||
_fetch_share: '1',
|
||||
_fetch_total: '1',
|
||||
_sort: 'file_type:asc,updated_at:desc',
|
||||
ver: '2',
|
||||
fetch_share_full_path: '0',
|
||||
});
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/share/sharepage/detail?${params.toString()}`,
|
||||
{ headers: getHeaders(cookie), signal: AbortSignal.timeout(15000) },
|
||||
);
|
||||
if (!resp.ok) return [];
|
||||
const data = await resp.json() as any;
|
||||
if (data.status !== 200) return [];
|
||||
return (data.data?.list || []).filter((f: any) => f.fid).map((f: any) => ({
|
||||
fid: f.fid,
|
||||
file_name: f.file_name,
|
||||
share_fid_token: f.share_fid_token || '',
|
||||
dir: f.dir || false,
|
||||
size: f.size || 0,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively collect files from a share.
|
||||
* If the share contains a single directory, drill into it to list contents
|
||||
* but still save the directory itself.
|
||||
*/
|
||||
export async function getShareFiles(
|
||||
cookie: string,
|
||||
pwdId: string,
|
||||
stoken: string,
|
||||
): Promise<{ files: QuarkFile[]; topDir: boolean; childFiles?: QuarkFile[] } | null> {
|
||||
try {
|
||||
const topLevel = await getDetailAt(cookie, pwdId, stoken, '0');
|
||||
if (!topLevel || topLevel.length === 0) return null;
|
||||
|
||||
// If the share is a single directory, we save the directory itself
|
||||
// and fetch its contents for renaming later
|
||||
if (topLevel.length === 1 && topLevel[0].dir) {
|
||||
const innerFiles = await getDetailAt(cookie, pwdId, stoken, topLevel[0].fid);
|
||||
return {
|
||||
files: topLevel,
|
||||
topDir: true,
|
||||
childFiles: innerFiles || [],
|
||||
};
|
||||
}
|
||||
|
||||
// Multiple top-level items: save them directly
|
||||
return {
|
||||
files: topLevel,
|
||||
topDir: false,
|
||||
};
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== Save Files (share → cloud) ====================
|
||||
|
||||
/**
|
||||
* Save shared files to the user's cloud directory.
|
||||
*/
|
||||
export async function saveFiles(
|
||||
cookie: string,
|
||||
pwdId: string,
|
||||
stoken: string,
|
||||
fids: string[],
|
||||
fidTokens: string[],
|
||||
toPdirFid: string,
|
||||
): Promise<{ success: boolean; message: string; taskId?: string }> {
|
||||
try {
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/share/sharepage/save?${makeQuery()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
fid_list: fids,
|
||||
fid_token_list: fidTokens,
|
||||
to_pdir_fid: toPdirFid,
|
||||
pwd_id: pwdId,
|
||||
stoken,
|
||||
pdir_fid: '0',
|
||||
scene: 'link',
|
||||
}),
|
||||
signal: AbortSignal.timeout(30000),
|
||||
},
|
||||
);
|
||||
const data = await resp.json() as any;
|
||||
if (data.status === 200 && data.data?.task_id) {
|
||||
return { success: true, message: 'Save task created', taskId: data.data.task_id };
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
message: data.message === 'require login [guest]'
|
||||
? '夸克网盘 Cookie 已过期,请在后台重新配置 Cookie'
|
||||
: (data.message || `API 返回错误 (status=${data.status}, code=${data.code})`),
|
||||
};
|
||||
} catch (err: any) {
|
||||
return { success: false, message: err.message || 'Network error' };
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== Wait for Save Task ====================
|
||||
|
||||
/**
|
||||
* Poll task status until complete or timeout.
|
||||
* Returns the saved file FIDs (save_as_top_fids).
|
||||
*/
|
||||
export async function waitForTask(cookie: string, taskId: string, timeoutMs: number): Promise<string[] | null> {
|
||||
const start = Date.now();
|
||||
let retryIndex = 0;
|
||||
|
||||
while (Date.now() - start < timeoutMs) {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
uc_param_str: '',
|
||||
task_id: taskId,
|
||||
retry_index: String(retryIndex),
|
||||
__dt: String(Math.floor(Math.random() * 240000 + 60000)),
|
||||
__t: String(Date.now() / 1000),
|
||||
});
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/task?${params.toString()}`,
|
||||
{ headers: getHeaders(cookie), signal: AbortSignal.timeout(10000) },
|
||||
);
|
||||
const data = await resp.json() as any;
|
||||
if (data.status === 200) {
|
||||
if (data.data?.status === 2) {
|
||||
// Task completed
|
||||
const savedFids: string[] = data.data?.save_as?.save_as_top_fids || [];
|
||||
return savedFids;
|
||||
}
|
||||
// Still in progress
|
||||
retryIndex++;
|
||||
}
|
||||
} catch {
|
||||
// Network error, retry
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 1000));
|
||||
}
|
||||
return null; // Timeout
|
||||
}
|
||||
|
||||
// ==================== Rename File ====================
|
||||
|
||||
/**
|
||||
* Rename a file by its FID.
|
||||
*/
|
||||
export async function renameFile(cookie: string, fid: string, newName: string): Promise<boolean> {
|
||||
try {
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/rename?${makeQuery()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ fid, file_name: newName }),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
},
|
||||
);
|
||||
const data = await resp.json() as any;
|
||||
return data.status === 200 || data.code === 0;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== Create Share Link ====================
|
||||
|
||||
/**
 * Create a share link for a file/folder.
 * Flow: create task → poll for share_id → submit to get short URL.
 *
 * Tries share_type '1' first, falling back to '0'; for each type it creates
 * the share task, waits for its share id (up to 20s), then submits the
 * password to obtain the short URL. The first fully successful type wins;
 * the last failure message is reported otherwise. Never throws.
 */
export async function createShareLink(cookie: string, fileId: string): Promise<{ success: boolean; shareUrl?: string; sharePwd?: string; message: string }> {
  try {
    const sharePwd = randomSharePwd();

    // Try different share_type values (1 = 7-day expiry, 0 = no expiry)
    const shareTypes = ['1', '0'];
    let lastError = '';

    for (const st of shareTypes) {
      // Randomized pause between attempts to look less bot-like.
      await humanDelay();
      // Step 1: Create share task - get task_id
      const response = await fetch(
        `${BASE_URL}/1/clouddrive/share?${makeQuery()}`,
        {
          method: 'POST',
          headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
          body: JSON.stringify({
            fid_list: [fileId],
            share_type: st,
            url_type: '1',
            share_pwd: sharePwd,
          }),
          signal: AbortSignal.timeout(15000),
        },
      );
      const data = await response.json() as any;
      const taskId = data.data?.task_id;
      if (!taskId) {
        lastError = data.message || `share_type=${st} 失败`;
        console.error('[Quark] Create share task failed (type=%s):', st, data.message || JSON.stringify(data).slice(0, 200));
        continue;
      }

      // Step 2: Poll task until complete
      const result = await waitForShareTask(cookie, taskId, 20000);
      if (!result?.shareId) {
        lastError = result?.message || '任务超时';
        console.error('[Quark] Wait for share task failed (type=%s):', st, result?.message || 'unknown');
        continue;
      }

      // Step 3: Submit share via /password endpoint
      const shareUrl = await submitShare(cookie, result.shareId, sharePwd);
      if (shareUrl) {
        return {
          success: true,
          shareUrl,
          sharePwd,
          message: `分享链接已生成(密码:${sharePwd})`,
        };
      }
      lastError = '提交密码后未获取到短链接';
    }

    return { success: false, message: lastError || '🤷 各种姿势都试过了,就是分享不出来…' };
  } catch (err: any) {
    console.error('[Quark] createShareLink error:', err.message);
    return { success: false, message: err.message || '🌩️ 网络开小差了,再试试?' };
  }
}
|
||||
|
||||
/**
|
||||
* Submit share via /password endpoint to get the actual short URL.
|
||||
*/
|
||||
async function submitShare(cookie: string, shareId: string, sharePwd?: string): Promise<string | null> {
|
||||
try {
|
||||
const response = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/share/password?${makeQuery()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ share_id: shareId, share_pwd: sharePwd || '' }),
|
||||
signal: AbortSignal.timeout(15000),
|
||||
},
|
||||
);
|
||||
const data = await response.json() as any;
|
||||
if (data.status === 200 && data.data?.share_url) {
|
||||
console.log('[Quark] Share short URL:', data.data.share_url);
|
||||
return data.data.share_url;
|
||||
}
|
||||
console.log('[Quark] /password response:', JSON.stringify(data).slice(0, 300));
|
||||
console.error('[Quark] /password FAIL status=%s msg=%s', data.status, data.message || '');
|
||||
return null;
|
||||
} catch (err) {
|
||||
console.log('[Quark] /password error:', err);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Poll share task until complete and extract share URL/shortcode.
 *
 * On completion (status 2) the share id is extracted by trying, in order:
 * direct share_url, nested share.url / share.short_url, a validated
 * share_id, a regex sweep of the whole response, known URL-bearing fields,
 * and finally any share_id at all. Status 3 means the task failed; anything
 * else keeps polling once per second until `timeoutMs` elapses.
 */
async function waitForShareTask(cookie: string, taskId: string, timeoutMs: number): Promise<{ shareId?: string; message?: string } | null> {
  const start = Date.now();
  let retryIndex = 0;
  while (Date.now() - start < timeoutMs) {
    try {
      const params = new URLSearchParams({
        ...getCommonParams(),
        uc_param_str: '',
        task_id: taskId,
        retry_index: String(retryIndex),
        __dt: String(Math.floor(Math.random() * 240000 + 60000)),
        __t: String(Date.now() / 1000),
      });
      const resp = await fetch(
        `${BASE_URL}/1/clouddrive/task?${params.toString()}`,
        { headers: getHeaders(cookie), signal: AbortSignal.timeout(10000) },
      );
      const data = await resp.json() as any;
      if (data.data?.status === 2) {
        // Task completed — try multiple extraction approaches
        // 1. Direct share_url field
        if (data.data?.share_url) {
          const match = data.data.share_url.match(/\/s\/([a-zA-Z0-9]+)/);
          if (match) return { shareId: match[1] };
        }

        // 2. Nested share object
        if (data.data?.share?.url) {
          const match = data.data.share.url.match(/\/s\/([a-zA-Z0-9]+)/);
          if (match) return { shareId: match[1] };
        }
        if (data.data?.share?.short_url) {
          const match = data.data.share.short_url.match(/\/s\/([a-zA-Z0-9]+)/);
          if (match) return { shareId: match[1] };
        }

        // 3. share_id — validate it's a reasonable short code (8-20 chars, not UUID-like)
        const shareId = data.data?.share_id;
        if (shareId && shareId.length <= 20 && shareId.length >= 8) {
          return { shareId };
        }

        // 4. Regex search through the full response for a URL pattern
        const str = JSON.stringify(data);
        const urlMatch = str.match(/https?:\/\/pan\.quark\.cn\/s\/([a-zA-Z0-9]{6,16})/);
        if (urlMatch) {
          return { shareId: urlMatch[1] };
        }

        // 5. Extract from any URL field in the response
        const urlFields = ['url', 'link', 'share_url', 'short_url', 'share_link'];
        for (const field of urlFields) {
          const val = data.data?.[field] || data.data?.share?.[field];
          if (typeof val === 'string' && val.includes('pan.quark.cn/s/')) {
            const m = val.match(/\/s\/([a-zA-Z0-9]+)/);
            if (m) return { shareId: m[1] };
          }
        }

        // 6. Log full share task response for debugging
        console.log('[Quark] Full share task response:', JSON.stringify(data, null, 2).slice(0, 2000));

        // 7. Even if shareId is UUID-like (32 hex chars), use it anyway as last resort
        if (shareId) {
          return { shareId };
        }

        return { message: 'Share task completed but no share URL found' };
      }
      if (data.data?.status === 3) {
        // Terminal failure reported by the task endpoint.
        return { message: data.message || 'Share task failed' };
      }
      retryIndex++;
    } catch {
      // Retry on transient network errors.
    }
    await new Promise(r => setTimeout(r, 1000));
  }
  return { message: 'Share task timed out' };
}
|
||||
308
packages/backend/src/cloud/drivers/quark-storage.ts
Normal file
308
packages/backend/src/cloud/drivers/quark-storage.ts
Normal file
@@ -0,0 +1,308 @@
|
||||
import { getHeaders, getCommonParams, makeQuery, getMparam, humanDelay, dailyFolderName, formatBytes, apiFetch, listDir, listDirAllPages, listRootDir, QuarkFile } from './quark-api';
|
||||
import { acquireStoken, getShareFiles, saveFiles, waitForTask } from './quark-share';
|
||||
|
||||
/**
|
||||
* 转存 & 存储管理模块。
|
||||
* 处理分享链接解析 → 转存 → 查/创建目标文件夹 → 文件重命名 → 递归统计。
|
||||
*/
|
||||
|
||||
// ==================== saveFromShare — 核心转存流水线 ====================
|
||||
|
||||
/**
|
||||
* Save files from a share link → magic rename → create shared link.
|
||||
*
|
||||
* Flow: token → detail → save → wait_task → rename → share
|
||||
*/
|
||||
export async function saveFromShare(
|
||||
cookie: string,
|
||||
nickname: string | undefined,
|
||||
shareUrl: string,
|
||||
sourceTitle?: string,
|
||||
): Promise<{
|
||||
success: boolean;
|
||||
message: string;
|
||||
shareUrl?: string;
|
||||
sharePwd?: string;
|
||||
folderName?: string;
|
||||
taskId?: string;
|
||||
renamed?: string[];
|
||||
fileCount?: number;
|
||||
folderCount?: number;
|
||||
originalFolderName?: string;
|
||||
}> {
|
||||
try {
|
||||
// Parse share token from URL
|
||||
const urlObj = new URL(shareUrl);
|
||||
const pwdId = urlObj.pathname.split('/').filter(Boolean).pop();
|
||||
if (!pwdId) {
|
||||
return { success: false, message: 'Invalid share URL: could not extract share token' };
|
||||
}
|
||||
|
||||
// Step 1: Acquire stoken
|
||||
const stoken = await acquireStoken(cookie, pwdId);
|
||||
if (!stoken) {
|
||||
return { success: false, message: '😅 Oops!资源好像偷偷溜走了,换个链接试试吧~' };
|
||||
}
|
||||
|
||||
// Step 2: Get share detail
|
||||
const shareInfo = await getShareFiles(cookie, pwdId, stoken);
|
||||
if (!shareInfo || !shareInfo.files || shareInfo.files.length === 0) {
|
||||
return { success: false, message: '🌚 空的!这个分享里啥都没有…' };
|
||||
}
|
||||
|
||||
const { files: topFiles, topDir, childFiles } = shareInfo;
|
||||
const originalFolderName = topFiles[0]?.file_name || '';
|
||||
const fids = topFiles.map(f => f.fid);
|
||||
const fidTokens = topFiles.map(f => f.share_fid_token);
|
||||
|
||||
// 按日期创建/查找文件夹,每天的转存存入当天文件夹
|
||||
await humanDelay();
|
||||
const saveDirName = dailyFolderName();
|
||||
console.log(`[Quark] saveFromShare: looking for/create dir "${saveDirName}"`);
|
||||
const saveDirFid = await findOrCreateDir(cookie, saveDirName);
|
||||
const targetPdirFid = saveDirFid || '0';
|
||||
if (saveDirFid) {
|
||||
console.log(`[Quark] Using save directory: ${saveDirName} (fid: ${saveDirFid})`);
|
||||
} else {
|
||||
console.log(`[Quark] WARNING: failed to create/find dir "${saveDirName}", saving to root`);
|
||||
}
|
||||
|
||||
// Step 3: Save top-level item(s) to the target directory
|
||||
const saveResult = await saveFiles(cookie, pwdId, stoken, fids, fidTokens.filter(Boolean) as string[], targetPdirFid);
|
||||
if (!saveResult.success) {
|
||||
return saveResult;
|
||||
}
|
||||
|
||||
const taskId = saveResult.taskId!;
|
||||
|
||||
// Step 4: Wait for save task to complete (poll up to 30s)
|
||||
const savedFids = await waitForTask(cookie, taskId, 30000);
|
||||
if (!savedFids || savedFids.length === 0) {
|
||||
return { success: true, message: '文件已保存,但获取保存结果超时' };
|
||||
}
|
||||
|
||||
// Step 5: Magic rename files — with random delay to avoid detection
|
||||
await humanDelay();
|
||||
const renamed: Array<{ original: string; renamed: string }> = [];
|
||||
let shareFid = '';
|
||||
let savedFolderName = '';
|
||||
let newInnerDirName = '';
|
||||
|
||||
if (topDir && childFiles && childFiles.length > 0) {
|
||||
// ── Single folder share ──
|
||||
const savedDirFid = savedFids[0];
|
||||
shareFid = savedDirFid;
|
||||
savedFolderName = topFiles[0]?.file_name || '';
|
||||
} else {
|
||||
// ── Multiple files at top level ──
|
||||
shareFid = savedFids[0];
|
||||
savedFolderName = topFiles[0]?.file_name || '';
|
||||
}
|
||||
|
||||
// Step 6: Create share link FIRST (before rename), so all files are guaranteed to be shared
|
||||
await humanDelay();
|
||||
let shareUrlResult = '';
|
||||
let sharePwdResult = '';
|
||||
let shareMsg = '';
|
||||
let successCount = 0; // total items (files + folders) actually saved
|
||||
|
||||
const { createShareLink } = await import('./quark-share');
|
||||
if (shareFid) {
|
||||
const shareResult = await createShareLink(cookie, shareFid);
|
||||
if (shareResult.success && shareResult.shareUrl) {
|
||||
shareUrlResult = shareResult.shareUrl;
|
||||
if (shareResult.sharePwd) sharePwdResult = shareResult.sharePwd;
|
||||
} else {
|
||||
shareMsg = `(分享失败:${shareResult.message})`;
|
||||
}
|
||||
}
|
||||
|
||||
const { magicRenameDir, magicRename } = await import('./quark-rename');
|
||||
const { renameFile } = await import('./quark-share');
|
||||
|
||||
// Step 7: Rename files AFTER creating the share link (anti-harmony, won't affect the share)
|
||||
if (topDir && childFiles && childFiles.length > 0) {
|
||||
// ── Single folder share ──
|
||||
const savedDirFid = savedFids[0];
|
||||
|
||||
// List files inside the saved directory
|
||||
const dirFiles = await listDir(cookie, savedDirFid);
|
||||
if (dirFiles && dirFiles.length > 0) {
|
||||
for (const file of dirFiles) {
|
||||
if (file.dir) continue;
|
||||
const newName = magicRename(file.file_name);
|
||||
const renameOk = await renameFile(cookie, file.fid, newName);
|
||||
if (renameOk) {
|
||||
renamed.push({ original: file.file_name, renamed: newName });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Also rename the inner folder itself (the actual shared folder)
|
||||
const innerDirOriginalName = sourceTitle || topFiles[0]?.file_name || '';
|
||||
if (innerDirOriginalName) {
|
||||
newInnerDirName = magicRenameDir(innerDirOriginalName);
|
||||
const innerDirRenameOk = await renameFile(cookie, savedDirFid, newInnerDirName);
|
||||
if (innerDirRenameOk) {
|
||||
console.log(`[Quark] Renamed inner folder: ${innerDirOriginalName} → ${newInnerDirName}`);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// ── Multiple files at top level ──
|
||||
for (let i = 0; i < savedFids.length && i < topFiles.length; i++) {
|
||||
const originalName = topFiles[i].file_name;
|
||||
if (topFiles[i].dir) continue;
|
||||
const newName = magicRename(originalName);
|
||||
const renameOk = await renameFile(cookie, savedFids[i], newName);
|
||||
if (renameOk) {
|
||||
renamed.push({ original: originalName, renamed: newName });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Step 7.5: 广告关键词清理 + 创建警示文件夹
|
||||
if (shareFid) {
|
||||
try {
|
||||
const { runAdCleanup } = await import('./quark-ad-cleanup');
|
||||
const adResult = await runAdCleanup(cookie, shareFid);
|
||||
if (adResult.adDeleted > 0) {
|
||||
console.log(`[Quark] 广告清理完成: 删除了 ${adResult.adDeleted} 个广告文件/文件夹`);
|
||||
}
|
||||
if (adResult.warningDirs > 0) {
|
||||
console.log(`[Quark] 已创建 ${adResult.warningDirs} 个警示文件夹`);
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.log(`[Quark] 广告清理/警示文件夹创建失败(非致命): ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Step 8: DAY FOLDER STAYS AS-IS (e.g. "2026-05-03")
|
||||
// DO NOT rename the date folder — it serves as the organizational container.
|
||||
savedFolderName = newInnerDirName ? `${saveDirName}/${newInnerDirName}` : saveDirName;
|
||||
|
||||
// Recursively count files and folders from saved cloud directory
|
||||
let fileCount = 0;
|
||||
let folderCount = 0;
|
||||
if (shareFid) {
|
||||
try {
|
||||
const counts = await countRecursive(cookie, shareFid);
|
||||
fileCount = counts.fileCount;
|
||||
folderCount = counts.folderCount;
|
||||
} catch {
|
||||
console.log('[Quark] Recursive count failed, using fallback');
|
||||
}
|
||||
}
|
||||
// If recursive count returned nothing, try fallback
|
||||
if (fileCount === 0 && folderCount === 0) {
|
||||
if (topDir && childFiles) {
|
||||
folderCount = 1 + childFiles.filter(f => f.dir).length;
|
||||
fileCount = childFiles.filter(f => !f.dir).length;
|
||||
} else {
|
||||
folderCount = topFiles.filter(f => f.dir).length;
|
||||
fileCount = topFiles.filter(f => !f.dir).length;
|
||||
}
|
||||
}
|
||||
|
||||
const renameMsg = renamed.length > 0
|
||||
? `,已重命名 ${renamed.length} 个文件`
|
||||
: '';
|
||||
const folderMsg = savedFolderName ? `到文件夹「${savedFolderName}」` : '';
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `已保存${folderMsg}${renameMsg}${shareMsg}`,
|
||||
shareUrl: shareUrlResult || undefined,
|
||||
sharePwd: sharePwdResult || undefined,
|
||||
folderName: savedFolderName,
|
||||
taskId,
|
||||
renamed: renamed.map(r => `${r.original} → ${r.renamed}`),
|
||||
fileCount,
|
||||
folderCount,
|
||||
originalFolderName,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return { success: false, message: err.message || 'Network error' };
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== Dir Management ====================
|
||||
|
||||
/**
|
||||
* Create a new directory at root.
|
||||
*/
|
||||
export async function createDir(cookie: string, dirName: string): Promise<string | null> {
|
||||
try {
|
||||
const resp = await fetch(
|
||||
`https://drive-pc.quark.cn/1/clouddrive/file?${makeQuery()}`,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { ...getHeaders(cookie), 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
pdir_fid: '0',
|
||||
file_name: dirName,
|
||||
dir: true,
|
||||
dir_path: '',
|
||||
}),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
},
|
||||
);
|
||||
const data = await resp.json() as any;
|
||||
if (data.status === 200 && data.data?.fid) {
|
||||
console.log(`[Quark] Created dir "${dirName}" (fid: ${data.data.fid})`);
|
||||
return data.data.fid;
|
||||
}
|
||||
console.log(`[Quark] createDir API returned non-200: status=${data.status} msg=${data.message}`);
|
||||
return null;
|
||||
} catch (err: any) {
|
||||
console.log(`[Quark] createDir error: ${err.message}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find an existing directory by name, or create it if not found.
|
||||
*/
|
||||
export async function findOrCreateDir(cookie: string, dirName: string): Promise<string | null> {
|
||||
try {
|
||||
const rootFiles = await listDirAllPages(cookie, '0');
|
||||
const existing = rootFiles.find(f => f.dir && f.file_name === dirName);
|
||||
if (existing?.fid) {
|
||||
console.log(`[Quark] Found existing daily folder: ${dirName} (fid: ${existing.fid})`);
|
||||
return existing.fid;
|
||||
}
|
||||
console.log(`[Quark] Daily folder "${dirName}" not found, creating...`);
|
||||
} catch (err: any) {
|
||||
console.log(`[Quark] findOrCreateDir list error: ${err.message}`);
|
||||
}
|
||||
const fid = await createDir(cookie, dirName);
|
||||
console.log(`[Quark] createDir result for "${dirName}": ${fid || 'null'}`);
|
||||
return fid;
|
||||
}
|
||||
|
||||
// ==================== Recursive Count ====================
|
||||
|
||||
/**
|
||||
* Recursively count files and folders for a saved cloud directory.
|
||||
*/
|
||||
export async function countRecursive(cookie: string, pdirFid: string): Promise<{ fileCount: number; folderCount: number }> {
|
||||
let fileCount = 0;
|
||||
let folderCount = 0;
|
||||
const stack = [pdirFid];
|
||||
const visited = new Set<string>();
|
||||
while (stack.length > 0) {
|
||||
const fid = stack.pop()!;
|
||||
if (visited.has(fid)) continue;
|
||||
visited.add(fid);
|
||||
const files = await listDir(cookie, fid);
|
||||
if (!files) continue;
|
||||
for (const f of files) {
|
||||
if (f.dir) {
|
||||
folderCount++;
|
||||
stack.push(f.fid);
|
||||
} else {
|
||||
fileCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
return { fileCount, folderCount };
|
||||
}
|
||||
122
packages/backend/src/cloud/drivers/quark.driver.ts
Executable file
122
packages/backend/src/cloud/drivers/quark.driver.ts
Executable file
@@ -0,0 +1,122 @@
|
||||
/**
|
||||
* QuarkDriver — 夸克网盘统一驱动
|
||||
*
|
||||
* 为保持向后兼容性,此类将所有方法委托到子模块。
|
||||
* 新代码应直接导入子模块函数。
|
||||
*
|
||||
* 模块结构:
|
||||
* quark-api.ts — HTTP 封装、headers、params、共享工具函数
|
||||
* quark-auth.ts — Cookie 验证
|
||||
* quark-storage.ts — 转存流水线、目录管理、递归统计
|
||||
* quark-share.ts — 分享链接解析、转存任务、创建分享链接
|
||||
* quark-rename.ts — 防和谐重命名(文件名/目录名)
|
||||
* quark-cleanup.ts — 容量信息、空间清理
|
||||
* quark-driver.ts — 统一导出类(兼容旧代码)
|
||||
*/
|
||||
|
||||
import { QuarkConfig } from './quark-api';
|
||||
import { validate } from './quark-auth';
|
||||
import { saveFromShare, createDir, findOrCreateDir, countRecursive } from './quark-storage';
|
||||
import { createShareLink, renameFile } from './quark-share';
|
||||
import {
|
||||
getStorageInfoQuick, getStorageInfo,
|
||||
calculateUsedSpace, trashFiles, emptyTrash,
|
||||
cleanupOldDateFolders, cleanupBySpaceThreshold,
|
||||
} from './quark-cleanup';
|
||||
|
||||
export type { QuarkConfig, QuarkFile } from './quark-api';
|
||||
export * from './quark-api';
|
||||
export * from './quark-auth';
|
||||
export * from './quark-storage';
|
||||
export * from './quark-share';
|
||||
export * from './quark-rename';
|
||||
export * from './quark-cleanup';
|
||||
|
||||
export { validate } from './quark-auth';
|
||||
|
||||
/**
|
||||
* QuarkDriver — 向后兼容的驱动类。
|
||||
* 所有方法委托到纯函数模块,不持有状态。
|
||||
*/
|
||||
export class QuarkDriver {
|
||||
private config: QuarkConfig;
|
||||
|
||||
constructor(config: QuarkConfig) {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
get cookie(): string {
|
||||
return this.config.cookie;
|
||||
}
|
||||
|
||||
// ==================== Auth ====================
|
||||
|
||||
async validate(): Promise<boolean> {
|
||||
return validate(this.config.cookie);
|
||||
}
|
||||
|
||||
// ==================== Storage (Save from Share) ====================
|
||||
|
||||
async saveFromShare(shareUrl: string, sourceTitle?: string) {
|
||||
return saveFromShare(this.config.cookie, this.config.nickname, shareUrl, sourceTitle);
|
||||
}
|
||||
|
||||
async createDir(dirName: string): Promise<string | null> {
|
||||
return createDir(this.config.cookie, dirName);
|
||||
}
|
||||
|
||||
async findOrCreateDir(dirName: string): Promise<string | null> {
|
||||
return findOrCreateDir(this.config.cookie, dirName);
|
||||
}
|
||||
|
||||
async countRecursive(pdirFid: string) {
|
||||
return countRecursive(this.config.cookie, pdirFid);
|
||||
}
|
||||
|
||||
// ==================== Share ====================
|
||||
|
||||
async createShareLink(fileId: string) {
|
||||
return createShareLink(this.config.cookie, fileId);
|
||||
}
|
||||
|
||||
async renameFile(fid: string, newName: string): Promise<boolean> {
|
||||
return renameFile(this.config.cookie, fid, newName);
|
||||
}
|
||||
|
||||
// ==================== Storage Info ====================
|
||||
|
||||
async getStorageInfoQuick() {
|
||||
return getStorageInfoQuick(this.config.cookie);
|
||||
}
|
||||
|
||||
async getStorageInfo() {
|
||||
return getStorageInfo(this.config.cookie);
|
||||
}
|
||||
|
||||
async calculateUsedSpace(): Promise<number> {
|
||||
return calculateUsedSpace(this.config.cookie);
|
||||
}
|
||||
|
||||
// ==================== Cleanup ====================
|
||||
|
||||
async listRootDir() {
|
||||
const { listRootDir } = await import('./quark-api');
|
||||
return listRootDir(this.config.cookie);
|
||||
}
|
||||
|
||||
async trashFiles(fids: string[]): Promise<boolean> {
|
||||
return trashFiles(this.config.cookie, fids);
|
||||
}
|
||||
|
||||
async emptyTrash(): Promise<boolean> {
|
||||
return emptyTrash(this.config.cookie);
|
||||
}
|
||||
|
||||
async cleanupOldDateFolders(days: number) {
|
||||
return cleanupOldDateFolders(this.config.cookie, days);
|
||||
}
|
||||
|
||||
async cleanupBySpaceThreshold(thresholdPercent: number, deletePercent: number) {
|
||||
return cleanupBySpaceThreshold(this.config.cookie, thresholdPercent, deletePercent);
|
||||
}
|
||||
}
|
||||
70
packages/backend/src/cloud/error-codes.ts
Normal file
70
packages/backend/src/cloud/error-codes.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
// Standard error codes for all cloud drivers
|
||||
export const ErrCode = {
|
||||
COOKIE_EXPIRED: 'COOKIE_EXPIRED',
|
||||
COOKIE_INVALID: 'COOKIE_INVALID',
|
||||
TOKEN_EXPIRED: 'TOKEN_EXPIRED',
|
||||
SHARE_NOT_FOUND: 'SHARE_NOT_FOUND',
|
||||
SHARE_EXPIRED: 'SHARE_EXPIRED',
|
||||
PASSWORD_REQUIRED: 'PASSWORD_REQUIRED',
|
||||
PASSWORD_WRONG: 'PASSWORD_WRONG',
|
||||
CAPACITY_FULL: 'CAPACITY_FULL',
|
||||
FILE_EXISTS: 'FILE_EXISTS',
|
||||
RATE_LIMITED: 'RATE_LIMITED',
|
||||
TRANSFER_FAILED: 'TRANSFER_FAILED',
|
||||
NETWORK_ERROR: 'NETWORK_ERROR',
|
||||
UNSUPPORTED: 'UNSUPPORTED',
|
||||
UNKNOWN: 'UNKNOWN',
|
||||
} as const;
|
||||
|
||||
export type ErrorCode = typeof ErrCode[keyof typeof ErrCode];
|
||||
|
||||
const messages: Record<string, string> = {
|
||||
[ErrCode.COOKIE_EXPIRED]: 'Cookie已过期,请重新登录',
|
||||
[ErrCode.COOKIE_INVALID]: 'Cookie无效,请检查配置',
|
||||
[ErrCode.TOKEN_EXPIRED]: 'Token已过期,请刷新',
|
||||
[ErrCode.SHARE_NOT_FOUND]: '分享链接不存在或已被删除',
|
||||
[ErrCode.SHARE_EXPIRED]: '分享链接已过期',
|
||||
[ErrCode.PASSWORD_REQUIRED]: '需要提取码',
|
||||
[ErrCode.PASSWORD_WRONG]: '提取码错误',
|
||||
[ErrCode.CAPACITY_FULL]: '网盘容量不足',
|
||||
[ErrCode.RATE_LIMITED]: '请求过于频繁,请稍后重试',
|
||||
[ErrCode.TRANSFER_FAILED]: '转存失败',
|
||||
[ErrCode.NETWORK_ERROR]: '网络请求失败',
|
||||
[ErrCode.UNKNOWN]: '未知错误',
|
||||
};
|
||||
|
||||
export function errorResponse(code: ErrorCode, detail?: string) {
|
||||
return {
|
||||
success: false,
|
||||
code,
|
||||
message: messages[code] + (detail ? ': ' + detail : ''),
|
||||
};
|
||||
}
|
||||
|
||||
export class TransferError extends Error {
|
||||
code: ErrorCode;
|
||||
detail?: string;
|
||||
cookieExpired: boolean;
|
||||
|
||||
constructor(code: ErrorCode, detail?: string) {
|
||||
super(messages[code] + (detail ? ': ' + detail : ''));
|
||||
this.code = code;
|
||||
this.detail = detail;
|
||||
this.cookieExpired = (code === ErrCode.COOKIE_EXPIRED || code === ErrCode.COOKIE_INVALID);
|
||||
}
|
||||
}
|
||||
|
||||
/** Detect error code from driver result message (for untagged drivers) */
|
||||
export function detectErrorCode(result: { message?: string; cookieExpired?: boolean }): ErrorCode | null {
|
||||
if (!result || !result.message) return null;
|
||||
if (result.cookieExpired) return ErrCode.COOKIE_EXPIRED;
|
||||
const msg = result.message.toLowerCase();
|
||||
if (msg.includes('cookie') || msg.includes('登录') || msg.includes('bdstoken')) return ErrCode.COOKIE_EXPIRED;
|
||||
if (msg.includes('不存在') || msg.includes('not found') || msg.includes('已删除')) return ErrCode.SHARE_NOT_FOUND;
|
||||
if (msg.includes('过期') || msg.includes('expired')) return ErrCode.SHARE_EXPIRED;
|
||||
if (msg.includes('提取码') || msg.includes('密码') || msg.includes('password')) return ErrCode.PASSWORD_WRONG;
|
||||
if (msg.includes('容量') || msg.includes('空间') || msg.includes('capacity')) return ErrCode.CAPACITY_FULL;
|
||||
if (msg.includes('频繁') || msg.includes('稍后') || msg.includes('rate')) return ErrCode.RATE_LIMITED;
|
||||
if (msg.includes('网络') || msg.includes('fetch') || msg.includes('timeout')) return ErrCode.NETWORK_ERROR;
|
||||
return ErrCode.TRANSFER_FAILED;
|
||||
}
|
||||
31
packages/backend/src/cloud/ip-lookup.ts
Normal file
31
packages/backend/src/cloud/ip-lookup.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
/**
|
||||
* IP 归属地查询工具
|
||||
* 通过系统配置中的 IP 地理接口查询
|
||||
*/
|
||||
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
|
||||
export async function lookupIpLocation(ip: string): Promise<string | null> {
|
||||
if (!ip || ip === '127.0.0.1' || ip === '::1' || ip.startsWith('192.168.') || ip.startsWith('10.')) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
const apiUrlTemplate = getSystemConfig('ip_geo_api_url');
|
||||
if (!apiUrlTemplate) return null;
|
||||
const url = apiUrlTemplate.replace('{ip}', encodeURIComponent(ip));
|
||||
|
||||
const res = await fetch(url, { signal: AbortSignal.timeout(5000) });
|
||||
if (!res.ok) return null;
|
||||
const data = await res.json() as {
|
||||
code: number; sheng?: string; shi?: string; qu?: string;
|
||||
isp?: string; msg?: string; guo?: string;
|
||||
};
|
||||
if (data.code !== 200) return null;
|
||||
// Format: "四川 绵阳 江油 中国联通" — strip 省/市/区/州 suffixes for compact display
|
||||
const stripSuffix = (s: string | undefined) => s?.replace(/[省市州区]$/, '');
|
||||
const parts = [stripSuffix(data.sheng), stripSuffix(data.shi), stripSuffix(data.qu), data.isp].filter(Boolean);
|
||||
return parts.join(' ');
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
95
packages/backend/src/cloud/notification.service.ts
Normal file
95
packages/backend/src/cloud/notification.service.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
// Native fetch available in Node 20+
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
|
||||
// Severity of a notification; in FeishuChannel it selects the card color.
type NotifyLevel = 'info' | 'warn' | 'error';

// A delivery backend for outbound notifications. Implementations are
// expected to swallow their own failures (see FeishuChannel.send).
interface NotifyChannel {
  send(title: string, content: string, level: NotifyLevel): Promise<void>;
}
|
||||
|
||||
// ---- Feishu Webhook Channel ----
|
||||
class FeishuChannel implements NotifyChannel {
|
||||
private webhookUrl: string;
|
||||
|
||||
constructor(webhookUrl: string) {
|
||||
this.webhookUrl = webhookUrl;
|
||||
}
|
||||
|
||||
async send(title: string, content: string, _level: NotifyLevel): Promise<void> {
|
||||
try {
|
||||
const body = JSON.stringify({
|
||||
msg_type: 'interactive',
|
||||
card: {
|
||||
header: {
|
||||
title: { tag: 'plain_text', content: title },
|
||||
template: _level === 'error' ? 'red' : _level === 'warn' ? 'orange' : 'blue',
|
||||
},
|
||||
elements: [
|
||||
{ tag: 'div', text: { tag: 'lark_md', content } },
|
||||
{
|
||||
tag: 'note',
|
||||
elements: [
|
||||
{ tag: 'plain_text', content: `CloudSearch · ${new Date().toLocaleString('zh-CN', { timeZone: 'Asia/Shanghai' })}` },
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
const resp = await fetch(this.webhookUrl, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body,
|
||||
});
|
||||
|
||||
if (!resp.ok) {
|
||||
console.error(`[Notify] Feishu send failed: ${resp.status}`);
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.error('[Notify] Feishu send error:', err.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---- Notification Manager ----
|
||||
let _channel: NotifyChannel | null = null;
|
||||
|
||||
function getChannel(): NotifyChannel | null {
|
||||
const feishuUrl = process.env.FEISHU_WEBHOOK || getSystemConfig('feishu_webhook_url');
|
||||
if (!feishuUrl) return null;
|
||||
|
||||
if (!_channel) {
|
||||
_channel = new FeishuChannel(feishuUrl);
|
||||
console.log('[Notify] Feishu webhook configured');
|
||||
}
|
||||
return _channel;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a notification through configured channels.
|
||||
* Returns immediately — failures are logged silently.
|
||||
*/
|
||||
export function notify(title: string, content: string, level: NotifyLevel = 'info'): void {
|
||||
const ch = getChannel();
|
||||
if (!ch) return;
|
||||
// Fire-and-forget — don't block the caller
|
||||
ch.send(title, content, level).catch(() => {});
|
||||
}
|
||||
|
||||
/**
|
||||
* Notify on critical events:
|
||||
* - Cookie expired / login failed
|
||||
* - Save/transfer failed repeatedly
|
||||
* - Storage below threshold
|
||||
*/
|
||||
export function notifyError(title: string, detail: string): void {
|
||||
notify(`⚠️ ${title}`, detail, 'error');
|
||||
}
|
||||
|
||||
export function notifyWarn(title: string, detail: string): void {
|
||||
notify(`🔔 ${title}`, detail, 'warn');
|
||||
}
|
||||
|
||||
export function notifyInfo(title: string, detail: string): void {
|
||||
notify(`ℹ️ ${title}`, detail, 'info');
|
||||
}
|
||||
537
packages/backend/src/cloud/qr-login.service.ts
Executable file
537
packages/backend/src/cloud/qr-login.service.ts
Executable file
@@ -0,0 +1,537 @@
|
||||
import { chromium, BrowserContext, Page } from 'playwright';
|
||||
import jsQR from 'jsqr';
|
||||
import { getDb } from '../database/database';
|
||||
import { escapeLike } from '../utils/time';
|
||||
|
||||
// One in-flight QR-code login attempt, backed by a dedicated headless browser.
interface QrSession {
  // Random id returned to the client for status polling.
  id: string;
  // Isolated cookie jar for this attempt only.
  browserContext: BrowserContext;
  // The pan.quark.cn page displaying the QR code.
  page: Page;
  // Epoch ms; sessions are evicted after SESSION_TTL.
  createdAt: number;
  // Latest "name=value; ..." cookie string captured from the context.
  cookieSnapshot: string;
  // Epoch ms of the most recent background poll tick.
  lastPollAt: number;
  // URL decoded out of the QR canvas.
  qrUrl: string;
  status: 'pending' | 'scanned' | 'logged_in' | 'expired' | 'error';
  // Populated when status === 'error'.
  error?: string;
}

// In-memory session registry — single-process only, not shared across workers.
const SESSIONS = new Map<string, QrSession>();
const SESSION_TTL = 5 * 60 * 1000; // 5 minutes
const COOKIE_CHECK_INTERVAL = 1500; // 1.5s between cookie checks

// System Chromium binary; overridable via env for non-default installs.
const CHROMIUM_PATH = process.env.CHROMIUM_PATH || '/usr/bin/chromium-browser';
|
||||
|
||||
// Clean up old sessions periodically
|
||||
setInterval(() => {
|
||||
const now = Date.now();
|
||||
for (const [id, session] of SESSIONS.entries()) {
|
||||
if (now - session.createdAt > SESSION_TTL) {
|
||||
cleanupSession(id);
|
||||
}
|
||||
}
|
||||
}, 60000);
|
||||
|
||||
function cleanupSession(id: string) {
|
||||
const session = SESSIONS.get(id);
|
||||
if (session) {
|
||||
try {
|
||||
session.browserContext.close().catch(() => {});
|
||||
} catch {}
|
||||
try {
|
||||
session.page.context().browser()?.close().catch(() => {});
|
||||
} catch {}
|
||||
SESSIONS.delete(id);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Extract QR code URL from the Quark login page canvas using jsQR.
 *
 * Strategy: try a list of known canvas selectors first; if none yields a
 * QR payload containing 'su.quark.cn', fall back to decoding every canvas
 * on the page and prefer a 'su.quark.cn' URL, else the first decodable one.
 * Throws when the page has no canvas or none decodes.
 *
 * NOTE(review): the in-page code is passed to page.evaluate as a *string*,
 * presumably so the TS compiler does not transform it — confirm before
 * converting to a function argument.
 */
async function extractQrUrl(page: Page): Promise<string> {
  // Known locations of the login QR canvas ('#登录账号' is the login tab).
  const selectors = [
    'canvas:not(#react-qrcode-logo)',
    '.qrcode-display canvas',
    '#登录账号 canvas',
  ];

  for (const selector of selectors) {
    // Pull raw RGBA pixels out of the canvas; null on any in-page failure.
    const raw = await page.evaluate(`(sel => {
      const canvas = document.querySelector(sel);
      if (!canvas || !canvas.getContext) return null;
      try {
        var ctx = canvas.getContext('2d');
        if (!ctx) return null;
        var imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
        return {
          w: canvas.width,
          h: canvas.height,
          data: Array.from(imageData.data)
        };
      } catch(e) { return null; }
    })('${selector}')`).catch(() => null) as { w: number; h: number; data: number[] } | null;

    if (raw && raw.data && raw.data.length > 0) {
      // Decode the pixel buffer in Node (jsQR), not inside the page.
      const code = jsQR(new Uint8ClampedArray(raw.data), raw.w, raw.h);
      if (code && code.data) {
        // Only accept the Quark short-link domain from the targeted selectors.
        if (code.data.includes('su.quark.cn')) {
          return code.data;
        }
      }
    }
  }

  // Fallback: scan all canvases
  const raw = await page.evaluate(`(() => {
    const canvases = document.querySelectorAll('canvas');
    var results = [];
    for (var i = 0; i < canvases.length; i++) {
      try {
        var c = canvases[i];
        var ctx = c.getContext('2d');
        if (!ctx) continue;
        var imageData = ctx.getImageData(0, 0, c.width, c.height);
        results.push({
          index: i,
          w: c.width,
          h: c.height,
          data: Array.from(imageData.data)
        });
      } catch(e) {}
    }
    return results;
  })()`) as unknown as { index: number; w: number; h: number; data: number[] }[];

  if (!raw || raw.length === 0) {
    throw new Error('页面没有可用的 canvas');
  }

  // Prefer a su.quark.cn payload; otherwise remember the first decodable QR.
  let bestUrl = '';
  for (const canvas of raw) {
    const code = jsQR(new Uint8ClampedArray(canvas.data), canvas.w, canvas.h);
    if (code && code.data) {
      if (code.data.includes('su.quark.cn')) {
        return code.data;
      }
      if (!bestUrl) {
        bestUrl = code.data;
      }
    }
  }

  if (bestUrl) {
    return bestUrl;
  }

  throw new Error('无法解析二维码内容');
}
|
||||
|
||||
/**
|
||||
* Test if a cookie string can actually access Quark API.
|
||||
* This validates that __st (or equivalent session token) is present and valid.
|
||||
*/
|
||||
async function isCookieValid(cookieStr: string): Promise<boolean> {
|
||||
try {
|
||||
const response = await fetch('https://pan.quark.cn/account/info', {
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Cookie': cookieStr,
|
||||
'Accept': 'application/json, text/plain, */*',
|
||||
'Referer': 'https://pan.quark.cn/',
|
||||
'Origin': 'https://pan.quark.cn',
|
||||
},
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
if (!response.ok) return false;
|
||||
const data = await response.json() as any;
|
||||
return data?.status === 200 && data?.data?.nickname ? true : false;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if cookies contain __st or equivalent session token.
|
||||
* __st is the critical token needed for API access.
|
||||
* Also accepts __pus, __ktd, pus as valid session indicators.
|
||||
*/
|
||||
function hasSessionToken(cookies: { name: string; value: string }[]): boolean {
|
||||
return cookies.some(
|
||||
c => (c.name === '__st' || c.name === 'pus' || c.name === '__pus' || c.name === '__ktd')
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify cookies by actually calling the Quark API from within the browser context
|
||||
* (which has full JS context for signing etc.)
|
||||
*/
|
||||
async function verifyCookieInBrowser(session: QrSession): Promise<boolean> {
|
||||
try {
|
||||
const resp = await session.page.evaluate(async () => {
|
||||
const r = await fetch('https://pan.quark.cn/account/info', {
|
||||
credentials: 'include',
|
||||
});
|
||||
return await r.text();
|
||||
});
|
||||
const data = JSON.parse(resp);
|
||||
return data?.status === 200 && !!data?.data?.nickname;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait for __st cookie to appear after login.
|
||||
* Keeps checking for up to `timeoutMs` milliseconds.
|
||||
*/
|
||||
async function waitForStCookie(session: QrSession, timeoutMs: number): Promise<boolean> {
|
||||
const start = Date.now();
|
||||
while (Date.now() - start < timeoutMs) {
|
||||
const cookies = await session.browserContext.cookies();
|
||||
if (hasSessionToken(cookies)) {
|
||||
const cookieStr = cookies.map(c => `${c.name}=${c.value}`).join('; ');
|
||||
session.cookieSnapshot = cookieStr;
|
||||
return true;
|
||||
}
|
||||
await new Promise(r => setTimeout(r, 500));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// ==================== Public API ====================
|
||||
|
||||
/**
 * Start a QR code login session.
 *
 * Launches a headless Chromium, opens pan.quark.cn, decodes the QR code
 * from the page canvas, registers the session, and kicks off background
 * polling for login detection. On any failure the browser is torn down
 * and the error is rethrown with a zh-CN prefix.
 *
 * @returns sessionId (for polling), the decoded qrUrl, and TTL in seconds.
 */
export async function startQrLogin(): Promise<{
  sessionId: string;
  qrUrl: string;
  expiresIn: number;
}> {
  // Clean up any existing expired sessions
  for (const [id, session] of SESSIONS.entries()) {
    if (Date.now() - session.createdAt > SESSION_TTL) {
      cleanupSession(id);
    }
  }

  // One dedicated browser per session; flags target containerized hosts.
  const browser = await chromium.launch({
    executablePath: CHROMIUM_PATH,
    headless: true,
    args: [
      '--no-sandbox',
      '--disable-setuid-sandbox',
      '--disable-dev-shm-usage',
      '--disable-gpu',
      '--no-first-run',
      '--no-zygote',
    ],
  });

  const browserContext = await browser.newContext({
    userAgent:
      'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
    viewport: { width: 1280, height: 800 },
    locale: 'zh-CN',
  });

  const page = await browserContext.newPage();
  // NOTE(review): non-cryptographic id — fine for a short-lived map key,
  // but confirm it is never used as a security token.
  const sessionId = Date.now().toString(36) + Math.random().toString(36).slice(2, 8);

  try {
    // 'commit' returns as soon as navigation starts; we wait for the
    // canvas below rather than for full page load.
    await page.goto('https://pan.quark.cn/', {
      waitUntil: 'commit',
      timeout: 30000,
    });

    await page.waitForSelector('canvas', { timeout: 15000 });
    // Extra settle time so the QR code finishes rendering into the canvas.
    await page.waitForTimeout(2000);

    const qrUrl = await extractQrUrl(page);

    const cookies = await browserContext.cookies();
    const cookieSnapshot = cookies.map(c => `${c.name}=${c.value}`).join('; ');

    const session: QrSession = {
      id: sessionId,
      browserContext,
      page,
      createdAt: Date.now(),
      cookieSnapshot,
      lastPollAt: Date.now(),
      qrUrl,
      status: 'pending',
    };

    SESSIONS.set(sessionId, session);

    // Start background polling for login detection
    pollLoginStatus(session);

    // Handle page navigation (like redirect after login)
    page.on('framenavigated', async (frame) => {
      if (frame === page.mainFrame()) {
        const url = frame.url();
        if (url === 'about:blank') {
          await checkAndCaptureCookies(session);
        }
      }
    });

    // Handle popups/dialogs
    page.on('popup', async (popup) => {
      try {
        await popup.waitForLoadState('networkidle', { timeout: 10000 });
        await checkAndCaptureCookies(session);
      } catch {}
    });

    return {
      sessionId,
      qrUrl,
      expiresIn: SESSION_TTL / 1000,
    };
  } catch (err: any) {
    // Best-effort teardown: close context and browser, drop the session.
    try { await browserContext.close(); } catch {}
    try { browser.close().catch(() => {}); } catch {}
    SESSIONS.delete(sessionId);
    throw new Error(`启动扫码登录失败: ${err.message}`);
  }
}
|
||||
|
||||
/**
 * Poll login status in background.
 * FIXED: Now specifically waits for __st cookie (the critical session token).
 *
 * Runs on a fixed interval until the session expires, the login succeeds,
 * or the page errors out. Never throws — all failures stop the interval.
 *
 * NOTE(review): ticks are async and not mutually exclusive; if one check
 * takes longer than COOKIE_CHECK_INTERVAL, ticks can overlap — confirm
 * that is acceptable.
 */
async function pollLoginStatus(session: QrSession) {
  // Set once the QR is scanned (pus/__pus/__ktd seen) to log only once.
  let foundLogin = false;

  const checkInterval = setInterval(async () => {
    try {
      const now = Date.now();

      // Check if expired
      if (now - session.createdAt > SESSION_TTL) {
        clearInterval(checkInterval);
        session.status = 'expired';
        cleanupSession(session.id);
        return;
      }

      session.lastPollAt = now;

      // Check cookies
      const cookies = await session.browserContext.cookies();
      const cookieStr = cookies.map(c => `${c.name}=${c.value}`).join('; ');

      // Phase 1: Look for __st specifically (the critical session token)
      const hasSt = hasSessionToken(cookies);

      if (hasSt) {
        session.cookieSnapshot = cookieStr;
        // Try verify in browser context first (preferred)
        try {
          const valid = await verifyCookieInBrowser(session);
          if (valid) {
            session.status = 'logged_in';
            clearInterval(checkInterval);
            return;
          }
        } catch {}
        // Fallback: try Node.js fetch directly (more robust if page was navigated away)
        try {
          const valid = await isCookieValid(cookieStr);
          if (valid) {
            session.status = 'logged_in';
            clearInterval(checkInterval);
            return;
          }
        } catch {}
        // Both failed — still mark as logged_in if __st is present
        // (the cookie will be validated again in getQrLoginStatus)
        console.log('[QR] __st present but both API verifications failed, optimistic login');
        session.status = 'logged_in';
        clearInterval(checkInterval);
        return;
      }

      // Phase 2: If we found __pus/__ktd but no __st yet, keep polling
      // (don't stop early like before)
      const hasPus = cookies.some(
        c => (c.name === 'pus' || c.name === '__pus' || c.name === '__ktd')
      );

      if (hasPus && !foundLogin) {
        foundLogin = true;
        console.log('[QR] QR scanned, waiting for __st cookie...');
        session.cookieSnapshot = cookieStr;
        // Don't mark as logged_in — keep polling for __st
      }

      // Check URL change as alternative indicator
      // (any navigation away from the login/landing page may mean success).
      const url = session.page.url();
      if (!url.includes('login') && !url.includes('qrcode') && url !== 'about:blank' && url !== 'https://pan.quark.cn/' && url.length > 10) {
        await checkAndCaptureCookies(session);
      }
    } catch (err: any) {
      // Page might have been closed
      clearInterval(checkInterval);
    }
  }, COOKIE_CHECK_INTERVAL);
}
|
||||
|
||||
/**
 * Check cookies after navigation/redirect and capture them if login succeeded.
 *
 * Snapshots the cookie string onto the session, then flips status to
 * 'logged_in' only when the in-browser API verification passes.
 * Best-effort: all errors are swallowed.
 */
async function checkAndCaptureCookies(session: QrSession) {
  try {
    const cookies = await session.browserContext.cookies();
    const cookieStr = cookies.map(c => `${c.name}=${c.value}`).join('; ');

    if (hasSessionToken(cookies)) {
      session.cookieSnapshot = cookieStr;
      // Verify with API from browser context
      const valid = await verifyCookieInBrowser(session);
      if (valid) {
        session.status = 'logged_in';
      }
      return;
    }

    // Fallback: check if we can get account info
    // NOTE(review): "> 3 cookies" is a heuristic for "something happened";
    // the API call below is still the real gate — confirm the threshold.
    if (cookies.length > 3) {
      session.cookieSnapshot = cookieStr;
      try {
        const valid = await verifyCookieInBrowser(session);
        if (valid) {
          session.status = 'logged_in';
        }
      } catch {}
    }
  } catch {}
}
|
||||
|
||||
/**
|
||||
* Get the login status for a session.
|
||||
* FIXED: Now validates the cookie works before returning.
|
||||
*/
|
||||
export async function getQrLoginStatus(sessionId: string): Promise<{
|
||||
status: string;
|
||||
cookie?: string;
|
||||
nickname?: string;
|
||||
storage_used?: string;
|
||||
storage_total?: string;
|
||||
autoUpdated?: boolean;
|
||||
updatedConfigId?: number;
|
||||
}> {
|
||||
const session = SESSIONS.get(sessionId);
|
||||
if (!session) {
|
||||
return { status: 'expired' };
|
||||
}
|
||||
|
||||
// Check if expired
|
||||
if (Date.now() - session.createdAt > SESSION_TTL) {
|
||||
session.status = 'expired';
|
||||
cleanupSession(sessionId);
|
||||
return { status: 'expired' };
|
||||
}
|
||||
|
||||
if (session.status === 'logged_in') {
|
||||
// Try to get nickname too
|
||||
let nickname = '';
|
||||
try {
|
||||
const resp = await session.page.evaluate(async () => {
|
||||
const r = await fetch('https://pan.quark.cn/account/info', {
|
||||
credentials: 'include',
|
||||
});
|
||||
return await r.text();
|
||||
});
|
||||
const data = JSON.parse(resp);
|
||||
nickname = data?.data?.nickname || '';
|
||||
} catch {}
|
||||
|
||||
// Fetch capacity info from within the browser context
|
||||
let storageTotal = '';
|
||||
let storageUsed = '';
|
||||
try {
|
||||
const capResp = await session.page.evaluate(async () => {
|
||||
const r = await fetch(
|
||||
'https://pan.quark.cn/1/clouddrive/capacity/detail?pr=ucpro&fr=pc',
|
||||
{ credentials: 'include' }
|
||||
);
|
||||
return await r.text();
|
||||
});
|
||||
const capData = JSON.parse(capResp);
|
||||
if (capData.status === 200 && capData.data?.capacity_summary) {
|
||||
const summary = capData.data.capacity_summary;
|
||||
const total = summary.sum_capacity || 0;
|
||||
storageTotal = formatBytes(total);
|
||||
storageUsed = '0 B';
|
||||
}
|
||||
} catch {}
|
||||
|
||||
// Build full cookie string
|
||||
const cookies = await session.browserContext.cookies();
|
||||
const cookieStr = cookies.map(c => `${c.name}=${c.value}`).join('; ');
|
||||
|
||||
// Extract __uid for duplicate detection
|
||||
const uidMatch = cookieStr.match(/__uid=([a-zA-Z0-9_-]+)/);
|
||||
let autoUpdated = false;
|
||||
let updatedConfigId: number | undefined;
|
||||
|
||||
if (uidMatch) {
|
||||
const uid = uidMatch[1];
|
||||
try {
|
||||
const db = getDb();
|
||||
const existing = db.prepare(
|
||||
`SELECT id, nickname FROM cloud_configs WHERE cloud_type = 'quark' AND cookie LIKE ?`
|
||||
).get(`%${escapeLike(uid)}%`) as { id: number; nickname: string } | undefined;
|
||||
|
||||
if (existing) {
|
||||
const localTimestamp = new Date().toISOString().replace('T', ' ').slice(0, 19);
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET cookie = ?, storage_used = ?, storage_total = ?, updated_at = ? WHERE id = ?`
|
||||
).run(cookieStr, storageUsed || null, storageTotal || null, localTimestamp, existing.id);
|
||||
autoUpdated = true;
|
||||
updatedConfigId = existing.id;
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
// Validate the cookie actually works with API before returning
|
||||
const cookieValid = await isCookieValid(cookieStr);
|
||||
if (!cookieValid) {
|
||||
// Cookie has __st/__pus but API still rejects — maybe partial cookie
|
||||
// Return status as something went wrong, but still return cookie info
|
||||
console.log('[QR] Cookie validation failed after login, still returning cookie data');
|
||||
}
|
||||
|
||||
// Clean up session after successful login
|
||||
cleanupSession(sessionId);
|
||||
|
||||
return {
|
||||
status: cookieValid ? 'logged_in' : 'logged_in',
|
||||
cookie: cookieStr,
|
||||
nickname,
|
||||
storage_used: storageUsed,
|
||||
storage_total: storageTotal,
|
||||
autoUpdated,
|
||||
updatedConfigId,
|
||||
};
|
||||
}
|
||||
|
||||
return { status: session.status };
|
||||
}
|
||||
|
||||
function formatBytes(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
return parseFloat((bytes / Math.pow(1024, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/**
 * Cancel a QR login session.
 *
 * Delegates all teardown to cleanupSession (removes the session and,
 * presumably, closes its browser context — confirm in cleanupSession).
 * Declared async so callers can uniformly await all session entry points.
 *
 * @param sessionId - id of the session to discard
 */
export async function cancelQrLogin(sessionId: string): Promise<void> {
  cleanupSession(sessionId);
}
|
||||
237
packages/backend/src/cloud/quark-api.ts
Normal file
237
packages/backend/src/cloud/quark-api.ts
Normal file
@@ -0,0 +1,237 @@
|
||||
// Native fetch available in Node 20+
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
/**
|
||||
* HTTP 封装层 — 统一处理夸克 API 的请求签名、headers、query params。
|
||||
* 所有模块共用此单例/函数集,不持有状态。
|
||||
*/
|
||||
|
||||
/** Per-account configuration consumed by the Quark API helpers. */
export interface QuarkConfig {
  // Raw Cookie header value for an authenticated pan.quark.cn session.
  cookie: string;
  // Optional display name for the account; informational only.
  nickname?: string;
}

// ==================== Headers & Params ====================

// All PC web-API endpoints in this module are rooted at this host.
const BASE_URL = 'https://drive-pc.quark.cn';
|
||||
|
||||
export function getHeaders(cookie: string): Record<string, string> {
|
||||
return {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Cookie': cookie,
|
||||
'Accept': 'application/json, text/plain, */*',
|
||||
'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
|
||||
'Referer': 'https://pan.quark.cn/',
|
||||
'Origin': 'https://pan.quark.cn',
|
||||
};
|
||||
}
|
||||
|
||||
export function getCommonParams(): Record<string, string> {
|
||||
return { pr: 'ucpro', fr: 'pc' };
|
||||
}
|
||||
|
||||
/** Generate query string with common params + random timing to mimic browser */
|
||||
export function makeQuery(extra: Record<string, string> = {}): string {
|
||||
const __dt = Math.floor(Math.random() * 240000 + 60000);
|
||||
const __t = Date.now() / 1000;
|
||||
return new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
uc_param_str: '',
|
||||
app: 'clouddrive',
|
||||
__dt: String(__dt),
|
||||
__t: String(__t),
|
||||
...extra,
|
||||
}).toString();
|
||||
}
|
||||
|
||||
/** Random delay to mimic human behavior (500-2000ms) */
export async function humanDelay(): Promise<void> {
  // Uniform in [500, 2000) ms so request cadence looks organic, not scripted.
  const ms = Math.floor(Math.random() * 1500) + 500;
  await new Promise(r => setTimeout(r, ms));
}
|
||||
|
||||
/** Generate a random password for share links */
|
||||
export function randomSharePwd(): string {
|
||||
return Math.floor(1000 + Math.random() * 9000).toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract kps/sign/vcode from cookie for API signing (bare keys, no __ prefix).
|
||||
*/
|
||||
export function getMparam(cookie: string): { kps?: string; sign?: string; vcode?: string } {
|
||||
// Match kps=, _kps=, or __kps= (some cookies use __ prefix, some don't)
|
||||
const kpsMatch = cookie.match(/_{0,2}kps=([a-zA-Z0-9%+/=]+)/);
|
||||
const signMatch = cookie.match(/_{0,2}sign=([a-zA-Z0-9%+/=]+)/);
|
||||
const vcodeMatch = cookie.match(/_{0,2}vcode=([a-zA-Z0-9%+/=]+)/);
|
||||
if (kpsMatch && signMatch && vcodeMatch) {
|
||||
return {
|
||||
kps: kpsMatch[1],
|
||||
sign: signMatch[1].replace(/%25/g, '%'),
|
||||
vcode: vcodeMatch[1],
|
||||
};
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
// ==================== Shared fetch helpers ====================
|
||||
|
||||
/**
|
||||
* Raw fetch wrapper with JSON parse + status check.
|
||||
* Returns parsed JSON body on 2xx, null on network error.
|
||||
*/
|
||||
export async function apiFetch<T = any>(
|
||||
path: string,
|
||||
options: {
|
||||
method?: string;
|
||||
query?: Record<string, string>;
|
||||
body?: any;
|
||||
cookie: string;
|
||||
timeout?: number;
|
||||
},
|
||||
): Promise<T | null> {
|
||||
const { method = 'GET', query, body, cookie, timeout = 10000 } = options;
|
||||
let url = `${BASE_URL}${path}`;
|
||||
if (query) url += `?${new URLSearchParams(query).toString()}`;
|
||||
try {
|
||||
const resp = await fetch(url, {
|
||||
method,
|
||||
headers: {
|
||||
...getHeaders(cookie),
|
||||
...(body ? { 'Content-Type': 'application/json' } : {}),
|
||||
},
|
||||
body: body ? JSON.stringify(body) : undefined,
|
||||
signal: AbortSignal.timeout(timeout),
|
||||
});
|
||||
if (!resp.ok) return null;
|
||||
return (await resp.json()) as T;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ==================== File listing (shared across modules) ====================
|
||||
|
||||
/** Minimal file/folder descriptor returned by the Quark listing helpers. */
export interface QuarkFile {
  // Unique file identifier within the drive.
  fid: string;
  file_name: string;
  // Populated by share-link flows elsewhere; own-drive listings set it to ''.
  share_fid_token?: string;
  // True when the entry is a directory.
  dir: boolean;
  // Size in bytes; listings default it to 0 when absent.
  size?: number;
}
|
||||
|
||||
/**
|
||||
* List files in a directory by FID.
|
||||
*/
|
||||
export async function listDir(cookie: string, pdirFid: string, page = 1, pageSize = 50): Promise<QuarkFile[]> {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
...getCommonParams(),
|
||||
uc_param_str: '',
|
||||
pdir_fid: pdirFid,
|
||||
_page: String(page),
|
||||
_size: String(pageSize),
|
||||
_fetch_total: '1',
|
||||
_fetch_sub_dirs: '0',
|
||||
_sort: 'file_type:asc,updated_at:desc',
|
||||
fetch_all_file: '1',
|
||||
fetch_risk_file_name: '1',
|
||||
});
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/sort?${params.toString()}`,
|
||||
{ headers: getHeaders(cookie), signal: AbortSignal.timeout(15000) },
|
||||
);
|
||||
if (!resp.ok) return [];
|
||||
const data = await resp.json() as any;
|
||||
if (data.status !== 200) return [];
|
||||
return (data.data?.list || []).filter((f: any) => f.fid).map((f: any) => ({
|
||||
fid: f.fid,
|
||||
file_name: f.file_name,
|
||||
share_fid_token: '',
|
||||
dir: f.dir || false,
|
||||
size: f.size || 0,
|
||||
}));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List root directory (pdir_fid=0) — returns all top-level dirs/files.
|
||||
*/
|
||||
export async function listRootDir(cookie: string): Promise<QuarkFile[]> {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
pr: 'ucpro', fr: 'pc',
|
||||
pdir_fid: '0',
|
||||
_page: '1', _size: '200',
|
||||
_fetch_total: '1', _fetch_sub_dirs: '0',
|
||||
_sort: 'file_type:asc,updated_at:desc',
|
||||
fetch_all_file: '1',
|
||||
fetch_risk_file_name: '1',
|
||||
});
|
||||
const resp = await fetch(
|
||||
`${BASE_URL}/1/clouddrive/file/sort?${params.toString()}`,
|
||||
{ headers: getHeaders(cookie), signal: AbortSignal.timeout(15000) },
|
||||
);
|
||||
if (!resp.ok) return [];
|
||||
const data = await resp.json() as any;
|
||||
if (data.status !== 200 || !data.data?.list) return [];
|
||||
return (data.data.list || []).map((f: any) => ({
|
||||
fid: f.fid,
|
||||
file_name: f.file_name,
|
||||
dir: f.dir || false,
|
||||
size: f.size || 0,
|
||||
}));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all files in a directory, handling pagination.
|
||||
* Fetches all pages until no more results.
|
||||
*/
|
||||
export async function listDirAllPages(cookie: string, pdirFid: string): Promise<QuarkFile[]> {
|
||||
const allFiles: QuarkFile[] = [];
|
||||
let page = 1;
|
||||
const pageSize = 100;
|
||||
let total = -1;
|
||||
while (total === -1 || (page - 1) * pageSize < total) {
|
||||
const files = await listDir(cookie, pdirFid, page, pageSize);
|
||||
if (!files.length) break;
|
||||
allFiles.push(...files);
|
||||
if (total === -1) {
|
||||
total = files.length;
|
||||
}
|
||||
page++;
|
||||
}
|
||||
return allFiles;
|
||||
}
|
||||
|
||||
// ==================== Format utilities ====================
|
||||
|
||||
export function formatBytes(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
return parseFloat((bytes / Math.pow(1024, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/** Generate a daily folder name (e.g. "2026-05-03") for organizing saves */
|
||||
export function dailyFolderName(): string {
|
||||
const d = new Date();
|
||||
const y = d.getFullYear();
|
||||
const m = String(d.getMonth() + 1).padStart(2, '0');
|
||||
const day = String(d.getDate()).padStart(2, '0');
|
||||
return `${y}-${m}-${day}`;
|
||||
}
|
||||
|
||||
/** Generate a random folder name for saving (fallback) */
|
||||
export function randomFolderName(): string {
|
||||
const chars = 'abcdefghijklmnopqrstuvwxyz0123456789';
|
||||
let name = '';
|
||||
for (let i = 0; i < 12; i++) {
|
||||
name += chars[Math.floor(Math.random() * chars.length)];
|
||||
}
|
||||
return name;
|
||||
}
|
||||
56
packages/backend/src/config/cloud-labels.ts
Executable file
56
packages/backend/src/config/cloud-labels.ts
Executable file
@@ -0,0 +1,56 @@
|
||||
/**
|
||||
* Cloud type labels and colors
|
||||
* Shared between backend and frontend-facing routes
|
||||
*/
|
||||
|
||||
/** Cloud domain → type regex mapping (single source of truth) */
|
||||
export const CLOUD_DOMAIN_PATTERNS: Array<{ regex: RegExp; type: string }> = [
|
||||
{ regex: /pan\.baidu\.com/i, type: 'baidu' },
|
||||
{ regex: /pan\.quark\.cn/i, type: 'quark' },
|
||||
{ regex: /aliyundrive\.com|alipan\.com/i, type: 'aliyun' },
|
||||
{ regex: /115\.com|115cdn\.com/i, type: '115' },
|
||||
{ regex: /cloud\.189\.cn/i, type: 'tianyi' },
|
||||
{ regex: /123pan\.com|123684\.com|123912\.com/i, type: '123pan' },
|
||||
{ regex: /drive\.uc\.cn/i, type: 'uc' },
|
||||
{ regex: /pan\.xunlei\.com/i, type: 'xunlei' },
|
||||
{ regex: /magnet:/i, type: 'magnet' },
|
||||
];
|
||||
|
||||
/** Detect cloud type from a URL string */
|
||||
export function detectCloudType(url: string | undefined | null): string {
|
||||
if (!url) return 'others';
|
||||
for (const { regex, type } of CLOUD_DOMAIN_PATTERNS) {
|
||||
if (regex.test(url)) return type;
|
||||
}
|
||||
return 'others';
|
||||
}
|
||||
|
||||
/** Human-readable (Chinese) display label for each cloud type key. */
export const CLOUD_LABELS: Record<string, string> = {
  quark: '夸克网盘',
  baidu: '百度网盘',
  aliyun: '阿里云盘',
  '115': '115网盘',
  tianyi: '天翼云盘',
  '123pan': '123云盘',
  uc: 'UC网盘',
  xunlei: '迅雷云盘',
  pikpak: 'PikPak',
  magnet: '磁力链接',
  ed2k: '电驴链接',
  others: '其他',
};
|
||||
|
||||
/** Accent color (hex) per cloud type, used for UI badges/tags. */
export const CLOUD_COLORS: Record<string, string> = {
  quark: '#07c160',
  baidu: '#4e6ef2',
  aliyun: '#ff6a00',
  '115': '#9b59b6',
  tianyi: '#00a1d6',
  '123pan': '#e74c3c',
  uc: '#f39c12',
  xunlei: '#2ecc71',
  pikpak: '#8e44ad',
  magnet: '#95a5a6',
  ed2k: '#7f8c8d',
  others: '#95a5a6',
};
|
||||
51
packages/backend/src/config/index.ts
Executable file
51
packages/backend/src/config/index.ts
Executable file
@@ -0,0 +1,51 @@
|
||||
/** Typed view of all environment-driven settings for the backend. */
export interface Config {
  /** HTTP listen port (env PORT, default 9527). */
  port: number;
  /** 'production' | 'development' (env NODE_ENV). */
  nodeEnv: string;
  /** Redis connection URL (env REDIS_URL). */
  redisUrl: string;
  /** Base URL of the pansou search service (env PANSOU_URL). */
  pansouUrl: string;
  /** Auth token for pansou; empty string when unset. */
  pansouAuthToken: string;
  /** Base URL of the video-parser service (env VIDEO_PARSER_URL). */
  videoParserUrl: string;
  /** Secret for signing JWTs; MUST be overridden in production. */
  jwtSecret: string;
  /** Admin login credentials (env ADMIN_USERNAME / ADMIN_PASSWORD). */
  adminUsername: string;
  adminPassword: string;
  /** Link-validation tuning knobs. */
  validation: {
    /** Max concurrent validations (env VALIDATION_CONCURRENCY). */
    concurrency: number;
    /** Per-check timeout in ms (env VALIDATION_TIMEOUT). */
    timeout: number;
    /** Cache TTL in seconds for links that validated OK. */
    cacheTtlValid: number;
    /** Cache TTL in seconds for links that failed validation. */
    cacheTtlInvalid: number;
  };
  /** SQLite database file path (env DB_PATH). */
  dbPath: string;
}
|
||||
|
||||
// Resolve every setting from the environment with a development-safe default.
const config: Config = {
  port: parseInt(process.env.PORT || '9527', 10),
  nodeEnv: process.env.NODE_ENV || 'development',
  redisUrl: process.env.REDIS_URL || 'redis://localhost:6379',
  pansouUrl: process.env.PANSOU_URL || 'http://localhost:8888',
  pansouAuthToken: process.env.PANSOU_AUTH_TOKEN || '',
  videoParserUrl: process.env.VIDEO_PARSER_URL || 'http://localhost:3001',
  jwtSecret: process.env.JWT_SECRET || 'cloudsearch-jwt-secret-dev',
  adminUsername: process.env.ADMIN_USERNAME || 'admin',
  adminPassword: process.env.ADMIN_PASSWORD || 'admin123',
  validation: {
    concurrency: parseInt(process.env.VALIDATION_CONCURRENCY || '10', 10),
    timeout: parseInt(process.env.VALIDATION_TIMEOUT || '5000', 10),
    cacheTtlValid: parseInt(process.env.CACHE_TTL_VALID || '14400', 10), // 4 hours
    cacheTtlInvalid: parseInt(process.env.CACHE_TTL_INVALID || '3600', 10), // 1 hour
  },
  dbPath: process.env.DB_PATH || './data/cloudsearch.db',
};

// In production, refuse to boot with default security-critical values.
// (startup-validator performs a broader check; this is the hard stop.)
if (process.env.NODE_ENV === 'production') {
  if (!process.env.JWT_SECRET || process.env.JWT_SECRET === 'cloudsearch-jwt-secret-dev') {
    console.error('[FATAL] JWT_SECRET 未设置或使用了默认值,请在 .env 中设置强密码')
    process.exit(1)
  }
  if (!process.env.ADMIN_PASSWORD || process.env.ADMIN_PASSWORD === 'admin123') {
    console.error('[FATAL] ADMIN_PASSWORD 未设置或使用了默认值,请在 .env 中设置强密码')
    process.exit(1)
  }
}

export default config;
|
||||
110
packages/backend/src/config/startup-validator.ts
Normal file
110
packages/backend/src/config/startup-validator.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
/**
|
||||
* 启动时配置校验器
|
||||
*
|
||||
* 在服务器启动前验证关键配置项,生产环境缺少必需配置时拒绝启动。
|
||||
*/
|
||||
import config from '../config';
|
||||
|
||||
/** One configuration problem found during startup validation. */
export interface ValidationError {
  // Environment variable / config key the problem relates to.
  key: string;
  // Human-readable description printed in the startup log.
  message: string;
  // 'error' blocks startup in production (see checkStartup); 'warn' only logs.
  severity: 'error' | 'warn';
}
|
||||
|
||||
/**
 * Validate security-critical configuration, collecting (not throwing) issues.
 * Severity escalates to 'error' in production for secrets/passwords.
 *
 * @returns every detected problem; empty array when all checks pass
 */
export function validateConfig(): ValidationError[] {
  const errors: ValidationError[] = [];
  const isProd = config.nodeEnv === 'production';

  // ─── JWT Secret ─── reject known placeholder/default secrets.
  const DEFAULT_JWT_SECRETS = [
    'cloudsearch-jwt-secret-dev',
    'your-super-secret-jwt-key-change-me',
    '',
  ];
  if (DEFAULT_JWT_SECRETS.includes(config.jwtSecret)) {
    if (isProd) {
      errors.push({
        key: 'JWT_SECRET',
        message: '生产环境不允许使用默认 JWT Secret!请设置随机密钥(openssl rand -hex 32)',
        severity: 'error',
      });
    } else {
      // Tolerated in development, but surfaced as a warning.
      errors.push({
        key: 'JWT_SECRET',
        message: '开发环境使用了默认 JWT Secret,生产部署前必须修改',
        severity: 'warn',
      });
    }
  }

  // ─── Admin Password ─── flag well-known weak passwords.
  const weakPasswords = ['admin123', 'admin', 'password', '123456', ''];
  if (weakPasswords.includes(config.adminPassword)) {
    errors.push({
      key: 'ADMIN_PASSWORD',
      message: `弱管理员密码: "${config.adminPassword}",请设置强密码`,
      severity: isProd ? 'error' : 'warn',
    });
  }

  // ─── Cookie Encryption ─── without a key, stored cookies are plaintext.
  if (!process.env.COOKIE_ENCRYPTION_KEY) {
    errors.push({
      key: 'COOKIE_ENCRYPTION_KEY',
      message: '未设置网盘 Cookie 加密密钥!Cookie 将以明文存储。生产环境强烈建议设置。\n' +
        '生成: openssl rand -hex 32',
      severity: 'warn',
    });
  }

  // ─── CORS ─── warn when production still has no real allowed origin.
  const corsOrigin = process.env.CORS_ORIGIN || '';
  if (isProd && (!corsOrigin || corsOrigin === 'https://your-production-domain.com')) {
    errors.push({
      key: 'CORS_ORIGIN',
      message: '生产环境未配置真实的 CORS_ORIGIN,临时允许所有来源请求',
      severity: 'warn',
    });
  }

  // ─── Port conflict check (best-effort) ─── ports <1024 need root on POSIX;
  // process.getuid is absent on Windows, hence the optional call.
  if (config.port < 1024 && process.getuid?.() !== 0) {
    errors.push({
      key: 'PORT',
      message: `端口 ${config.port} 需要 root 权限(<1024),建议使用 9527 或更高端口`,
      severity: 'warn',
    });
  }

  return errors;
}
|
||||
|
||||
/**
|
||||
* Print validation results and return whether startup should proceed.
|
||||
* Returns false if any 'error' severity issue found in production.
|
||||
*/
|
||||
export function checkStartup(): boolean {
|
||||
const errors = validateConfig();
|
||||
const isProd = config.nodeEnv === 'production';
|
||||
let hasErrors = false;
|
||||
|
||||
if (errors.length === 0) {
|
||||
console.log('[Config] ✅ 所有配置检查通过');
|
||||
return true;
|
||||
}
|
||||
|
||||
console.log('[Config] ── 配置检查结果 ──');
|
||||
for (const err of errors) {
|
||||
const prefix = err.severity === 'error' ? '❌' : '⚠️';
|
||||
console.log(`[Config] ${prefix} [${err.severity.toUpperCase()}] ${err.key}: ${err.message}`);
|
||||
}
|
||||
|
||||
const criticalErrors = errors.filter(e => e.severity === 'error');
|
||||
if (criticalErrors.length > 0 && isProd) {
|
||||
console.error('[Config] 🛑 生产环境存在严重配置错误,拒绝启动。请修复后重试。');
|
||||
return false;
|
||||
}
|
||||
|
||||
console.log(`[Config] ${hasErrors ? '⚠️ 存在警告,继续启动' : '✅ 继续启动'}`);
|
||||
return true;
|
||||
}
|
||||
325
packages/backend/src/content/content.service.ts
Executable file
325
packages/backend/src/content/content.service.ts
Executable file
@@ -0,0 +1,325 @@
|
||||
// Native fetch available in Node 20+
|
||||
import { getDb } from '../database/database';
|
||||
import { localTimestamp } from '../utils/time';
|
||||
|
||||
/** Display metadata for a search keyword, resolved via TMDB (or cache). */
export interface ContentInfo {
  // The search keyword this metadata was resolved for.
  keyword: string;
  // Resolved title (movie title or TV series name).
  title: string;
  // Overview text, truncated to 200 chars; may be a synthesized summary.
  description: string;
  // Display tags derived from the keyword/title (see genTags).
  tags: string[];
  // Poster image URL (w500), empty when unavailable.
  cover: string;
  // Metadata origin, e.g. 'tmdb' or 'cache'.
  source: string;
  /** TMDB detail-page URL */
  tmdb_url?: string;
  /** Rating, e.g. "7.3" */
  rating?: string;
  /** Number of ratings, e.g. "12345" */
  rating_count?: string;
  /** Release year, e.g. "2025" */
  year?: string;
  /** Genre labels, e.g. ["动作", "科幻"] */
  genres?: string[];
  /** Director(s), '/'-separated, e.g. "克里斯托弗·诺兰" */
  directors?: string;
  /** Top-5 cast, '/'-separated, e.g. "基里安·墨菲 / 艾米莉·布朗特" */
  actors?: string;
  /** Production countries/regions, e.g. "美国 / 英国" */
  region?: string;
  /** Runtime, e.g. "180分钟" (movies) or "每集45分钟" (TV) */
  duration?: string;
}
|
||||
|
||||
// Cached TMDB lookups are reused for 7 days before a refresh is attempted.
const CACHE_TTL_MS = 7 * 24 * 60 * 60 * 1000;
|
||||
|
||||
/**
 * Look up display metadata (title, cover, rating, …) for a search keyword.
 *
 * Serves from the content_cache table when fresh (< CACHE_TTL_MS), otherwise
 * queries TMDB and upserts the row. Returns null when the keyword is empty,
 * no TMDB token is configured, or the lookup fails.
 */
export async function getContentInfo(keyword: string): Promise<ContentInfo | null> {
  if (!keyword || keyword.length < 1) return null;

  const db = getDb();
  // The TMDB bearer token lives in system_configs; feature off without it.
  const tmdbToken = (db.prepare('SELECT value FROM system_configs WHERE key = ?').get('tmdb_api_token') as any)?.value || '';
  if (!tmdbToken) return null;

  const cached = db.prepare('SELECT * FROM content_cache WHERE keyword = ?').get(keyword) as any;
  if (cached) {
    // NOTE(review): appending 'Z' parses updated_at as UTC, but the row is
    // written with localTimestamp() — if that helper emits local time, the
    // computed age is skewed by the timezone offset. Confirm they agree.
    const age = Date.now() - new Date(cached.updated_at + 'Z').getTime();
    if (age < CACHE_TTL_MS) {
      return rowToContentInfo(cached);
    }
  }

  try {
    const info = await fetchFromTMDB(keyword, tmdbToken);
    if (info) {
      // Upsert into the cache; tmdb_url is persisted in the legacy
      // douban_url column (see rowToContentInfo which reads it back).
      db.prepare(`
        INSERT OR REPLACE INTO content_cache
        (keyword, title, description, tags, cover, douban_url, source, updated_at,
         rating, rating_count, year, genres, directors, actors, region, duration)
        VALUES (?, ?, ?, ?, ?, ?, 'tmdb', ?,
                ?, ?, ?, ?, ?, ?, ?, ?)
      `).run(
        keyword, info.title, info.description, JSON.stringify(info.tags), info.cover, info.tmdb_url || '', localTimestamp(),
        info.rating || '', info.rating_count || '', info.year || '',
        JSON.stringify(info.genres || []), info.directors || '', info.actors || '',
        info.region || '', info.duration || ''
      );
      return info;
    }
  } catch (err) {
    console.error(`[Content] Failed to fetch for "${keyword}":`, err);
  }
  return null;
}
|
||||
|
||||
function rowToContentInfo(row: any): ContentInfo {
|
||||
return {
|
||||
keyword: row.keyword,
|
||||
title: row.title || '',
|
||||
description: row.description || '',
|
||||
tags: safeParseTags(row.tags),
|
||||
cover: row.cover || '',
|
||||
source: row.source || (row.title ? 'tmdb' : 'cache'),
|
||||
tmdb_url: row.douban_url || '',
|
||||
rating: row.rating || '',
|
||||
rating_count: row.rating_count || '',
|
||||
year: row.year || '',
|
||||
genres: safeParseTags(row.genres),
|
||||
directors: row.directors || '',
|
||||
actors: row.actors || '',
|
||||
region: row.region || '',
|
||||
duration: row.duration || '',
|
||||
};
|
||||
}
|
||||
|
||||
async function fetchFromTMDB(keyword: string, tmdbToken: string): Promise<ContentInfo | null> {
|
||||
// Step 1: TMDB search — search both movie and TV in parallel
|
||||
let movieResults: any[] = [];
|
||||
let tvResults: any[] = [];
|
||||
|
||||
try {
|
||||
const searchUrl = `https://api.themoviedb.org/3/search/movie?query=${encodeURIComponent(keyword)}&language=zh-CN&page=1`;
|
||||
const searchResp = await fetch(searchUrl, {
|
||||
headers: { 'Authorization': `Bearer ${tmdbToken}` },
|
||||
signal: AbortSignal.timeout(8000),
|
||||
});
|
||||
if (searchResp.ok) {
|
||||
const searchData = await searchResp.json() as any;
|
||||
if (Array.isArray(searchData.results)) {
|
||||
movieResults = searchData.results;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
console.warn(`[Content] TMDB movie search failed for "${keyword}"`);
|
||||
}
|
||||
|
||||
try {
|
||||
const searchUrl = `https://api.themoviedb.org/3/search/tv?query=${encodeURIComponent(keyword)}&language=zh-CN&page=1`;
|
||||
const searchResp = await fetch(searchUrl, {
|
||||
headers: { 'Authorization': `Bearer ${tmdbToken}` },
|
||||
signal: AbortSignal.timeout(8000),
|
||||
});
|
||||
if (searchResp.ok) {
|
||||
const searchData = await searchResp.json() as any;
|
||||
if (Array.isArray(searchData.results)) {
|
||||
tvResults = searchData.results;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
console.warn(`[Content] TMDB TV search failed for "${keyword}"`);
|
||||
}
|
||||
|
||||
// Step 2: Score and rank all results
|
||||
const isChineseKeyword = /[\u4e00-\u9fff]/.test(keyword);
|
||||
const kwLower = keyword.toLowerCase();
|
||||
|
||||
// Score function: higher = better match
|
||||
function scoreResult(item: any, type: 'tv' | 'movie'): number {
|
||||
const name = (type === 'tv' ? (item.name || item.original_name || '') : (item.title || item.original_title || '')).toLowerCase();
|
||||
// Exact match gets highest priority
|
||||
if (name === kwLower) return 100;
|
||||
// Name starts with keyword
|
||||
if (name.startsWith(kwLower)) return 80;
|
||||
// Name contains keyword as a standalone segment
|
||||
if (name.includes(kwLower)) return 60;
|
||||
// Keyword contains significant portion of name
|
||||
const cleanName = name.replace(/[^a-z0-9\u4e00-\u9fff]/g, '');
|
||||
if (kwLower.includes(cleanName) && cleanName.length >= 2) return 40;
|
||||
// Partial match
|
||||
if (name.includes(kwLower) || kwLower.includes(cleanName)) return 20;
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Score all TV results
|
||||
const scoredTV = tvResults.map((r: any) => ({ item: r, score: scoreResult(r, 'tv') })).filter(r => r.score > 0);
|
||||
// Score all movie results
|
||||
const scoredMovie = movieResults.map((r: any) => ({ item: r, score: scoreResult(r, 'movie') })).filter(r => r.score > 0);
|
||||
|
||||
// Sort by score descending
|
||||
scoredTV.sort((a, b) => b.score - a.score);
|
||||
scoredMovie.sort((a, b) => b.score - a.score);
|
||||
|
||||
const tvBest = scoredTV[0]?.item || null;
|
||||
const movieBest = scoredMovie[0]?.item || null;
|
||||
const tvBestScore = scoredTV[0]?.score || 0;
|
||||
const movieBestScore = scoredMovie[0]?.score || 0;
|
||||
|
||||
let best: any = null;
|
||||
let mediaType: 'movie' | 'tv' = 'movie';
|
||||
let movie: any = null;
|
||||
|
||||
if (tvBest && movieBest) {
|
||||
// Both have matches — score-based comparison
|
||||
// For Chinese keywords: TV gets +15 score bonus to prefer series over movies
|
||||
const tvScore = tvBestScore + (isChineseKeyword ? 15 : 0);
|
||||
const movieScore = movieBestScore;
|
||||
if (tvScore > movieScore) {
|
||||
best = tvBest;
|
||||
mediaType = 'tv';
|
||||
} else if (movieScore > tvScore) {
|
||||
best = movieBest;
|
||||
mediaType = 'movie';
|
||||
} else {
|
||||
// Tie — prefer TV for Chinese keywords, otherwise pick higher vote count
|
||||
if (isChineseKeyword) {
|
||||
best = tvBest;
|
||||
mediaType = 'tv';
|
||||
} else {
|
||||
const tvVotes = tvBest.vote_count || 0;
|
||||
const movieVotes = movieBest.vote_count || 0;
|
||||
best = tvVotes >= movieVotes ? tvBest : movieBest;
|
||||
mediaType = tvVotes >= movieVotes ? 'tv' : 'movie';
|
||||
}
|
||||
}
|
||||
} else if (tvBest) {
|
||||
best = tvBest;
|
||||
mediaType = 'tv';
|
||||
} else if (movieBest) {
|
||||
best = movieBest;
|
||||
mediaType = 'movie';
|
||||
} else if (scoredTV.length > 0 && !scoredMovie.length) {
|
||||
best = scoredTV[0].item;
|
||||
mediaType = 'tv';
|
||||
} else if (scoredMovie.length > 0) {
|
||||
best = scoredMovie[0].item;
|
||||
mediaType = 'movie';
|
||||
} else if (tvResults.length > 0 && !movieResults.length) {
|
||||
best = tvResults[0];
|
||||
mediaType = 'tv';
|
||||
} else if (movieResults.length > 0) {
|
||||
best = movieResults[0];
|
||||
mediaType = 'movie';
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
||||
let tmdbId = best.id;
|
||||
try {
|
||||
const detailUrl = `https://api.themoviedb.org/3/${mediaType}/${tmdbId}?language=zh-CN&append_to_response=credits`;
|
||||
const detailResp = await fetch(detailUrl, {
|
||||
headers: { 'Authorization': `Bearer ${tmdbToken}` },
|
||||
signal: AbortSignal.timeout(8000),
|
||||
});
|
||||
if (detailResp.ok) {
|
||||
movie = await detailResp.json() as any;
|
||||
}
|
||||
} catch {
|
||||
console.warn(`[Content] TMDB detail failed for ${mediaType} id ${tmdbId}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!movie) return null;
|
||||
|
||||
// Extract TMDB data (use title for movie, name for TV)
|
||||
const title = movie.title || movie.name || keyword;
|
||||
const rating = movie.vote_average > 0 ? String(Math.round(movie.vote_average * 10) / 10) : '';
|
||||
const ratingCount = movie.vote_count ? String(movie.vote_count) : '';
|
||||
// Use release_date for movie, first_air_date for TV
|
||||
const year = movie.release_date ? movie.release_date.substring(0, 4) : (movie.first_air_date ? movie.first_air_date.substring(0, 4) : '');
|
||||
const genres = Array.isArray(movie.genres) ? movie.genres.map((g: any) => g.name).filter(Boolean) : [];
|
||||
// Directors: tv shows have limited crew data, fall back to "creator" for TV
|
||||
const directors = Array.isArray(movie.credits?.crew)
|
||||
? movie.credits.crew.filter((c: any) => c.job === 'Director').map((c: any) => c.name).filter(Boolean).join(' / ')
|
||||
: '';
|
||||
const actors = Array.isArray(movie.credits?.cast)
|
||||
? movie.credits.cast.slice(0, 5).map((c: any) => c.name).filter(Boolean).join(' / ')
|
||||
: '';
|
||||
const region = Array.isArray(movie.production_countries)
|
||||
? movie.production_countries.map((c: any) => c.name).filter(Boolean).join(' / ')
|
||||
: (Array.isArray(movie.origin_country) ? movie.origin_country.join(' / ') : '');
|
||||
const duration = mediaType === 'movie'
|
||||
? (movie.runtime > 0 ? `${movie.runtime}分钟` : '')
|
||||
: (movie.episode_run_time && movie.episode_run_time.length > 0 ? `每集${movie.episode_run_time[0]}分钟` : '');
|
||||
const description = movie.overview ? movie.overview.substring(0, 200) : '';
|
||||
const cover = movie.poster_path ? `https://image.tmdb.org/t/p/w500${movie.poster_path}` : '';
|
||||
|
||||
// TMDB detail page URL
|
||||
const tmdbUrl = `https://www.themoviedb.org/${mediaType}/${tmdbId}`;
|
||||
|
||||
// Generate tags from keyword + title
|
||||
const tags = genTags({ keyword, title });
|
||||
|
||||
// Build description fallback
|
||||
let desc = description;
|
||||
if (!desc) {
|
||||
const parts: string[] = [];
|
||||
if (year) parts.push(`${year}年`);
|
||||
if (genres.length > 0) parts.push(genres.slice(0, 3).join(' / '));
|
||||
if (duration) parts.push(duration);
|
||||
desc = parts.length > 0 ? parts.join(' · ') : '';
|
||||
}
|
||||
|
||||
return {
|
||||
keyword,
|
||||
title,
|
||||
description: desc,
|
||||
tags,
|
||||
cover,
|
||||
source: 'tmdb',
|
||||
tmdb_url: tmdbUrl,
|
||||
rating,
|
||||
rating_count: ratingCount,
|
||||
year,
|
||||
genres,
|
||||
directors,
|
||||
actors,
|
||||
region,
|
||||
duration,
|
||||
};
|
||||
}
|
||||
|
||||
function genTags(opts: { keyword: string; title: string }): string[] {
|
||||
const { keyword, title } = opts;
|
||||
const tags: string[] = [];
|
||||
if (keyword.length <= 8) tags.push(keyword);
|
||||
|
||||
const txt = (title + ' ' + keyword).toLowerCase();
|
||||
const isDonghua = /动画|动漫/i.test(txt);
|
||||
if (isDonghua) {
|
||||
tags.push('动画'); tags.push('国漫');
|
||||
} else {
|
||||
tags.push('电影');
|
||||
}
|
||||
|
||||
const genreMap: Record<string, string[]> = {
|
||||
'动画': ['动画'], '动漫': ['动漫'], '国漫': ['国漫'],
|
||||
'剧场版': ['剧场版'], '年番': ['年番'],
|
||||
'动作': ['动作'], '奇幻': ['奇幻'], '玄幻': ['玄幻'],
|
||||
'仙侠': ['仙侠'], '古装': ['古装'], '爱情': ['爱情'],
|
||||
'科幻': ['科幻'], '喜剧': ['喜剧'], '悬疑': ['悬疑'],
|
||||
'冒险': ['冒险'], '战争': ['战争'], '纪录': ['纪录片'], '真人': ['真人秀'],
|
||||
};
|
||||
for (const [key, vals] of Object.entries(genreMap)) {
|
||||
if (txt.includes(key)) {
|
||||
for (const v of vals) { if (!tags.includes(v)) tags.push(v); }
|
||||
}
|
||||
}
|
||||
return tags;
|
||||
}
|
||||
|
||||
function safeParseTags(tagsStr: string | null | undefined): string[] {
|
||||
if (!tagsStr) return [];
|
||||
try {
|
||||
const parsed = JSON.parse(tagsStr);
|
||||
return Array.isArray(parsed) ? parsed : [];
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
615
packages/backend/src/database/admin.routes.ts
Normal file
615
packages/backend/src/database/admin.routes.ts
Normal file
@@ -0,0 +1,615 @@
|
||||
import { Router, Request, Response } from 'express';
|
||||
// Native fetch available in Node 20+
|
||||
import fs from "fs";
|
||||
import { execSync } from 'child_process';
|
||||
import { adminLimiter, loginLimiter } from '../middleware/rate-limit';
|
||||
import { getSaveRecords } from '../cloud/cloud.service';
|
||||
import { getCloudConfigs, getCloudConfigById, saveCloudConfig, deleteCloudConfig, getCloudConfigByType, testCloudConnection, testCloudConnectionWithCookie } from '../cloud/credential.service';
|
||||
// Note: check-in routes were removed (sign-in feature removed)
|
||||
import { getAllCloudTypes } from '../cloud/cloud-types.service';
|
||||
import { login, authMiddleware, verifyToken, changePassword } from '../admin/auth.service';
|
||||
import { getStats } from '../admin/stats.service';
|
||||
import { getAllSystemConfigs, updateSystemConfig, updateSystemConfigs, getSystemConfig } from '../admin/system-config.service';
|
||||
import { testProxyConnection } from '../utils/proxy-agent';
|
||||
import { getDb } from '../database/database';
|
||||
import { reconnectRedis, testRedisConnection } from '../middleware/cache';
|
||||
import { startQrLogin, getQrLoginStatus, cancelQrLogin } from '../cloud/qr-login.service';
|
||||
import { BaiduDriver } from '../cloud/drivers/baidu.driver';
|
||||
|
||||
// Single Express router for all admin endpoints. Routes register paths like
// '/admin/...'; the docstrings below reference '/api/admin/...', so this
// router is presumably mounted under '/api' by the app entry — confirm there.
const router = Router();

// ═══════════════════════════════════════
// Public routes (no auth required)
// ═══════════════════════════════════════
|
||||
|
||||
/**
|
||||
* POST /api/admin/login
|
||||
* Admin login
|
||||
*/
|
||||
router.post('/admin/login', loginLimiter, (req: Request, res: Response) => {
|
||||
try {
|
||||
const { username, password } = req.body;
|
||||
if (!username || !password) {
|
||||
res.status(400).json({ error: 'Username and password are required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const token = login(username, password);
|
||||
if (!token) {
|
||||
res.status(401).json({ error: 'Invalid credentials' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ token });
|
||||
} catch (err: any) {
|
||||
console.error('[Login] Error:', err);
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/admin/cloud-types
|
||||
* List all cloud types (public, read-only).
|
||||
*/
|
||||
router.get('/admin/cloud-types', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const types = getAllCloudTypes();
|
||||
res.json({ types });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// QR Login routes (no auth — user not logged in yet)
|
||||
// MUST be before authMiddleware!
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
// ===== 夸克扫码登录 =====
|
||||
router.post('/admin/quark/qr-login/start', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await startQrLogin();
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/admin/quark/qr-login/:sessionId/status', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
const result = await getQrLoginStatus(sessionId);
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/admin/quark/qr-login/:sessionId/cancel', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
await cancelQrLogin(sessionId);
|
||||
res.json({ ok: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
// ===== 百度扫码登录 =====
|
||||
router.post("/admin/baidu/qr-login/start", async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await BaiduDriver.startQrLogin();
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/admin/baidu/qr-login/:sessionId/status", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
const result: any = await BaiduDriver.getQrLoginStatus(sessionId);
|
||||
// Map to frontend-expected format (frontend reads data.cookie)
|
||||
res.json({
|
||||
ok: true,
|
||||
status: result.status,
|
||||
cookie: result.cookie || result.access_token || "",
|
||||
nickname: result.nickname || "",
|
||||
storage_used: result.storage_used || "",
|
||||
storage_total: result.storage_total || "",
|
||||
});
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/admin/baidu/qr-login/:sessionId/cancel", async (req: Request, res: Response) => {
|
||||
try {
|
||||
BaiduDriver.cancelQrLogin(req.params.sessionId as string);
|
||||
} catch {}
|
||||
res.json({ ok: true });
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
// Auth wall — all routes below require JWT
// ═══════════════════════════════════════
// authMiddleware validates the Bearer token on every /admin route registered
// after this line; the login and QR-login routes above remain public.
router.use('/admin', authMiddleware);
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Cloud Configs CRUD
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/cloud-configs — list all cloud configs */
|
||||
router.get('/admin/cloud-configs', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const configs = getCloudConfigs();
|
||||
res.json(configs);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to fetch cloud configs' });
|
||||
}
|
||||
});
|
||||
|
||||
/** POST /api/admin/cloud-configs — create or smart-replace a cloud config */
|
||||
router.post('/admin/cloud-configs', (req: Request, res: Response) => {
|
||||
try {
|
||||
const data = req.body;
|
||||
if (!data.cloud_type) {
|
||||
res.status(400).json({ error: 'cloud_type is required' });
|
||||
return;
|
||||
}
|
||||
// Normalize is_active: frontend sends boolean, SQLite needs 0/1
|
||||
if (typeof data.is_active === 'boolean') data.is_active = data.is_active ? 1 : 0;
|
||||
// Normalize is_transfer_enabled: frontend sends boolean, SQLite needs 0/1
|
||||
if (typeof data.is_transfer_enabled === 'boolean') data.is_transfer_enabled = data.is_transfer_enabled ? 1 : 0;
|
||||
const saved = saveCloudConfig(data);
|
||||
res.json(saved);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to save cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** PUT /api/admin/cloud-configs/:id — update an existing cloud config */
|
||||
router.put('/admin/cloud-configs/:id', (req: Request, res: Response) => {
|
||||
try {
|
||||
const id = parseInt(req.params.id as string);
|
||||
const existing = getCloudConfigById(id);
|
||||
if (!existing) {
|
||||
res.status(404).json({ error: 'Cloud config not found' });
|
||||
return;
|
||||
}
|
||||
const saved = saveCloudConfig({ ...req.body, id });
|
||||
res.json(saved);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to update cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** DELETE /api/admin/cloud-configs/:id */
|
||||
router.delete('/admin/cloud-configs/:id', (req: Request, res: Response) => {
|
||||
try {
|
||||
const id = parseInt(req.params.id as string);
|
||||
const ok = deleteCloudConfig(id);
|
||||
if (!ok) {
|
||||
res.status(404).json({ error: 'Cloud config not found' });
|
||||
return;
|
||||
}
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to delete cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** POST /api/admin/cloud-configs/:type/test — test cloud connection (by type or id) */
|
||||
router.post('/admin/cloud-configs/:type/test', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const type = req.params.type as string;
|
||||
const { cookie, id } = req.body;
|
||||
|
||||
// If cookie is provided directly, test with it (for new configs not yet saved)
|
||||
if (cookie) {
|
||||
const result = await testCloudConnectionWithCookie(type, cookie);
|
||||
res.json(result);
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise test by config id
|
||||
if (id) {
|
||||
const result = await testCloudConnection(parseInt(id));
|
||||
res.json(result);
|
||||
return;
|
||||
}
|
||||
|
||||
res.status(400).json({ success: false, message: 'Provide either cookie or id' });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ success: false, message: err.message || 'Connection test failed' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Stats
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/stats */
|
||||
router.get('/admin/stats', (req: Request, res: Response) => {
|
||||
try {
|
||||
const days = req.query.days ? parseInt(req.query.days as string) : 7;
|
||||
const stats = getStats(days);
|
||||
res.json(stats);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get stats' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Save Records (转存日志)
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/save-records */
|
||||
router.get('/admin/save-records', (req: Request, res: Response) => {
|
||||
try {
|
||||
const page = parseInt(req.query.page as string) || 1;
|
||||
const pageSize = parseInt(req.query.pageSize as string) || 20;
|
||||
const startDate = req.query.startDate as string | undefined;
|
||||
const endDate = req.query.endDate as string | undefined;
|
||||
const status = req.query.status as string | undefined;
|
||||
const sourceType = req.query.sourceType as string | undefined;
|
||||
const keyword = req.query.keyword as string | undefined;
|
||||
const result = getSaveRecords(page, pageSize, startDate, endDate, status, sourceType, keyword);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get save records' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// System Configs
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/system-configs */
|
||||
router.get('/admin/system-configs', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const configs = getAllSystemConfigs();
|
||||
res.json(configs);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get system configs' });
|
||||
}
|
||||
});
|
||||
|
||||
/** PUT /api/admin/system-configs — batch update */
|
||||
router.put('/admin/system-configs', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { entries } = req.body;
|
||||
if (!entries || !Array.isArray(entries)) {
|
||||
res.status(400).json({ error: 'entries array is required' });
|
||||
return;
|
||||
}
|
||||
updateSystemConfigs(entries);
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to update system configs' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Cloud Types Toggle
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** PUT /api/admin/cloud-types — toggle cloud type enabled/disabled */
|
||||
router.put('/admin/cloud-types', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { type, enabled } = req.body;
|
||||
if (!type) {
|
||||
res.status(400).json({ error: 'type is required' });
|
||||
return;
|
||||
}
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
`INSERT INTO system_configs (key, value, description) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value = excluded.value`
|
||||
).run(`cloud_type_${type}_enabled`, enabled ? '1' : '0', `Enable/disable ${type} cloud drive`);
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to toggle cloud type' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Change Password
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** POST /api/admin/change-password */
|
||||
router.post('/admin/change-password', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { oldPassword, newPassword } = req.body;
|
||||
if (!oldPassword || !newPassword) {
|
||||
res.status(400).json({ error: 'Both old and new passwords are required' });
|
||||
return;
|
||||
}
|
||||
// Get username from JWT
|
||||
const authHeader = req.headers.authorization || '';
|
||||
const token = authHeader.replace('Bearer ', '');
|
||||
const payload = verifyToken(token);
|
||||
if (!payload) {
|
||||
res.status(401).json({ error: 'Invalid token' });
|
||||
return;
|
||||
}
|
||||
const result = changePassword(payload.username, oldPassword, newPassword);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to change password' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
// DB Status
// ═══════════════════════════════════════

/**
 * GET /api/admin/db-status — SQLite file size, per-table row counts and
 * Redis connectivity for the admin dashboard. Each probe degrades
 * gracefully so one failing component does not hide the others.
 */
router.get('/admin/db-status', async (_req: Request, res: Response) => {
  try {
    // DB file size — best effort; path comes from system config and may be unset.
    const dbFile = getSystemConfig('db_path') || '';
    let dbSize = 'N/A';
    if (dbFile) {
      try {
        const stats = fs.statSync(dbFile);
        dbSize = (stats.size / 1024 / 1024).toFixed(2) + ' MB';
      } catch {}
    }

    // Row counts for each table shown on the dashboard.
    const db = getDb();
    const counts = {
      save_records: (db.prepare('SELECT COUNT(*) as c FROM save_records').get() as any)?.c || 0,
      search_stats: (db.prepare('SELECT COUNT(*) as c FROM search_stats').get() as any)?.c || 0,
      system_configs: (db.prepare('SELECT COUNT(*) as c FROM system_configs').get() as any)?.c || 0,
      cloud_configs: (db.prepare('SELECT COUNT(*) as c FROM cloud_configs').get() as any)?.c || 0,
      content_cache: (db.prepare('SELECT COUNT(*) as c FROM content_cache').get() as any)?.c || 0,
    };

    // Redis status — probe the configured URL; a thrown probe maps to 'error',
    // a clean-but-failed probe maps to 'disconnected'.
    let redis_status = 'disconnected';
    let redis_url = getSystemConfig('redis_url') || '';
    try {
      const testResult = await testRedisConnection(redis_url);
      redis_status = testResult.ok ? 'connected' : 'disconnected';
    } catch {
      redis_status = 'error';
    }

    res.json({
      db_size: dbSize,
      db_path: dbFile,
      ...counts,
      redis_status,
      redis_url,
    });
  } catch (err: any) {
    res.status(500).json({ error: err.message || 'Failed to get DB status' });
  }
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Test Redis Connection
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** POST /api/admin/test-redis */
|
||||
router.post('/admin/test-redis', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { url } = req.body;
|
||||
if (!url) {
|
||||
res.status(400).json({ ok: false, info: 'Redis URL is required' });
|
||||
return;
|
||||
}
|
||||
const result = await testRedisConnection(url);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, info: err.message || 'Redis test failed' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
// Test External Service
// ═══════════════════════════════════════

/**
 * POST /api/admin/test-external-service — connectivity probe for the external
 * services the app depends on (PanSou, video parser, TMDB, proxy, IP-geo).
 * Body: { type, url?, token? }; explicit url/token override the stored system
 * config so the admin UI can test values before saving them.
 * Probe failures respond 200 with { ok: false }; 500 only on unexpected errors.
 */
router.post('/admin/test-external-service', async (req: Request, res: Response) => {
  try {
    const { type, url, token } = req.body;
    const start = Date.now(); // for round-trip latency reporting

    switch (type) {
      case 'pansou': {
        const pansouUrl = url || getSystemConfig('pansou_url') || '';
        if (!pansouUrl) {
          res.json({ ok: false, info: 'PanSou URL not configured' });
          return;
        }
        // PanSou health endpoint also reports channel/plugin counts.
        const response = await fetch(pansouUrl + '/api/health', { signal: AbortSignal.timeout(8000) });
        const data: any = await response.json();
        const latency = Date.now() - start;
        res.json({
          ok: response.ok && data?.status === 'ok',
          latency,
          info: response.ok ? `连接成功 (${data?.channels_count || 0} 频道, ${data?.plugin_count || 0} 插件)` : '连接失败',
        });
        break;
      }
      case 'video_parser': {
        const parserUrl = url || getSystemConfig('video_parser_url') || '';
        if (!parserUrl) {
          res.json({ ok: false, info: 'Video Parser URL not configured' });
          return;
        }
        const response = await fetch(parserUrl + '/health', { signal: AbortSignal.timeout(8000) });
        const latency = Date.now() - start;
        res.json({
          ok: response.ok,
          latency,
          info: response.ok ? '连接成功' : `HTTP ${response.status}`,
        });
        break;
      }
      case 'tmdb': {
        const tmdbToken = token || getSystemConfig('tmdb_api_key') || '';
        if (!tmdbToken) {
          res.json({ ok: false, info: 'TMDB API Key not configured' });
          return;
        }
        // /configuration is a cheap authenticated endpoint — validates the token.
        const response = await fetch('https://api.themoviedb.org/3/configuration', {
          headers: { Authorization: `Bearer ${tmdbToken}` },
          signal: AbortSignal.timeout(8000),
        });
        const latency = Date.now() - start;
        res.json({
          ok: response.ok,
          latency,
          info: response.ok ? '连接成功' : `HTTP ${response.status}`,
        });
        break;
      }
      case 'proxy': {
        const proxyUrl = url || getSystemConfig('search_proxy_url') || '';
        if (!proxyUrl) {
          res.json({ ok: false, info: 'Proxy URL not configured' });
          return;
        }
        // Delegates to the proxy-agent helper; result already matches {ok, info}.
        const result = await testProxyConnection(proxyUrl);
        res.json(result);
        break;
      }
      case 'ip_geo': {
        const geoUrl = url || getSystemConfig('ip_geo_api_url') || '';
        if (!geoUrl) {
          res.json({ ok: false, info: '请先输入 IP 归属地查询 API 地址' });
          return;
        }
        // The configured URL contains an {ip} placeholder; probe with 8.8.8.8.
        const testUrl = geoUrl.replace('{ip}', '8.8.8.8');
        const response = await fetch(testUrl, { signal: AbortSignal.timeout(8000) });
        const data: any = await response.json();
        const latency = Date.now() - start;
        // Accept any response that carries at least one recognizable geo field.
        const valid = !!(data?.country || data?.region || data?.city || data?.countryCode);
        res.json({ ok: valid, latency, info: valid ? '连接成功' : '响应格式不符' });
        break;
      }
      default:
        res.json({ ok: false, info: `Unknown service type: ${type}` });
    }
  } catch (err: any) {
    res.status(500).json({ ok: false, info: err.message || 'External service test failed' });
  }
});
|
||||
|
||||
// ═══════════════════════════════════════
// Pansou Info & Update
// ═══════════════════════════════════════

/**
 * GET /api/admin/pansou-info — pansou health + version + update check.
 *
 * Combines three probes:
 *  1. PanSou /api/health (channel/plugin counts, connectivity),
 *  2. the local container image's build date via `docker inspect`
 *     (requires access to the host Docker socket — confirm deployment),
 *  3. a GHCR registry walk for the latest image's build date, cached in
 *     /tmp for 3 days to avoid hammering the registry.
 * All failures are swallowed; the route always answers 200 with a status.
 */
router.get('/admin/pansou-info', async (_req: Request, res: Response) => {
  try {
    const baseUrl = getSystemConfig('pansou_url') || '';
    if (!baseUrl) {
      // Not configured — report a fully-empty, disconnected payload.
      res.json({ status: 'disconnected', channelCount: 0, pluginCount: 0, diskCount: 0, version: '', hasUpdate: false, latestVersion: '' });
      return;
    }

    // Fetch PanSou health
    const healthUrl = baseUrl + '/api/health';
    const response = await fetch(healthUrl, { signal: AbortSignal.timeout(8000) });
    const healthData: any = await response.json();
    const channelCount = healthData.channels_count || 0;
    const pluginCount = healthData.plugin_count || 0;

    // Derive disk count heuristically from drive keywords in channel names.
    const driveKeywords = ['aliyun', 'baidu', 'quark', '115', 'pikpak', 'xunlei', 'uc', '123', '139', '189', 'tianyi', 'netease'];
    const drives = new Set<string>();
    for (const ch of (healthData.channels || [])) {
      for (const kw of driveKeywords) {
        if (ch.toLowerCase().includes(kw)) { drives.add(kw); break; }
      }
    }
    // Fall back to 5 when channel names reveal nothing.
    const diskCount = drives.size || 5;

    // Local "version" = image build date from the OCI created label (YYYY-MM-DD).
    let version = '';
    let hasUpdate = false;
    let latestVersion = '';
    try {
      const created = execSync(
        `docker inspect CloudSearch_PanSou --format '{{index .Config.Labels "org.opencontainers.image.created"}}'`,
        { timeout: 5000, encoding: 'utf8' }
      ).trim();
      version = created ? created.slice(0, 10) : '';

      // Update check is cached on disk and refreshed at most every 3 days.
      const cacheFile = '/tmp/pansou-update-cache.json';
      let cache: any = null;
      try { cache = JSON.parse(fs.readFileSync(cacheFile, 'utf8') || 'null'); } catch {}
      const threeDays = 3 * 24 * 3600 * 1000;

      if (!cache || (Date.now() - cache.checkedAt) > threeDays) {
        // Walk GHCR: anonymous pull token → manifest list → amd64 manifest →
        // image config blob → its created label.
        try {
          const tokenRes = await fetch(
            'https://ghcr.io/token?scope=repository:fish2018/pansou-web:pull&service=ghcr.io'
          );
          const ghcrToken = (await tokenRes.json() as any).token;
          const manifestRes = await fetch(
            'https://ghcr.io/v2/fish2018/pansou-web/manifests/latest',
            { headers: { Authorization: `Bearer ${ghcrToken}`, Accept: 'application/vnd.oci.image.index.v1+json, application/vnd.docker.distribution.manifest.list.v2+json' } }
          );
          const manifestList: any = await manifestRes.json();
          const amd64 = manifestList.manifests?.find((m: any) => m.platform?.architecture === 'amd64' && m.platform?.os === 'linux');
          if (amd64) {
            const blobRes = await fetch(
              `https://ghcr.io/v2/fish2018/pansou-web/manifests/${amd64.digest}`,
              { headers: { Authorization: `Bearer ${ghcrToken}`, Accept: 'application/vnd.oci.image.manifest.v1+json' } }
            );
            const blobData: any = await blobRes.json();
            const cfgDigest = blobData.config?.digest;
            if (cfgDigest) {
              const cfgRes = await fetch(
                `https://ghcr.io/v2/fish2018/pansou-web/blobs/${cfgDigest}`,
                { headers: { Authorization: `Bearer ${ghcrToken}` } }
              );
              const cfgData: any = await cfgRes.json();
              const remoteCreated = cfgData.config?.Labels?.['org.opencontainers.image.created'];
              if (remoteCreated) {
                latestVersion = remoteCreated.slice(0, 10);
                // Any difference in build date counts as an available update.
                if (version && latestVersion !== version) hasUpdate = true;
              }
            }
          }
        } catch {}
        // Cache even a failed lookup so we don't retry on every request.
        fs.writeFileSync(cacheFile, JSON.stringify({ checkedAt: Date.now(), hasUpdate, latestVersion }));
      } else {
        hasUpdate = cache.hasUpdate;
        latestVersion = cache.latestVersion;
      }
    } catch {}

    res.json({
      status: response.ok ? 'connected' : 'disconnected',
      channelCount,
      pluginCount,
      diskCount,
      version,
      hasUpdate,
      latestVersion,
    });
  } catch (err: any) {
    // Health fetch failed entirely — still answer 200 with an error status.
    res.json({ status: 'error', channelCount: 0, pluginCount: 0, diskCount: 0, version: '', hasUpdate: false, latestVersion: '', error: err.message });
  }
});
|
||||
|
||||
/**
 * POST /api/admin/update-pansou — pull latest pansou image + recreate container.
 * NOTE(review): shells out to the host Docker daemon and expects a compose file
 * at /app/docker-compose.yml — confirm the backend container is set up this way.
 */
router.post('/admin/update-pansou', async (_req: Request, res: Response) => {
  try {
    // Pull the new image, then recreate only the pansou service.
    execSync('docker pull ghcr.io/fish2018/pansou-web:latest', { timeout: 120000 });
    execSync('docker compose -p cloudsearch -f /app/docker-compose.yml up -d pansou', { timeout: 60000 });
    // Drop the stale update-check cache so /pansou-info reflects the new version.
    try { fs.unlinkSync('/tmp/pansou-update-cache.json'); } catch {}
    res.json({ success: true, message: 'PanSou 更新成功' });
  } catch (err: any) {
    res.status(500).json({ success: false, error: err.message || 'PanSou 更新失败' });
  }
});
|
||||
|
||||
export default router;
|
||||
330
packages/backend/src/database/database.ts
Executable file
330
packages/backend/src/database/database.ts
Executable file
@@ -0,0 +1,330 @@
|
||||
import fs from 'fs';
import path from 'path';
import Database from 'better-sqlite3';
import bcrypt from 'bcryptjs';
import config from '../config';
import { formatLocalDateTime } from '../utils/time';
|
||||
|
||||
let db: Database.Database | null = null;
|
||||
|
||||
export function getDb(): Database.Database {
|
||||
if (db) return db;
|
||||
|
||||
const dbDir = path.dirname(config.dbPath);
|
||||
const fs = require('fs');
|
||||
if (!fs.existsSync(dbDir)) {
|
||||
fs.mkdirSync(dbDir, { recursive: true });
|
||||
}
|
||||
|
||||
db = new Database(config.dbPath);
|
||||
db.pragma('journal_mode = WAL');
|
||||
db.pragma('foreign_keys = ON');
|
||||
|
||||
runMigrations(db);
|
||||
seedAdmin(db);
|
||||
|
||||
return db;
|
||||
}
|
||||
|
||||
/**
 * Create all tables on first run (idempotent via IF NOT EXISTS), then apply
 * incremental column migrations, seed default configs, and run one-off
 * cleanups. Called once from getDb().
 */
function runMigrations(db: Database.Database): void {
  db.exec(`
    CREATE TABLE IF NOT EXISTS admins (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      username TEXT UNIQUE NOT NULL,
      password_hash TEXT NOT NULL,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
      last_login TEXT
    );

    CREATE TABLE IF NOT EXISTS cloud_configs (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      cloud_type TEXT NOT NULL,
      cookie TEXT,
      nickname TEXT,
      is_active INTEGER NOT NULL DEFAULT 1,
      storage_used TEXT,
      storage_total TEXT,
      checkin_status TEXT NOT NULL DEFAULT 'none',
      last_checkin_at TEXT,
      checkin_message TEXT,
      consecutive_failures INTEGER DEFAULT 0,
      last_used_at TEXT,
      total_saves INTEGER DEFAULT 0,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS promotions (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      title TEXT NOT NULL,
      description TEXT,
      image_url TEXT,
      link_url TEXT,
      position TEXT,
      sort_order INTEGER NOT NULL DEFAULT 0,
      active INTEGER NOT NULL DEFAULT 1,
      click_count INTEGER NOT NULL DEFAULT 0,
      start_time TEXT,
      end_time TEXT,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS save_records (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      source_type TEXT,
      source_title TEXT,
      source_url TEXT,
      target_cloud TEXT,
      share_url TEXT,
      share_pwd TEXT,
      file_size TEXT,
      file_count INTEGER DEFAULT 0,
      duration_ms INTEGER DEFAULT 0,
      status TEXT NOT NULL DEFAULT '',
      error_message TEXT,
      ip_address TEXT,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS search_stats (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      keyword TEXT,
      intent TEXT,
      result_count INTEGER DEFAULT 0,
      ip_address TEXT,
      created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS hot_keywords (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      keyword TEXT UNIQUE NOT NULL,
      search_count INTEGER NOT NULL DEFAULT 1,
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS system_configs (
      key TEXT PRIMARY KEY,
      value TEXT NOT NULL DEFAULT '',
      description TEXT,
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );

    CREATE TABLE IF NOT EXISTS content_cache (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      keyword TEXT UNIQUE NOT NULL,
      title TEXT,
      description TEXT,
      tags TEXT,
      cover TEXT,
      source TEXT,
      updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
    );
  `);
  // Seed defaults, then patch pre-existing databases up to the current schema.
  seedSystemConfigs(db);
  migrateSaveRecords(db);
  migrateContentCache(db);
  migrateCloudConfigs(db);
  cleanupOldSaveRecords(db);
}
|
||||
|
||||
/** 迁移: 给已有 save_records 表补充新列 */
|
||||
function migrateSaveRecords(db: Database.Database): void {
|
||||
const newCols: { col: string; def: string }[] = [
|
||||
{ col: 'share_pwd', def: 'TEXT' },
|
||||
{ col: 'file_count', def: 'INTEGER DEFAULT 0' },
|
||||
{ col: 'folder_count', def: 'INTEGER DEFAULT 0' },
|
||||
{ col: 'duration_ms', def: 'INTEGER DEFAULT 0' },
|
||||
{ col: 'status', def: "TEXT NOT NULL DEFAULT ''" },
|
||||
{ col: 'error_message', def: 'TEXT' },
|
||||
{ col: 'folder_name', def: 'TEXT' },
|
||||
{ col: 'request_url', def: 'TEXT' },
|
||||
{ col: 'ip_location', def: 'TEXT' },
|
||||
{ col: 'original_folder_name', def: 'TEXT' },
|
||||
];
|
||||
for (const { col, def } of newCols) {
|
||||
try {
|
||||
db.exec(`ALTER TABLE save_records ADD COLUMN ${col} ${def}`);
|
||||
} catch {
|
||||
// Column already exists — ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** 迁移: 给 content_cache 表加 douban_url 列 */
|
||||
function migrateContentCache(db: Database.Database): void {
|
||||
const columns: { col: string; def: string }[] = [
|
||||
{ col: 'douban_url', def: 'TEXT' },
|
||||
{ col: 'rating', def: 'TEXT' },
|
||||
{ col: 'rating_count', def: 'TEXT' },
|
||||
{ col: 'year', def: 'TEXT' },
|
||||
{ col: 'genres', def: 'TEXT' },
|
||||
{ col: 'directors', def: 'TEXT' },
|
||||
{ col: 'actors', def: 'TEXT' },
|
||||
{ col: 'region', def: 'TEXT' },
|
||||
{ col: 'duration', def: 'TEXT' },
|
||||
];
|
||||
for (const { col, def } of columns) {
|
||||
try {
|
||||
db.exec(`ALTER TABLE content_cache ADD COLUMN ${col} ${def}`);
|
||||
} catch {
|
||||
// Column already exists — ignore
|
||||
}
|
||||
}
|
||||
// 修复旧记录:source 为 NULL 但实际有 TMDB 数据的,标记为 tmdb
|
||||
db.exec(`UPDATE content_cache SET source = 'tmdb' WHERE source IS NULL AND title IS NOT NULL AND title != ''`);
|
||||
}
|
||||
|
||||
/**
 * Migration: drop the UNIQUE constraint on cloud_configs.cloud_type and add
 * check-in / rotation bookkeeping columns.
 *
 * Runs five sequential, idempotent sub-migrations:
 *   1. Add check-in/rotation columns (ALTER; failure = already applied).
 *   2. If the original table still has `cloud_type TEXT UNIQUE`, rebuild it
 *      via a v2 copy table (SQLite cannot drop a constraint in place).
 *   3-5. Add verification_status / cloud_type_uid / promotion_account /
 *      is_transfer_enabled columns, each guarded by a sqlite_master probe.
 */
function migrateCloudConfigs(db: Database.Database): void {
  // Add new columns (no-op when already present).
  const newCols: { col: string; def: string }[] = [
    { col: 'checkin_status', def: "TEXT NOT NULL DEFAULT 'none'" },
    { col: 'last_checkin_at', def: 'TEXT' },
    { col: 'checkin_message', def: 'TEXT' },
    { col: 'consecutive_failures', def: 'INTEGER DEFAULT 0' },
    { col: 'last_used_at', def: 'TEXT' },
    { col: 'total_saves', def: 'INTEGER DEFAULT 0' },
  ];
  for (const { col, def } of newCols) {
    try { db.exec(`ALTER TABLE cloud_configs ADD COLUMN ${col} ${def}`); } catch {}
  }
  // Check whether the old table still carries the UNIQUE constraint; if so,
  // rebuild the table (copy → drop → rename), preserving row ids.
  const row = db.prepare(`SELECT sql FROM sqlite_master WHERE type='table' AND name='cloud_configs'`).get() as any;
  if (row && row.sql && row.sql.includes('cloud_type TEXT UNIQUE')) {
    db.exec(`
      CREATE TABLE IF NOT EXISTS cloud_configs_v2 (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        cloud_type TEXT NOT NULL,
        cookie TEXT,
        nickname TEXT,
        is_active INTEGER NOT NULL DEFAULT 1,
        storage_used TEXT,
        storage_total TEXT,
        checkin_status TEXT NOT NULL DEFAULT 'none',
        last_checkin_at TEXT,
        checkin_message TEXT,
        consecutive_failures INTEGER DEFAULT 0,
        last_used_at TEXT,
        total_saves INTEGER DEFAULT 0,
        created_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime')),
        updated_at TEXT NOT NULL DEFAULT (datetime('now', 'localtime'))
      );
      INSERT INTO cloud_configs_v2 (id, cloud_type, cookie, nickname, is_active, storage_used, storage_total, checkin_status, last_checkin_at, checkin_message, consecutive_failures, last_used_at, total_saves, created_at, updated_at)
      SELECT id, cloud_type, cookie, nickname, is_active, storage_used, storage_total, COALESCE(checkin_status,'none'), last_checkin_at, checkin_message, COALESCE(consecutive_failures,0), last_used_at, COALESCE(total_saves,0), created_at, updated_at FROM cloud_configs;
      DROP TABLE cloud_configs;
      ALTER TABLE cloud_configs_v2 RENAME TO cloud_configs;
    `);
    console.log('[DB] cloud_configs migration: UNIQUE constraint removed, new fields added');
  }

  // Migration 2: Add verification_status column
  const row2 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%verification_status%'").get();
  if (!row2) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN verification_status TEXT DEFAULT NULL");
    console.log('[DB] cloud_configs migration: verification_status column added');
  }

  // Migration 3: Add cloud_type_uid column (for Quark __uid dedup)
  const row3 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%cloud_type_uid%'").get();
  if (!row3) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN cloud_type_uid TEXT DEFAULT NULL");
    console.log('[DB] cloud_configs migration: cloud_type_uid column added');
  }

  // Migration 4: Add promotion_account column
  const row4 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%promotion_account%'").get();
  if (!row4) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN promotion_account TEXT DEFAULT ''");
    console.log('[DB] cloud_configs migration: promotion_account column added');
  }

  // Migration 5: Add is_transfer_enabled column
  const row5 = db.prepare("SELECT sql FROM sqlite_master WHERE name='cloud_configs' AND sql LIKE '%is_transfer_enabled%'").get();
  if (!row5) {
    db.exec("ALTER TABLE cloud_configs ADD COLUMN is_transfer_enabled INTEGER DEFAULT 1");
    console.log('[DB] cloud_configs migration: is_transfer_enabled column added');
  }
}
|
||||
|
||||
function seedAdmin(db: Database.Database): void {
|
||||
const existing = db.prepare('SELECT id FROM admins WHERE username = ?').get(config.adminUsername);
|
||||
if (existing) return;
|
||||
|
||||
const salt = bcrypt.genSaltSync(10);
|
||||
const hash = bcrypt.hashSync(config.adminPassword, salt);
|
||||
|
||||
db.prepare(
|
||||
'INSERT INTO admins (username, password_hash) VALUES (?, ?)'
|
||||
).run(config.adminUsername, hash);
|
||||
|
||||
console.log(`[DB] Admin user "${config.adminUsername}" created`);
|
||||
}
|
||||
|
||||
/**
 * Seed default system_configs rows.
 *
 * Uses INSERT OR IGNORE so existing keys are never overwritten — defaults
 * only apply on first boot; admin edits survive restarts and upgrades.
 */
function seedSystemConfigs(db: Database.Database): void {
  const defaults: { key: string; value: string; description: string }[] = [
    // Upstream service endpoints (initial values come from env-backed config).
    { key: 'pansou_url', value: config.pansouUrl, description: 'PanSou 搜索引擎服务地址' },
    { key: 'video_parser_url', value: config.videoParserUrl, description: '视频解析服务地址' },
    // Link-validation tuning knobs.
    { key: 'validation_concurrency', value: String(config.validation.concurrency), description: '链接验证并发数' },
    { key: 'validation_timeout', value: String(config.validation.timeout), description: '链接验证超时(ms)' },
    { key: 'validation_cache_ttl_valid', value: String(config.validation.cacheTtlValid), description: '有效链接缓存时间(s)' },
    { key: 'validation_cache_ttl_invalid', value: String(config.validation.cacheTtlInvalid), description: '无效链接缓存时间(s)' },
    // Search behaviour.
    { key: 'search_proxy_enabled', value: 'false', description: '搜索代理开关(true/false)' },
    { key: 'search_proxy_url', value: '', description: '搜索代理地址 (如 http://127.0.0.1:7890)' },
    { key: 'search_strategy', value: 'wait_all', description: '搜索结果展示方式: wait_all=等待全部后展示, stream_channel=频道逐步展示' },
    { key: 'link_validation_enabled', value: 'true', description: '资源链接有效性检测开关(true/false)' },
    // Per-cloud-provider result toggles ("others" defaults off).
    { key: 'cloud_enabled_quark', value: 'true', description: '夸克网盘' },
    { key: 'cloud_enabled_baidu', value: 'true', description: '百度网盘' },
    { key: 'cloud_enabled_aliyun', value: 'true', description: '阿里云盘' },
    { key: 'cloud_enabled_115', value: 'true', description: '115 网盘' },
    { key: 'cloud_enabled_tianyi', value: 'true', description: '天翼云盘' },
    { key: 'cloud_enabled_123pan', value: 'true', description: '123 云盘' },
    { key: 'cloud_enabled_uc', value: 'true', description: 'UC 网盘' },
    { key: 'cloud_enabled_xunlei', value: 'true', description: '迅雷网盘' },
    { key: 'cloud_enabled_pikpak', value: 'true', description: 'PikPak 网盘' },
    { key: 'cloud_enabled_magnet', value: 'true', description: '磁力链接' },
    { key: 'cloud_enabled_ed2k', value: 'true', description: '电驴链接' },
    { key: 'cloud_enabled_others', value: 'false', description: '其他类型(默认关闭)' },
    { key: 'search_result_limit', value: '10', description: '每类网盘最多展示的有效结果数' },
    // Site branding and legal text.
    { key: 'search_fallback_image', value: '', description: '无图资源的兜底封面图 URL(留空使用渐变色)' },
    { key: 'site_logo', value: '', description: '网站 LOGO 图片 URL(留空使用默认图标/文字)' },
    { key: 'site_name', value: 'CloudSearch', description: '网站名称(显示在首页标题/页脚)' },
    { key: 'site_disclaimer', value: '本站为非盈利性个人站点,所有资源仅供学习、研究使用,版权归原作者所有。请于下载后24小时内删除,切勿用于商业或非法用途。若侵犯了您的权益,请联系我们(邮箱:3337598077@qq.com),我们将及时处理。', description: '网站底部免责声明' },
    { key: 'site_marquee', value: '📢 欢迎使用CloudSearch,所有资源仅供学习交流,请于下载后24小时内删除', description: '搜索栏下方滚动通知文字(从右往左滚动显示)' },
    // Third-party metadata / geo lookups.
    { key: 'tmdb_api_token', value: '', description: 'TMDB API 读取令牌(用于增强豆瓣内容信息)' },
    { key: 'ip_geo_api_url', value: 'https://cn.apihz.cn/api/ip/chaapi.php?id=10014356&key=***&ip={ip}&td=0', description: 'IP 归属地查询接口({ip} 会被替换为实际IP)' },
    { key: 'ip_geo_api_key', value: '', description: 'IP 归属地备用 API Key(留空使用默认)' },
    { key: 'title_filter_rules', value: '', description: '搜索结果标题过滤规则(一行一条:纯文本直接移除 / 正则用/包围/)' },
    // Runtime / infrastructure.
    { key: 'timezone', value: 'Asia/Shanghai', description: '系统时区(如 Asia/Shanghai、America/New_York、UTC)' },
    { key: 'redis_url', value: 'redis://redis:6379', description: 'Redis 连接地址(用于缓存优化)' },
    { key: 'pansou_auth_token', value: '', description: 'PanSou API 认证令牌(用于私有搜索服务)' },
    { key: 'pansou_web_enabled', value: 'false', description: '启用 PanSou Web 端访问(在 /pansou 路径提供 PanSou 搜索引擎管理界面)' },
    // Automated cleanup of transferred files / logs.
    { key: 'cleanup_enabled', value: 'true', description: '启用自动清理(每天检查一次,移入回收站+清空日志+清空回收站)' },
    { key: 'cleanup_file_retention_days', value: '7', description: '云盘文件保留天数(超过此天数的日期文件夹将被移入回收站)' },
    { key: 'cleanup_log_retention_days', value: '30', description: '转存日志保留天数' },
    { key: 'cleanup_empty_trash', value: 'true', description: '清理时是否清空回收站(永久删除释放空间)' },
    { key: 'cleanup_space_threshold_enabled', value: 'false', description: '启用空间阈值自动清理(已用空间超过XX%时按比例删除最旧的转存文件)' },
    { key: 'cleanup_space_threshold_percent', value: '90', description: '空间使用阈值百分比(超过此值时触发强制清理)' },
    { key: 'cleanup_space_threshold_delete_percent', value: '10', description: '触发阈值清理时释放总空间的百分比(如 10 表示累计删除最旧文件直到达到总空间的 10%,6TB 总空间 → 释放 ~600GB)' },
    { key: 'save_reuse_enabled', value: 'true', description: '启用分享链接复用(相同原始链接不再重复转存,直接复用之前的分享链接)' },
    // Written back at runtime by the cleanup scheduler (not user-edited).
    { key: 'cleanup_last_run', value: '', description: '上次自动清理时间' },
    { key: 'cleanup_last_stats', value: '', description: '上次清理结果统计(JSON)' },
    // Quark transfer hygiene filters (newline-separated lists).
    { key: 'quark_ad_keywords', value: '广告,推广,福利,加V,加微,联系,客服,赚钱,兼职', description: '夸克转存广告关键词(一行一个,匹配文件名/文件夹名即删除)' },
    { key: 'quark_warning_folder_names', value: '⚠️ 网盘内除您所需资源外', description: '夸克转存后自动创建的警示文件夹名(一行一个,自动加上 ⚠️ 前缀)' },
    { key: 'quark_sus_extensions', value: 'bat\nexe\nvbs\nscr\ncmd\ncom\npif\njs\njar\nmsi\nreg\ninf\nps1', description: '夸克转存可疑文件后缀(一行一个,不写点号,匹配即删除)' },
  ];
  // OR IGNORE: only missing keys are inserted; admin overrides are preserved.
  const insert = db.prepare(
    'INSERT OR IGNORE INTO system_configs (key, value, description) VALUES (?, ?, ?)'
  );
  for (const entry of defaults) {
    insert.run(entry.key, entry.value, entry.description);
  }
}
|
||||
|
||||
/** 清理 60 天前的转存记录 */
|
||||
function cleanupOldSaveRecords(db: Database.Database): void {
|
||||
const cutoff = formatLocalDateTime(new Date(Date.now() - 60 * 24 * 60 * 60 * 1000));
|
||||
const deleted = db.prepare('DELETE FROM save_records WHERE created_at < ?').run(cutoff);
|
||||
console.log(`[DB] Cleaned up ${deleted.changes} save records older than 60 days (before ${cutoff})`);
|
||||
}
|
||||
|
||||
export default getDb;
|
||||
41
packages/backend/src/intent/intent.service.ts
Executable file
41
packages/backend/src/intent/intent.service.ts
Executable file
@@ -0,0 +1,41 @@
|
||||
/** The three kinds of user input the app recognizes. */
export type IntentType = 'SEARCH' | 'VIDEO_PARSE' | 'CLOUD_SAVE';

/** Result of intent detection over raw user input. */
export interface IntentResult {
  type: IntentType;
  // Platform id (e.g. 'douyin' / cloud type) — only set for URL intents.
  platform?: string;
  // The input exactly as received.
  rawInput: string;
  // For URL intents: the extracted URL; for search: trimmed input.
  cleanInput: string;
}

// Video platforms recognized for VIDEO_PARSE, matched by domain regex.
// Order matters: the first matching entry wins in detectIntent.
const VIDEO_PLATFORMS = [
  { domain: /douyin\.com|v\.douyin\.com/i, name: 'douyin' },
  { domain: /kuaishou\.com/i, name: 'kuaishou' },
  { domain: /xiaohongshu\.com/i, name: 'xiaohongshu' },
  { domain: /bilibili\.com|b23\.tv/i, name: 'bilibili' },
  { domain: /weibo\.com/i, name: 'weibo' },
  { domain: /pipixia\.com/i, name: 'pipixia' },
  { domain: /y\.qq\.com/i, name: 'qqmusic' },
];
|
||||
|
||||
import { CLOUD_DOMAIN_PATTERNS } from '../config/cloud-labels';
|
||||
|
||||
export function detectIntent(input: string): IntentResult {
|
||||
const urlMatch = input.match(/(https?:\/\/[^\s]+)/i);
|
||||
if (urlMatch) {
|
||||
const url = urlMatch[1];
|
||||
|
||||
for (const p of VIDEO_PLATFORMS) {
|
||||
if (p.domain.test(url)) {
|
||||
return { type: 'VIDEO_PARSE', platform: p.name, rawInput: input, cleanInput: url };
|
||||
}
|
||||
}
|
||||
|
||||
for (const p of CLOUD_DOMAIN_PATTERNS) {
|
||||
if (p.regex.test(url)) {
|
||||
return { type: 'CLOUD_SAVE', platform: p.type, rawInput: input, cleanInput: url };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { type: 'SEARCH', rawInput: input, cleanInput: input.trim() };
|
||||
}
|
||||
203
packages/backend/src/main.ts
Executable file
203
packages/backend/src/main.ts
Executable file
@@ -0,0 +1,203 @@
|
||||
import express from 'express';
|
||||
import path from 'path';
|
||||
import cors from 'cors';
|
||||
import helmet from 'helmet';
|
||||
import morgan from 'morgan';
|
||||
import config from './config';
|
||||
import { APP_VERSION } from "./version";
|
||||
import { getDb } from './database/database';
|
||||
import { connectRedis, disconnectRedis, reconnectRedis, testRedisConnection } from './middleware/cache';
|
||||
import rateLimiter from './middleware/rate-limit';
|
||||
import routes from './routes';
|
||||
import { pansouWebProxy } from './proxy/pansou-web';
|
||||
import { checkAndRunScheduledCleanup } from './cloud/cleanup.service';
|
||||
import { refreshAllStorageInfo } from './cloud/cloud.service';
|
||||
import { checkStartup } from './config/startup-validator';
|
||||
|
||||
const app = express();

// ============ Middleware ============
// Trust reverse-proxy headers (X-Forwarded-For etc.) for req.ip and friends.
app.set('trust proxy', true);

// CORS — production should configure a real origin; an empty value or the
// placeholder falls back to '*' with a warning (credentials disabled then).
const corsOrigin = process.env.CORS_ORIGIN || '';
const isPlaceholder = !corsOrigin || corsOrigin === 'https://your-domain.com';
if (config.nodeEnv === 'production' && isPlaceholder) {
  console.warn('[WARN] CORS_ORIGIN 未配置或使用了占位符,生产环境建议设置真实域名,当前临时允许所有来源');
}
if (config.nodeEnv === 'production' && !isPlaceholder) {
  app.use(cors({ origin: corsOrigin, credentials: true }));
} else {
  app.use(cors({ origin: '*', credentials: false }));
}

// CSP disabled because the bundled SPA loads inline assets.
app.use(helmet({ contentSecurityPolicy: false }));

// morgan format deliberately omits the client IP (privacy/compliance).
app.use(morgan(':method :url :status :res[content-length] - :response-time ms'));

app.use(express.json({ limit: '10mb' }));
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
// Global fallback rate limit; route-specific limiters are mounted in routes.
app.use(rateLimiter);

// ============ Frontend static files ============
// The built SPA is copied next to the compiled backend at dist/frontend.
const frontendDist = path.join(__dirname, 'frontend');
app.use(express.static(frontendDist, {
  maxAge: '1d',
  setHeaders: (res, p) => {
    // index.html must never be cached so new asset hashes are picked up.
    if (p.endsWith('index.html')) {
      res.setHeader('Cache-Control', 'no-cache, no-store, must-revalidate');
    }
  }
}));
|
||||
|
||||
// ============ Routes ============
// Uploaded files are stored at a fixed container path — assumes Docker layout.
app.use('/api/uploads', express.static('/app/uploads'));
app.use('/api', routes);

// ============ Health Check (extended: reports Redis / PanSou / VideoParser status) ============
app.get('/health', async (_req, res) => {
  // DB check: getDb() throws if SQLite cannot be opened/initialized.
  const dbOk = (() => {
    try { getDb(); return true; } catch { return false; }
  })();

  // Redis check: lazy require avoids a hard dependency at module load time.
  const redisStatus = await (async () => {
    try {
      const { getRedis } = require('./middleware/cache');
      const redis = getRedis();
      if (!redis) return 'disconnected';
      // ioredis and mock redis both support ping()
      if (typeof redis.ping !== 'function') return 'unknown';
      const pong = await redis.ping().catch(() => null);
      return pong === 'PONG' ? 'connected' : 'error';
      // eslint-disable-next-line no-unused-vars
    } catch { return 'unknown'; }
  })();

  // PanSou check: real POST probe with a 3s abort timeout.
  const pansouStatus = await (async () => {
    try {
      // Native fetch available in Node 20+
      const url = (config.pansouUrl || 'http://pansou:80').replace(/\/+$/, '') + '/api/search';
      const controller = new AbortController();
      const t = setTimeout(() => controller.abort(), 3000);
      const r = await fetch(url, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ kw: 'health', page: 1 }), signal: controller.signal });
      clearTimeout(t);
      return r.ok ? 'ok' : 'degraded';
      // eslint-disable-next-line no-unused-vars
    } catch { return 'unreachable'; }
  })();

  // Video parser check: plain GET with the same 3s timeout.
  const videoParserStatus = await (async () => {
    try {
      // Native fetch available in Node 20+
      const url = (config.videoParserUrl || 'http://video-parser:3001').replace(/\/+$/, '');
      const controller = new AbortController();
      const t = setTimeout(() => controller.abort(), 3000);
      const r = await fetch(url, { signal: controller.signal });
      clearTimeout(t);
      return r.ok ? 'ok' : 'degraded';
      // eslint-disable-next-line no-unused-vars
    } catch { return 'unreachable'; }
  })();

  // Overall: DB is mandatory; an unreachable PanSou degrades but does not fail.
  const overall = dbOk && pansouStatus !== 'unreachable'
    ? 'ok'
    : dbOk
      ? 'degraded'
      : 'unhealthy';

  res.json({
    version: APP_VERSION,
    status: overall,
    timestamp: new Date().toISOString(),
    uptime: Math.floor(process.uptime()),
    memory: process.memoryUsage().rss,
    components: {
      db: dbOk ? 'connected' : 'error',
      redis: redisStatus,
      pansou: pansouStatus,
      videoParser: videoParserStatus,
    },
  });
});
||||
|
||||
// ============ PanSou Web UI Proxy ============
// Reverse-proxies /pansou/* to the PanSou container (gated by system config).
app.use('/pansou', pansouWebProxy);

// SPA fallback: any non-API, non-health route serves index.html so that
// client-side routing works on refresh/deep links.
app.use((req, res, next) => {
  if (req.path.startsWith('/api/') || req.path === '/health') return next();
  res.sendFile(path.join(frontendDist, 'index.html'), (err) => { if (err) next(); });
});

// Global error handler — must be registered last. Hides internal error
// messages in production.
app.use((err: any, _req: express.Request, res: express.Response, _next: express.NextFunction) => {
  console.error('[Error]', err);
  res.status(err.status || 500).json({
    error: config.nodeEnv === 'production' ? 'Internal server error' : err.message,
    code: err.status || 500,
  });
});
|
||||
|
||||
// ============ Server Start ============
/**
 * Boot sequence: validate config → init SQLite (and apply configured
 * timezone) → best-effort Redis connect → register cleanup/storage
 * schedulers → listen → install graceful-shutdown and crash handlers.
 */
async function start(): Promise<void> {
  // ── Pre-start configuration validation ──
  if (!checkStartup()) {
    console.error('[Server] 配置校验失败,退出');
    process.exit(1);
  }

  try {
    getDb();
    console.log('[DB] SQLite database initialized');
    // Timezone comes from system config (DB), so it can only be applied
    // after the DB is up; failure here is non-fatal.
    try {
      const { getSystemConfig } = require('./admin/system-config.service');
      const tz = getSystemConfig('timezone');
      if (tz) { process.env.TZ = tz; console.log(`[Config] Timezone set to: ${tz}`); }
    } catch { console.warn('[Config] Could not set timezone, using default'); }
  } catch (err) {
    console.error('[DB] Failed to initialize database:', err);
    process.exit(1);
  }

  // Redis is optional: env var takes precedence over the DB-stored URL, and
  // the app runs without cache if the connection fails.
  try {
    const { getSystemConfig } = require('./admin/system-config.service');
    const redisUrl = (process.env.REDIS_URL || getSystemConfig('redis_url') || '').trim();
    if (redisUrl) {
      const ok = await reconnectRedis(redisUrl);
      if (ok) console.log('[Redis] Connected to', redisUrl);
      else console.warn('[Redis] Connection failed, continuing without cache');
    } else {
      console.log('[Redis] No REDIS_URL configured, running without cache');
    }
  } catch { console.warn('[Redis] Redis not available, continuing without cache'); }

  // Cleanup scheduler — polls every 10 min; first check delayed 30s so it
  // doesn't compete with startup work.
  const CLEANUP_INTERVAL = 10 * 60 * 1000;
  setInterval(() => { checkAndRunScheduledCleanup().catch(err => console.error('[Cleanup] Scheduler error:', err.message)); }, CLEANUP_INTERVAL);
  setTimeout(() => { checkAndRunScheduledCleanup().catch(err => console.error('[Cleanup] Initial check error:', err.message)); }, 30000);

  // Storage info refresh scheduler — every 60 minutes
  const STORAGE_REFRESH_INTERVAL = 60 * 60 * 1000;
  setInterval(() => { refreshAllStorageInfo().catch(err => console.error('[Storage] Refresh error:', err.message)); }, STORAGE_REFRESH_INTERVAL);
  setTimeout(() => { refreshAllStorageInfo().catch(err => console.error('[Storage] Initial refresh error:', err.message)); }, 60000);

  const server = app.listen(config.port, () => {
    console.log(`[Server] CloudSearch Backend running on port ${config.port} (${config.nodeEnv})`);
  });

  // Graceful shutdown: stop accepting connections, close Redis, then exit;
  // force-kill after 10s if in-flight requests hang.
  const shutdown = async (signal: string) => {
    console.log(`\n[Server] Received ${signal}, shutting down gracefully...`);
    server.close(async () => { await disconnectRedis(); console.log('[Server] Closed'); process.exit(0); });
    setTimeout(() => { console.error('[Server] Force shutdown'); process.exit(1); }, 10000);
  };

  process.on('SIGTERM', () => shutdown('SIGTERM'));
  process.on('SIGINT', () => shutdown('SIGINT'));
  // Uncaught exceptions exit (after letting the log flush); unhandled
  // rejections are logged but deliberately do not crash the process.
  process.on('uncaughtException', (err) => { console.error('[FATAL] Uncaught Exception:', err); setTimeout(() => process.exit(1), 1000); });
  process.on('unhandledRejection', (reason) => { const msg = reason instanceof Error ? reason.message : String(reason); console.error('[FATAL] Unhandled Rejection:', msg); });
}

start().catch((err) => { console.error('[Server] Failed to start:', err); process.exit(1); });

export default app;
|
||||
172
packages/backend/src/middleware/cache.ts
Executable file
172
packages/backend/src/middleware/cache.ts
Executable file
@@ -0,0 +1,172 @@
|
||||
import Redis from 'ioredis';
|
||||
|
||||
let client: Redis | null = null;
|
||||
let currentUrl: string = '';
|
||||
|
||||
/** Return the current module-level Redis client, or null if none exists. */
export function getRedis(): Redis | null {
  return client;
}
|
||||
|
||||
export function getRedisClient(): Redis | null {
|
||||
if (client) return client;
|
||||
return createClient();
|
||||
}
|
||||
|
||||
/**
 * (Re)create the module-level ioredis client.
 *
 * URL resolution order: explicit argument → REDIS_URL env → last-used URL →
 * DB system config. Returns null when no URL resolves. Any previous client
 * is force-disconnected before the new one is created. lazyConnect means the
 * caller must invoke connect() before use.
 */
function createClient(url?: string): Redis | null {
  const redisUrl = url || process.env.REDIS_URL || currentUrl || getSystemConfigRedisUrl();
  if (!redisUrl) return null;
  currentUrl = redisUrl;

  // Tear down any existing client so we never leak two live connections.
  if (client) {
    try { client.disconnect(); } catch {}
    client = null;
  }

  client = new Redis(redisUrl, {
    maxRetriesPerRequest: 3,
    // Give up after 3 reconnect attempts (returning null stops retrying),
    // with a capped linear backoff.
    retryStrategy(times: number) {
      if (times > 3) return null;
      return Math.min(times * 200, 2000);
    },
    lazyConnect: true,
  });

  // An error listener is required: without one ioredis emits an unhandled
  // 'error' event and crashes the process.
  client.on('error', (err: Error) => {
    console.error('[Redis] Error:', err.message);
  });

  client.on('connect', () => {
    console.log('[Redis] Connected to', currentUrl);
  });

  return client;
}
|
||||
|
||||
function getSystemConfigRedisUrl(): string {
|
||||
try {
|
||||
const { getSystemConfig } = require('./admin/system-config.service');
|
||||
return getSystemConfig('redis_url') || process.env.REDIS_URL || '';
|
||||
} catch {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
export async function connectRedis(): Promise<void> {
|
||||
const redis = createClient();
|
||||
if (!redis) return;
|
||||
try {
|
||||
await redis.connect();
|
||||
} catch (err) {
|
||||
console.warn('[Redis] Connection failed, running without cache');
|
||||
}
|
||||
}
|
||||
|
||||
export async function reconnectRedis(url: string): Promise<boolean> {
|
||||
try {
|
||||
if (client) {
|
||||
await client.quit().catch(() => {});
|
||||
client = null;
|
||||
}
|
||||
const redis = createClient(url);
|
||||
if (!redis) return false;
|
||||
await redis.connect();
|
||||
console.log('[Redis] Reconnected with new URL:', url);
|
||||
return true;
|
||||
} catch (err) {
|
||||
console.error('[Redis] Reconnect failed:', err);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function disconnectRedis(): Promise<void> {
|
||||
if (client) {
|
||||
await client.quit();
|
||||
client = null;
|
||||
currentUrl = '';
|
||||
console.log('[Redis] Disconnected');
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Test a Redis URL without affecting the running client.
 *
 * Creates a throwaway client with no retries and a 5s connect timeout,
 * pings once, and tears it down either way.
 *
 * @returns { ok: boolean, latency: number, info?: string }
 */
export async function testRedisConnection(url: string): Promise<{ ok: boolean; latency: number; info?: string }> {
  const start = Date.now();
  const testClient = new Redis(url, {
    maxRetriesPerRequest: 1,
    // Never retry — a test probe should fail fast.
    retryStrategy() { return null; },
    lazyConnect: true,
    connectTimeout: 5000,
  });
  try {
    await testClient.connect();
    const pong = await testClient.ping();
    const latency = Date.now() - start;
    await testClient.quit();
    return { ok: pong === 'PONG', latency, info: `响应时间 ${latency}ms` };
  } catch (err: any) {
    // quit() would wait for a connection; disconnect() drops it immediately.
    try { await testClient.disconnect(); } catch {}
    const latency = Date.now() - start;
    return { ok: false, latency, info: err.message || '连接失败' };
  }
}
|
||||
|
||||
export class RedisClient {
|
||||
private redis: Redis;
|
||||
|
||||
constructor() {
|
||||
this.redis = getRedisClient() || (null as unknown as Redis);
|
||||
}
|
||||
|
||||
private isConnected(): boolean {
|
||||
return this.redis !== null && typeof this.redis.get === 'function';
|
||||
}
|
||||
|
||||
async get(key: string): Promise<string | null> {
|
||||
if (!this.isConnected()) return null;
|
||||
try {
|
||||
return await this.redis.get(key);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async set(key: string, value: string): Promise<void> {
|
||||
if (!this.isConnected()) return;
|
||||
try {
|
||||
await this.redis.set(key, value);
|
||||
} catch {
|
||||
// silently fail
|
||||
}
|
||||
}
|
||||
|
||||
async setEx(key: string, ttl: number, value: string): Promise<void> {
|
||||
if (!this.isConnected()) return;
|
||||
try {
|
||||
await this.redis.setex(key, ttl, value);
|
||||
} catch {
|
||||
// silently fail
|
||||
}
|
||||
}
|
||||
|
||||
async del(key: string): Promise<void> {
|
||||
if (!this.isConnected()) return;
|
||||
try {
|
||||
await this.redis.del(key);
|
||||
} catch {
|
||||
// silently fail
|
||||
}
|
||||
}
|
||||
|
||||
async exists(key: string): Promise<boolean> {
|
||||
try {
|
||||
const result = await this.redis.exists(key);
|
||||
return result === 1;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default RedisClient;
|
||||
61
packages/backend/src/middleware/rate-limit.ts
Executable file
61
packages/backend/src/middleware/rate-limit.ts
Executable file
@@ -0,0 +1,61 @@
|
||||
import rateLimit from 'express-rate-limit';
|
||||
|
||||
/** 公开搜索接口:较宽松 */
|
||||
export const searchLimiter = rateLimit({
|
||||
windowMs: 60 * 1000,
|
||||
max: 150,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
keyGenerator: (req) => req.socket.remoteAddress ?? 'unknown',
|
||||
message: { error: '搜索请求过于频繁,请稍后再试', code: 429 },
|
||||
});
|
||||
|
||||
/** 管理接口(admin/*):较严格 */
|
||||
export const adminLimiter = rateLimit({
|
||||
windowMs: 60 * 1000,
|
||||
max: 30,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
keyGenerator: (req) => req.socket.remoteAddress ?? 'unknown',
|
||||
message: { error: '操作过于频繁,请稍后再试', code: 429 },
|
||||
});
|
||||
|
||||
/** 登录接口:极严格,防暴力破解 */
|
||||
export const loginLimiter = rateLimit({
|
||||
windowMs: 60 * 1000,
|
||||
max: 5,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
keyGenerator: (req) => req.socket.remoteAddress ?? 'unknown',
|
||||
message: { error: '登录尝试次数过多,请一分钟后重试', code: 429 },
|
||||
});
|
||||
|
||||
/** 转存/保存接口:中等等级 */
|
||||
export const saveLimiter = rateLimit({
|
||||
windowMs: 60 * 1000,
|
||||
max: 30,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
keyGenerator: (req) => req.socket.remoteAddress ?? 'unknown',
|
||||
message: { error: '转存操作过于频繁,请稍后再试', code: 429 },
|
||||
});
|
||||
|
||||
/** 获取真实客户端 IP(优先代理头) */
|
||||
function getClientIP(req: any): string {
|
||||
return req.headers['x-forwarded-for']?.split(',')[0]?.trim()
|
||||
?? req.headers['x-real-ip']
|
||||
?? req.socket.remoteAddress
|
||||
?? 'unknown';
|
||||
}
|
||||
|
||||
/** 默认全局限流(兜底,未匹配上述规则的路由) */
|
||||
const defaultLimiter = rateLimit({
|
||||
windowMs: 60 * 1000,
|
||||
max: 500,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
keyGenerator: getClientIP,
|
||||
message: { error: 'Too many requests, please try again later.', code: 429 },
|
||||
});
|
||||
|
||||
export default defaultLimiter;
|
||||
137
packages/backend/src/proxy/pansou-web.ts
Executable file
137
packages/backend/src/proxy/pansou-web.ts
Executable file
@@ -0,0 +1,137 @@
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
// Native fetch available in Node 20+
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
|
||||
// Upstream PanSou web container (Docker service name + port).
const PANSOU_UPSTREAM = 'http://pansou:80';

// Content types that need sub_filter path rewriting
const TEXT_TYPES = ['text/html', 'application/javascript', 'text/javascript'];

// Hop-by-hop headers that should not be forwarded (per RFC 7230 §6.1).
const HOP_HEADERS = new Set([
  'host', 'connection', 'content-length', 'transfer-encoding',
  'keep-alive', 'proxy-authenticate', 'proxy-authorization',
  'te', 'trailer', 'upgrade',
]);
|
||||
|
||||
/**
|
||||
* Apply sub_filter string replacements to HTML/JS content.
|
||||
* This matches what the nginx pansou.conf sub_filter does.
|
||||
*/
|
||||
function applySubFilter(text: string): string {
|
||||
return text
|
||||
// Replace HTML/JS path references: /api/ -> /pansou/api/
|
||||
.replace(/\/api\//g, '/pansou/api/')
|
||||
// baseURL rewrite (Vue SPA config)
|
||||
.replace(/baseURL:"\/api"/g, 'baseURL:"/pansou/api"')
|
||||
.replace(/baseURL:'\/api'/g, "baseURL:'/pansou/api'")
|
||||
// Static asset path rewrites
|
||||
.replace(/src="\/assets\//g, 'src="/pansou/assets/')
|
||||
.replace(/src='\/assets\//g, "src='/pansou/assets/")
|
||||
.replace(/href="\/assets\//g, 'href="/pansou/assets/')
|
||||
.replace(/href='\/assets\//g, "href='/pansou/assets/")
|
||||
// Favicon path rewrite
|
||||
.replace(/href="\/favicon\.ico/g, 'href="/pansou/favicon.ico')
|
||||
.replace(/href='\/favicon\.ico/g, "href='/pansou/favicon.ico");
|
||||
}
|
||||
|
||||
/**
|
||||
* Express middleware that proxies /pansou/* requests to the PanSou web container.
|
||||
*
|
||||
* How it works:
|
||||
* 1. Strips the /pansou prefix from the request path
|
||||
* 2. Forwards the request to http://pansou:80/{path}
|
||||
* 3. For HTML/JS responses, applies sub_filter path rewriting
|
||||
* so that /api/ becomes /pansou/api/ and /assets/ becomes /pansou/assets/
|
||||
* 4. For static assets (CSS, images, fonts), pipes through as-is
|
||||
*
|
||||
* Controlled by system config key 'pansou_web_enabled' (true/false).
|
||||
*/
|
||||
export async function pansouWebProxy(req: Request, res: Response, _next: NextFunction): Promise<void> {
|
||||
try {
|
||||
// Check if PanSou web is enabled
|
||||
const enabled = getSystemConfig('pansou_web_enabled');
|
||||
if (enabled !== 'true') {
|
||||
res.status(404).send('PanSou Web UI is disabled by administrator');
|
||||
return;
|
||||
}
|
||||
|
||||
// Build upstream URL: strip /pansou prefix
|
||||
let targetPath = req.path;
|
||||
targetPath = targetPath.replace(/^\/pansou/, '') || '/';
|
||||
|
||||
// Preserve query string
|
||||
const queryIndex = req.url.indexOf('?');
|
||||
const query = queryIndex >= 0 ? req.url.substring(queryIndex) : '';
|
||||
const upstreamUrl = `${PANSOU_UPSTREAM}${targetPath}${query}`;
|
||||
|
||||
// Build forwarded headers (filter out hop-by-hop headers)
|
||||
const forwardHeaders: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(req.headers)) {
|
||||
if (!HOP_HEADERS.has(key.toLowerCase()) && value !== undefined) {
|
||||
forwardHeaders[key] = Array.isArray(value) ? value.join(', ') : value;
|
||||
}
|
||||
}
|
||||
// Override Host header to target the upstream
|
||||
forwardHeaders['Host'] = 'pansou';
|
||||
// Remove Accept-Encoding so we get uncompressed content for text rewriting
|
||||
forwardHeaders['accept-encoding'] = '';
|
||||
|
||||
// Forward the request
|
||||
const response = await fetch(upstreamUrl, {
|
||||
method: req.method as any,
|
||||
headers: forwardHeaders,
|
||||
body: ['GET', 'HEAD'].includes(req.method) ? undefined : JSON.stringify(req.body),
|
||||
redirect: 'manual',
|
||||
signal: AbortSignal.timeout(30000),
|
||||
});
|
||||
|
||||
const contentType = response.headers.get('content-type') || '';
|
||||
|
||||
// Set response status
|
||||
res.status(response.status);
|
||||
|
||||
// Handle redirects - rewrite Location header to include /pansou prefix
|
||||
if (response.status >= 300 && response.status < 400) {
|
||||
const location = response.headers.get('location');
|
||||
if (location) {
|
||||
if (location.startsWith('/')) {
|
||||
res.setHeader('location', '/pansou' + location);
|
||||
} else {
|
||||
res.setHeader('location', location);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For HTML/JS content, apply sub_filter string replacements
|
||||
if (TEXT_TYPES.some(t => contentType.includes(t))) {
|
||||
const text = await response.text();
|
||||
const modified = applySubFilter(text);
|
||||
res.setHeader('content-type', contentType);
|
||||
// Remove content-encoding since we decompressed
|
||||
res.setHeader('content-length', Buffer.byteLength(modified, 'utf-8').toString());
|
||||
res.send(modified);
|
||||
return;
|
||||
}
|
||||
|
||||
// For other content (CSS, images, fonts, etc.), pipe through as-is
|
||||
const excludedHeaders = new Set([
|
||||
'content-encoding', 'content-length', 'transfer-encoding',
|
||||
'keep-alive', 'connection',
|
||||
]);
|
||||
response.headers.forEach((value, key) => {
|
||||
if (!excludedHeaders.has(key.toLowerCase())) {
|
||||
res.setHeader(key, value);
|
||||
}
|
||||
});
|
||||
|
||||
// Use buffer for reliability
|
||||
const buffer = await response.arrayBuffer().then(buf => Buffer.from(buf));
|
||||
res.end(buffer);
|
||||
} catch (err: any) {
|
||||
console.error(`[PanSou Web Proxy] Error proxying ${req.path}:`, err.message);
|
||||
if (!res.headersSent) {
|
||||
res.status(502).send(`PanSou Web proxy error: ${err.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
615
packages/backend/src/routes/admin.routes.ts
Normal file
615
packages/backend/src/routes/admin.routes.ts
Normal file
@@ -0,0 +1,615 @@
|
||||
import { Router, Request, Response } from 'express';
|
||||
// Native fetch available in Node 20+
|
||||
import fs from "fs";
|
||||
import { execSync } from 'child_process';
|
||||
import { adminLimiter, loginLimiter } from '../middleware/rate-limit';
|
||||
import { getSaveRecords } from '../cloud/cloud.service';
|
||||
import { getCloudConfigs, getCloudConfigById, saveCloudConfig, deleteCloudConfig, getCloudConfigByType, testCloudConnection, testCloudConnectionWithCookie } from '../cloud/credential.service';
|
||||
// Note: check-in routes were removed (sign-in feature removed)
|
||||
import { getAllCloudTypes } from '../cloud/cloud-types.service';
|
||||
import { login, authMiddleware, verifyToken, changePassword } from '../admin/auth.service';
|
||||
import { getStats } from '../admin/stats.service';
|
||||
import { getAllSystemConfigs, updateSystemConfig, updateSystemConfigs, getSystemConfig } from '../admin/system-config.service';
|
||||
import { testProxyConnection } from '../utils/proxy-agent';
|
||||
import { getDb } from '../database/database';
|
||||
import { reconnectRedis, testRedisConnection } from '../middleware/cache';
|
||||
import { startQrLogin, getQrLoginStatus, cancelQrLogin } from '../cloud/qr-login.service';
|
||||
import { BaiduDriver } from '../cloud/drivers/baidu.driver';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Public routes (no auth required)
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/**
|
||||
* POST /api/admin/login
|
||||
* Admin login
|
||||
*/
|
||||
router.post('/admin/login', loginLimiter, (req: Request, res: Response) => {
|
||||
try {
|
||||
const { username, password } = req.body;
|
||||
if (!username || !password) {
|
||||
res.status(400).json({ error: 'Username and password are required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const token = login(username, password);
|
||||
if (!token) {
|
||||
res.status(401).json({ error: 'Invalid credentials' });
|
||||
return;
|
||||
}
|
||||
|
||||
res.json({ token });
|
||||
} catch (err: any) {
|
||||
console.error('[Login] Error:', err);
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/admin/cloud-types
|
||||
* List all cloud types (public, read-only).
|
||||
*/
|
||||
router.get('/admin/cloud-types', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const types = getAllCloudTypes();
|
||||
res.json({ types });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// QR Login routes (no auth — user not logged in yet)
|
||||
// MUST be before authMiddleware!
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
// ===== 夸克扫码登录 =====
|
||||
router.post('/admin/quark/qr-login/start', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await startQrLogin();
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/admin/quark/qr-login/:sessionId/status', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
const result = await getQrLoginStatus(sessionId);
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.post('/admin/quark/qr-login/:sessionId/cancel', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
await cancelQrLogin(sessionId);
|
||||
res.json({ ok: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
// ===== 百度扫码登录 =====
|
||||
router.post("/admin/baidu/qr-login/start", async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await BaiduDriver.startQrLogin();
|
||||
res.json({ ok: true, ...result });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/admin/baidu/qr-login/:sessionId/status", async (req: Request, res: Response) => {
|
||||
try {
|
||||
const sessionId = req.params.sessionId as string;
|
||||
const result: any = await BaiduDriver.getQrLoginStatus(sessionId);
|
||||
// Map to frontend-expected format (frontend reads data.cookie)
|
||||
res.json({
|
||||
ok: true,
|
||||
status: result.status,
|
||||
cookie: result.cookie || result.access_token || "",
|
||||
nickname: result.nickname || "",
|
||||
storage_used: result.storage_used || "",
|
||||
storage_total: result.storage_total || "",
|
||||
});
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
router.post("/admin/baidu/qr-login/:sessionId/cancel", async (req: Request, res: Response) => {
|
||||
try {
|
||||
BaiduDriver.cancelQrLogin(req.params.sessionId as string);
|
||||
} catch {}
|
||||
res.json({ ok: true });
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
// Auth wall — all routes below require JWT
// ═══════════════════════════════════════
// Every /admin/* route registered after this line passes through authMiddleware.
// The login and QR-login routes are deliberately registered above this line,
// since the user has no token yet at that point.
router.use('/admin', authMiddleware);
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Cloud Configs CRUD
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/cloud-configs — list all cloud configs */
|
||||
router.get('/admin/cloud-configs', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const configs = getCloudConfigs();
|
||||
res.json(configs);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to fetch cloud configs' });
|
||||
}
|
||||
});
|
||||
|
||||
/** POST /api/admin/cloud-configs — create or smart-replace a cloud config */
|
||||
router.post('/admin/cloud-configs', (req: Request, res: Response) => {
|
||||
try {
|
||||
const data = req.body;
|
||||
if (!data.cloud_type) {
|
||||
res.status(400).json({ error: 'cloud_type is required' });
|
||||
return;
|
||||
}
|
||||
// Normalize is_active: frontend sends boolean, SQLite needs 0/1
|
||||
if (typeof data.is_active === 'boolean') data.is_active = data.is_active ? 1 : 0;
|
||||
// Normalize is_transfer_enabled: frontend sends boolean, SQLite needs 0/1
|
||||
if (typeof data.is_transfer_enabled === 'boolean') data.is_transfer_enabled = data.is_transfer_enabled ? 1 : 0;
|
||||
const saved = saveCloudConfig(data);
|
||||
res.json(saved);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to save cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** PUT /api/admin/cloud-configs/:id — update an existing cloud config */
|
||||
router.put('/admin/cloud-configs/:id', (req: Request, res: Response) => {
|
||||
try {
|
||||
const id = parseInt(req.params.id as string);
|
||||
const existing = getCloudConfigById(id);
|
||||
if (!existing) {
|
||||
res.status(404).json({ error: 'Cloud config not found' });
|
||||
return;
|
||||
}
|
||||
const saved = saveCloudConfig({ ...req.body, id });
|
||||
res.json(saved);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to update cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** DELETE /api/admin/cloud-configs/:id */
|
||||
router.delete('/admin/cloud-configs/:id', (req: Request, res: Response) => {
|
||||
try {
|
||||
const id = parseInt(req.params.id as string);
|
||||
const ok = deleteCloudConfig(id);
|
||||
if (!ok) {
|
||||
res.status(404).json({ error: 'Cloud config not found' });
|
||||
return;
|
||||
}
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to delete cloud config' });
|
||||
}
|
||||
});
|
||||
|
||||
/** POST /api/admin/cloud-configs/:type/test — test cloud connection (by type or id) */
|
||||
router.post('/admin/cloud-configs/:type/test', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const type = req.params.type as string;
|
||||
const { cookie, id } = req.body;
|
||||
|
||||
// If cookie is provided directly, test with it (for new configs not yet saved)
|
||||
if (cookie) {
|
||||
const result = await testCloudConnectionWithCookie(type, cookie);
|
||||
res.json(result);
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise test by config id
|
||||
if (id) {
|
||||
const result = await testCloudConnection(parseInt(id));
|
||||
res.json(result);
|
||||
return;
|
||||
}
|
||||
|
||||
res.status(400).json({ success: false, message: 'Provide either cookie or id' });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ success: false, message: err.message || 'Connection test failed' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Stats
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/stats */
|
||||
router.get('/admin/stats', (req: Request, res: Response) => {
|
||||
try {
|
||||
const days = req.query.days ? parseInt(req.query.days as string) : 7;
|
||||
const stats = getStats(days);
|
||||
res.json(stats);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get stats' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Save Records (转存日志)
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/save-records */
|
||||
router.get('/admin/save-records', (req: Request, res: Response) => {
|
||||
try {
|
||||
const page = parseInt(req.query.page as string) || 1;
|
||||
const pageSize = parseInt(req.query.pageSize as string) || 20;
|
||||
const startDate = req.query.startDate as string | undefined;
|
||||
const endDate = req.query.endDate as string | undefined;
|
||||
const status = req.query.status as string | undefined;
|
||||
const sourceType = req.query.sourceType as string | undefined;
|
||||
const keyword = req.query.keyword as string | undefined;
|
||||
const result = getSaveRecords(page, pageSize, startDate, endDate, status, sourceType, keyword);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get save records' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// System Configs
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** GET /api/admin/system-configs */
|
||||
router.get('/admin/system-configs', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const configs = getAllSystemConfigs();
|
||||
res.json(configs);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to get system configs' });
|
||||
}
|
||||
});
|
||||
|
||||
/** PUT /api/admin/system-configs — batch update */
|
||||
router.put('/admin/system-configs', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { entries } = req.body;
|
||||
if (!entries || !Array.isArray(entries)) {
|
||||
res.status(400).json({ error: 'entries array is required' });
|
||||
return;
|
||||
}
|
||||
updateSystemConfigs(entries);
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to update system configs' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Cloud Types Toggle
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** PUT /api/admin/cloud-types — toggle cloud type enabled/disabled */
|
||||
router.put('/admin/cloud-types', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { type, enabled } = req.body;
|
||||
if (!type) {
|
||||
res.status(400).json({ error: 'type is required' });
|
||||
return;
|
||||
}
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
`INSERT INTO system_configs (key, value, description) VALUES (?, ?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET value = excluded.value`
|
||||
).run(`cloud_type_${type}_enabled`, enabled ? '1' : '0', `Enable/disable ${type} cloud drive`);
|
||||
res.json({ success: true });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to toggle cloud type' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Change Password
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** POST /api/admin/change-password */
|
||||
router.post('/admin/change-password', (req: Request, res: Response) => {
|
||||
try {
|
||||
const { oldPassword, newPassword } = req.body;
|
||||
if (!oldPassword || !newPassword) {
|
||||
res.status(400).json({ error: 'Both old and new passwords are required' });
|
||||
return;
|
||||
}
|
||||
// Get username from JWT
|
||||
const authHeader = req.headers.authorization || '';
|
||||
const token = authHeader.replace('Bearer ', '');
|
||||
const payload = verifyToken(token);
|
||||
if (!payload) {
|
||||
res.status(401).json({ error: 'Invalid token' });
|
||||
return;
|
||||
}
|
||||
const result = changePassword(payload.username, oldPassword, newPassword);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Failed to change password' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// DB Status
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/**
 * GET /api/admin/db-status
 *
 * Storage-health snapshot for the admin dashboard: SQLite file size/path,
 * row counts of the main tables, and whether the configured Redis instance
 * is reachable.
 */
router.get('/admin/db-status', async (_req: Request, res: Response) => {
  try {
    // DB file path comes from system config; sizing is best-effort (stat may fail).
    const dbFile = getSystemConfig('db_path') || '';
    let dbSize = 'N/A';
    if (dbFile) {
      try {
        const stats = fs.statSync(dbFile);
        dbSize = (stats.size / 1024 / 1024).toFixed(2) + ' MB';
      } catch {}
    }

    const db = getDb();
    // Row counts for the dashboard; each query returns a single { c } row.
    const counts = {
      save_records: (db.prepare('SELECT COUNT(*) as c FROM save_records').get() as any)?.c || 0,
      search_stats: (db.prepare('SELECT COUNT(*) as c FROM search_stats').get() as any)?.c || 0,
      system_configs: (db.prepare('SELECT COUNT(*) as c FROM system_configs').get() as any)?.c || 0,
      cloud_configs: (db.prepare('SELECT COUNT(*) as c FROM cloud_configs').get() as any)?.c || 0,
      content_cache: (db.prepare('SELECT COUNT(*) as c FROM content_cache').get() as any)?.c || 0,
    };

    // Redis status: probe the configured URL. A failed probe result maps to
    // 'disconnected'; 'error' is reported only if the probe itself throws.
    let redis_status = 'disconnected';
    let redis_url = getSystemConfig('redis_url') || '';
    try {
      const testResult = await testRedisConnection(redis_url);
      redis_status = testResult.ok ? 'connected' : 'disconnected';
    } catch {
      redis_status = 'error';
    }

    res.json({
      db_size: dbSize,
      db_path: dbFile,
      ...counts,
      redis_status,
      redis_url,
    });
  } catch (err: any) {
    res.status(500).json({ error: err.message || 'Failed to get DB status' });
  }
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Test Redis Connection
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/** POST /api/admin/test-redis */
|
||||
router.post('/admin/test-redis', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { url } = req.body;
|
||||
if (!url) {
|
||||
res.status(400).json({ ok: false, info: 'Redis URL is required' });
|
||||
return;
|
||||
}
|
||||
const result = await testRedisConnection(url);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ ok: false, info: err.message || 'Redis test failed' });
|
||||
}
|
||||
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Test External Service
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/**
 * POST /api/admin/test-external-service
 *
 * Connectivity probe for the external services configured in the admin UI.
 * Body: { type, url?, token? } — url/token, when given, override the stored
 * system config value. Responds { ok, latency?, info }; a missing config is
 * reported as ok:false rather than an HTTP error.
 */
router.post('/admin/test-external-service', async (req: Request, res: Response) => {
  try {
    const { type, url, token } = req.body;
    // Latency is measured from here to each service's response.
    const start = Date.now();

    switch (type) {
      case 'pansou': {
        // PanSou search backend: /api/health also reports channel/plugin counts.
        const pansouUrl = url || getSystemConfig('pansou_url') || '';
        if (!pansouUrl) {
          res.json({ ok: false, info: 'PanSou URL not configured' });
          return;
        }
        const response = await fetch(pansouUrl + '/api/health', { signal: AbortSignal.timeout(8000) });
        const data: any = await response.json();
        const latency = Date.now() - start;
        // Healthy only when HTTP is OK *and* the payload says status === 'ok'.
        res.json({
          ok: response.ok && data?.status === 'ok',
          latency,
          info: response.ok ? `连接成功 (${data?.channels_count || 0} 频道, ${data?.plugin_count || 0} 插件)` : '连接失败',
        });
        break;
      }
      case 'video_parser': {
        // Video parser service: a plain /health reachability check.
        const parserUrl = url || getSystemConfig('video_parser_url') || '';
        if (!parserUrl) {
          res.json({ ok: false, info: 'Video Parser URL not configured' });
          return;
        }
        const response = await fetch(parserUrl + '/health', { signal: AbortSignal.timeout(8000) });
        const latency = Date.now() - start;
        res.json({
          ok: response.ok,
          latency,
          info: response.ok ? '连接成功' : `HTTP ${response.status}`,
        });
        break;
      }
      case 'tmdb': {
        // TMDB: validate the bearer token against the /configuration endpoint.
        const tmdbToken = token || getSystemConfig('tmdb_api_key') || '';
        if (!tmdbToken) {
          res.json({ ok: false, info: 'TMDB API Key not configured' });
          return;
        }
        const response = await fetch('https://api.themoviedb.org/3/configuration', {
          headers: { Authorization: `Bearer ${tmdbToken}` },
          signal: AbortSignal.timeout(8000),
        });
        const latency = Date.now() - start;
        res.json({
          ok: response.ok,
          latency,
          info: response.ok ? '连接成功' : `HTTP ${response.status}`,
        });
        break;
      }
      case 'proxy': {
        // Outbound search proxy: delegated to the proxy-agent helper.
        const proxyUrl = url || getSystemConfig('search_proxy_url') || '';
        if (!proxyUrl) {
          res.json({ ok: false, info: 'Proxy URL not configured' });
          return;
        }
        const result = await testProxyConnection(proxyUrl);
        res.json(result);
        break;
      }
      case 'ip_geo': {
        // IP geolocation API: URL template with a {ip} placeholder; probed with 8.8.8.8.
        const geoUrl = url || getSystemConfig('ip_geo_api_url') || '';
        if (!geoUrl) {
          res.json({ ok: false, info: '请先输入 IP 归属地查询 API 地址' });
          return;
        }
        const testUrl = geoUrl.replace('{ip}', '8.8.8.8');
        const response = await fetch(testUrl, { signal: AbortSignal.timeout(8000) });
        const data: any = await response.json();
        const latency = Date.now() - start;
        // Accept any response that carries at least one location-like field.
        const valid = !!(data?.country || data?.region || data?.city || data?.countryCode);
        res.json({ ok: valid, latency, info: valid ? '连接成功' : '响应格式不符' });
        break;
      }
      default:
        res.json({ ok: false, info: `Unknown service type: ${type}` });
    }
  } catch (err: any) {
    res.status(500).json({ ok: false, info: err.message || 'External service test failed' });
  }
});
|
||||
|
||||
// ═══════════════════════════════════════
|
||||
// Pansou Info & Update
|
||||
// ═══════════════════════════════════════
|
||||
|
||||
/**
 * GET /api/admin/pansou-info — pansou health + version + update check
 *
 * Combines three data sources:
 *  - PanSou's /api/health endpoint (channel/plugin counts, channel list)
 *  - the local container's OCI "created" label, read via `docker inspect`
 *  - GHCR's registry API for the latest image's "created" label (cached
 *    on disk for 3 days to avoid hammering the registry)
 * All failures in the version/update section are swallowed — the route
 * still returns the health data it could gather.
 */
router.get('/admin/pansou-info', async (_req: Request, res: Response) => {
  try {
    const baseUrl = getSystemConfig('pansou_url') || '';
    if (!baseUrl) {
      res.json({ status: 'disconnected', channelCount: 0, pluginCount: 0, diskCount: 0, version: '', hasUpdate: false, latestVersion: '' });
      return;
    }

    // Fetch PanSou health
    const healthUrl = baseUrl + '/api/health';
    const response = await fetch(healthUrl, { signal: AbortSignal.timeout(8000) });
    const healthData: any = await response.json();
    const channelCount = healthData.channels_count || 0;
    const pluginCount = healthData.plugin_count || 0;

    // Derive disk count from channel names: each channel matching a drive
    // keyword counts that drive once (first keyword wins per channel).
    const driveKeywords = ['aliyun', 'baidu', 'quark', '115', 'pikpak', 'xunlei', 'uc', '123', '139', '189', 'tianyi', 'netease'];
    const drives = new Set<string>();
    for (const ch of (healthData.channels || [])) {
      for (const kw of driveKeywords) {
        if (ch.toLowerCase().includes(kw)) { drives.add(kw); break; }
      }
    }
    // Fall back to 5 when no channel names matched anything.
    const diskCount = drives.size || 5;

    // Get local version from the container's OCI image "created" label;
    // only the date part (YYYY-MM-DD) is kept as the version string.
    let version = '';
    let hasUpdate = false;
    let latestVersion = '';
    try {
      const created = execSync(
        `docker inspect CloudSearch_PanSou --format '{{index .Config.Labels "org.opencontainers.image.created"}}'`,
        { timeout: 5000, encoding: 'utf8' }
      ).trim();
      version = created ? created.slice(0, 10) : '';

      // Check update cache — a small JSON file with { checkedAt, hasUpdate, latestVersion }.
      const cacheFile = '/tmp/pansou-update-cache.json';
      let cache: any = null;
      try { cache = JSON.parse(fs.readFileSync(cacheFile, 'utf8') || 'null'); } catch {}
      const threeDays = 3 * 24 * 3600 * 1000;

      if (!cache || (Date.now() - cache.checkedAt) > threeDays) {
        // Check GHCR for the latest version. The walk is:
        // anonymous pull token -> multi-arch manifest index -> linux/amd64
        // manifest -> image config blob -> its "created" label.
        try {
          const tokenRes = await fetch(
            'https://ghcr.io/token?scope=repository:fish2018/pansou-web:pull&service=ghcr.io'
          );
          const ghcrToken = (await tokenRes.json() as any).token;
          const manifestRes = await fetch(
            'https://ghcr.io/v2/fish2018/pansou-web/manifests/latest',
            { headers: { Authorization: `Bearer ${ghcrToken}`, Accept: 'application/vnd.oci.image.index.v1+json, application/vnd.docker.distribution.manifest.list.v2+json' } }
          );
          const manifestList: any = await manifestRes.json();
          const amd64 = manifestList.manifests?.find((m: any) => m.platform?.architecture === 'amd64' && m.platform?.os === 'linux');
          if (amd64) {
            const blobRes = await fetch(
              `https://ghcr.io/v2/fish2018/pansou-web/manifests/${amd64.digest}`,
              { headers: { Authorization: `Bearer ${ghcrToken}`, Accept: 'application/vnd.oci.image.manifest.v1+json' } }
            );
            const blobData: any = await blobRes.json();
            const cfgDigest = blobData.config?.digest;
            if (cfgDigest) {
              const cfgRes = await fetch(
                `https://ghcr.io/v2/fish2018/pansou-web/blobs/${cfgDigest}`,
                { headers: { Authorization: `Bearer ${ghcrToken}` } }
              );
              const cfgData: any = await cfgRes.json();
              const remoteCreated = cfgData.config?.Labels?.['org.opencontainers.image.created'];
              if (remoteCreated) {
                latestVersion = remoteCreated.slice(0, 10);
                // An update exists when the remote date differs from the local one.
                if (version && latestVersion !== version) hasUpdate = true;
              }
            }
          }
        } catch {}
        // Cache the outcome (even a failed check) so we re-query at most every 3 days.
        fs.writeFileSync(cacheFile, JSON.stringify({ checkedAt: Date.now(), hasUpdate, latestVersion }));
      } else {
        hasUpdate = cache.hasUpdate;
        latestVersion = cache.latestVersion;
      }
    } catch {}

    res.json({
      status: response.ok ? 'connected' : 'disconnected',
      channelCount,
      pluginCount,
      diskCount,
      version,
      hasUpdate,
      latestVersion,
    });
  } catch (err: any) {
    // Health fetch failed entirely — report 'error' with zeroed counts (HTTP 200).
    res.json({ status: 'error', channelCount: 0, pluginCount: 0, diskCount: 0, version: '', hasUpdate: false, latestVersion: '', error: err.message });
  }
});
|
||||
|
||||
/** POST /api/admin/update-pansou — pull latest pansou image + recreate container */
// NOTE(review): shells out to the host Docker daemon, which presumably requires
// the docker socket/CLI to be available inside this container — confirm in the
// compose setup. Both commands are fixed strings (no user input), so there is
// no injection risk, but this endpoint must remain behind the auth wall above.
router.post('/admin/update-pansou', async (_req: Request, res: Response) => {
  try {
    execSync('docker pull ghcr.io/fish2018/pansou-web:latest', { timeout: 120000 });
    execSync('docker compose -p cloudsearch -f /app/docker-compose.yml up -d pansou', { timeout: 60000 });
    // Invalidate the cached update-check so /admin/pansou-info reports fresh state.
    try { fs.unlinkSync('/tmp/pansou-update-cache.json'); } catch {}
    res.json({ success: true, message: 'PanSou 更新成功' });
  } catch (err: any) {
    res.status(500).json({ success: false, error: err.message || 'PanSou 更新失败' });
  }
});
|
||||
|
||||
export default router;
|
||||
87
packages/backend/src/routes/cleanup.routes.ts
Normal file
87
packages/backend/src/routes/cleanup.routes.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { Router, Request, Response } from 'express';
|
||||
import { runFullCleanup, emptyAllTrash } from '../cloud/cleanup.service';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// ============ Cleanup & Storage Management ============
|
||||
|
||||
/**
|
||||
* POST /api/admin/cleanup/run
|
||||
* Manually trigger a cleanup cycle:
|
||||
* - Trash old date folders from cloud drives
|
||||
* - Delete old save_records
|
||||
* - Empty recycle bin
|
||||
*/
|
||||
router.post('/admin/cleanup/run', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const stats = await runFullCleanup();
|
||||
res.json({
|
||||
success: stats.errors.length === 0,
|
||||
files_trashed: stats.filesTrashed,
|
||||
logs_deleted: stats.logsDeleted,
|
||||
trash_emptied: stats.trashEmptied,
|
||||
errors: stats.errors,
|
||||
message: stats.errors.length === 0
|
||||
? `✅ 清理完成:移入回收站 ${stats.filesTrashed} 个文件夹,删除 ${stats.logsDeleted} 条日志,清空回收站${stats.trashEmptied ? '✓' : '-'}`
|
||||
: `清理完成,但有 ${stats.errors.length} 个错误`,
|
||||
});
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ success: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/admin/cleanup/empty-trash
|
||||
* Empty recycle bin for all cloud drives (permanently delete, frees space).
|
||||
*/
|
||||
router.post('/admin/cleanup/empty-trash', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const result = await emptyAllTrash();
|
||||
res.json({
|
||||
success: result.errors.length === 0,
|
||||
emptied: result.emptied,
|
||||
errors: result.errors,
|
||||
message: result.emptied
|
||||
? '✅ 回收站已清空,存储空间已释放'
|
||||
: (result.errors.length > 0 ? `清空回收站部分失败:${result.errors.join('; ')}` : '没有可清空的网盘'),
|
||||
});
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ success: false, error: err.message });
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
/**
|
||||
* Extract genre tags from search result titles.
|
||||
*/
|
||||
function extractTagsFromResults(results: any[], keyword: string): string[] {
|
||||
const tags: string[] = [];
|
||||
if (keyword) tags.push(keyword);
|
||||
|
||||
const genreKeywords: Record<string, string> = {
|
||||
'动画': '动画', '动漫': '动画', '国漫': '国漫',
|
||||
'剧场版': '剧场版', '年番': '年番',
|
||||
'动作': '动作', '奇幻': '奇幻', '玄幻': '玄幻',
|
||||
'仙侠': '仙侠', '古装': '古装', '爱情': '爱情',
|
||||
'科幻': '科幻', '喜剧': '喜剧', '悬疑': '悬疑',
|
||||
'恐怖': '恐怖', '惊悚': '惊悚', '剧情': '剧情',
|
||||
'冒险': '冒险', '战争': '战争', '武侠': '武侠',
|
||||
'纪录': '纪录片', '真人': '真人秀', '短片': '短片',
|
||||
};
|
||||
|
||||
const seen = new Set<string>();
|
||||
for (const r of results) {
|
||||
const title = (r.title || r.note || '') as string;
|
||||
for (const [key, val] of Object.entries(genreKeywords)) {
|
||||
if (title.includes(key) && !seen.has(val)) {
|
||||
seen.add(val);
|
||||
tags.push(val);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return tags;
|
||||
}
|
||||
|
||||
|
||||
export default router;
|
||||
14
packages/backend/src/routes/index.ts
Executable file
14
packages/backend/src/routes/index.ts
Executable file
@@ -0,0 +1,14 @@
|
||||
import { Router } from 'express';
|
||||
import searchRoutes from './search.routes';
|
||||
import adminRoutes from './admin.routes';
|
||||
import uploadRoutes from './upload.routes';
|
||||
import cleanupRoutes from './cleanup.routes';
|
||||
|
||||
const router = Router();
|
||||
|
||||
router.use(searchRoutes);
|
||||
router.use(adminRoutes);
|
||||
router.use(uploadRoutes);
|
||||
router.use(cleanupRoutes);
|
||||
|
||||
export default router;
|
||||
630
packages/backend/src/routes/search.routes.ts
Normal file
630
packages/backend/src/routes/search.routes.ts
Normal file
@@ -0,0 +1,630 @@
|
||||
import { Router, Request, Response } from 'express';
|
||||
// Native fetch available in Node 20+
|
||||
import { searchLimiter, saveLimiter } from '../middleware/rate-limit';
|
||||
import { detectIntent } from '../intent/intent.service';
|
||||
import { search, applyTitleFilter } from '../search/search.service';
|
||||
import { getRankings, getHotKeywords, getCategorizedRankings } from '../search/rankings.service';
|
||||
import { parseVideo } from '../video/video.service';
|
||||
import { saveFromShare } from '../cloud/cloud.service';
|
||||
import { getEnabledCloudTypeSet } from '../cloud/cloud-types.service';
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
import { verifyToken } from '../admin/auth.service';
|
||||
import { LinkValidator } from '../validation/link-validator.service';
|
||||
import { getContentInfo } from '../content/content.service';
|
||||
import { detectCloudType } from '../config/cloud-labels';
|
||||
import { CLOUD_LABELS, CLOUD_COLORS } from '../config/cloud-labels';
|
||||
import { getDb } from '../database/database';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// ============ Search & Query ============
|
||||
|
||||
/**
|
||||
* POST /api/query
|
||||
* Intent recognition + execution
|
||||
*/
|
||||
/**
 * POST /api/query — intent recognition + execution.
 *
 * Accepts { input } (or legacy { q }). Detects the intent and dispatches:
 *  - SEARCH:      streams NDJSON events ('searching' → optional 'saved' →
 *                 'stats' → zero or more 'result' → 'complete') on one response.
 *  - VIDEO_PARSE: single JSON reply with the parsed video info.
 *  - CLOUD_SAVE:  single JSON reply with the save result.
 * Errors before streaming begins return 400/500 JSON.
 */
router.post('/query', searchLimiter, async (req: Request, res: Response) => {
  try {
    const { input, q } = req.body;
    const query = input || q;
    if (!query || typeof query !== 'string') {
      res.status(400).json({ error: 'Input is required' });
      return;
    }

    const intent = detectIntent(query);
    // Client IP for rate/audit purposes; falls back to the raw socket address.
    const ip = req.ip || req.socket.remoteAddress || '';

    switch (intent.type) {
      case 'SEARCH': {
        const result = await search(intent.cleanInput, 1, ip);

        // Pass through: use all results, group by cloud type
        const allResults = result.results || [];

        // Transform to frontend-friendly format.
        // id is positional ('search_<idx>') — stable only within this response.
        let formatted = (allResults || []).map((item: any, idx: number) => ({
          id: `search_${idx}`,
          title: filterTitle(item.title || item.content || ''),
          description: item.content || '',
          share_url: item.url || '',
          cloud_type: detectCloudType(item.url || ''),
          file_size: '',
          update_time: item.datetime || '',
          source: item.source || '',
          file_id: '',
          cover: Array.isArray(item.images) && item.images.length > 0 ? item.images[0] : '',
          password: item.password || '',
        }));

        // Filter out expired/invalid links (heuristics in isExpiredShareLink)
        formatted = formatted.filter(r => !r.share_url || !isExpiredShareLink(r.share_url));

        // Filter by enabled cloud types (admin-configurable per-type toggle)
        // Skip filter if search_all_channels is enabled
        const searchAllChannels = getSystemConfig('search_all_channels') === 'true';
        if (!searchAllChannels) {
          const enabledSet = getEnabledCloudTypeSet();
          formatted = formatted.filter(r => !r.cloud_type || enabledSet.has(r.cloud_type));
        }

        const contentQuery = intent.cleanInput || query;
        // Content info is best-effort; failures degrade to null, never abort.
        const contentInfo = await getContentInfo(contentQuery).catch(() => null);
        const extractedTags = extractTagsFromResults(formatted, contentQuery);
        // Validation is on unless explicitly set to the string 'false'.
        const linkValidationEnabled = getSystemConfig('link_validation_enabled') !== 'false';

        // Set up streaming response (NDJSON); disable proxy buffering so
        // events reach the client as soon as they are written.
        res.setHeader('Content-Type', 'application/x-ndjson');
        res.setHeader('X-Accel-Buffering', 'no');
        res.setHeader('Cache-Control', 'no-cache');
        res.setHeader('Connection', 'keep-alive');

        // 0. Send searching signal immediately so frontend shows feedback
        res.write(JSON.stringify({ type: 'searching' }) + '\n');

        // 0.5 Query local DB for previously saved resources matching keyword
        const savedResults = getSavedResources(intent.cleanInput);
        if (savedResults.length > 0) {
          res.write(JSON.stringify({
            type: 'saved',
            results: savedResults,
            total: savedResults.length,
          }) + '\n');
        }

        // 1. Send stats immediately (site branding + totals before validation)
        const fallbackImage = getSystemConfig('search_fallback_image') || '';
        const siteLogo = getSystemConfig('site_logo') || '';
        const siteNameInStats = getSystemConfig('site_name') || 'CloudSearch';
        const siteDisclaimer = getSystemConfig('site_disclaimer') || '';
        const siteMarquee = getSystemConfig('site_marquee') || '';
        const statsPayload = {
          type: 'stats',
          total: formatted.length,
          channels: groupResultsByChannel(formatted, (item: any) => item.cloud_type),
          content_info: contentInfo,
          content_tags: extractedTags,
          link_validation: linkValidationEnabled,
          fallback_image: fallbackImage,
          site_logo: siteLogo,
          site_name: siteNameInStats,
          site_disclaimer: siteDisclaimer,
          site_marquee: siteMarquee,
        };
        res.write(JSON.stringify(statsPayload) + '\n');

        // 2. Validate links — per-type grouping, newest-first, per-type cap from config
        if (linkValidationEnabled) {
          const validator = new LinkValidator();
          const resultLimit = parseInt(getSystemConfig('search_result_limit') || '10', 10);
          const MAX_VALID_PER_TYPE = Math.min(100, Math.max(1, resultLimit)); // configurable, 1-100
          const MAX_TOTAL_VALID = MAX_VALID_PER_TYPE * 6; // up to 6 cloud types
          // NOTE(review): bracket access reaches into a private member of
          // LinkValidator; consider exposing the pool via a public API.
          const pool = validator['pool']; // concurrency: 10

          // Group formatted results by cloud_type, then sort each group by time desc
          const byType: Record<string, any[]> = {};
          for (const item of formatted) {
            const ct = item.cloud_type || 'others';
            if (!byType[ct]) byType[ct] = [];
            byType[ct].push(item);
          }

          // Sort each group by update_time descending (newest first);
          // items with no timestamp sink to the end of their group.
          for (const ct of Object.keys(byType)) {
            byType[ct].sort((a: any, b: any) => {
              const ta = a.update_time || '';
              const tb = b.update_time || '';
              if (!ta && !tb) return 0;
              if (!ta) return 1;
              if (!tb) return -1;
              return tb.localeCompare(ta);
            });
          }

          // Build a round-robin validation queue: interleave items from each type
          // to give fair priority across all cloud types
          const typeOrder = ['quark', 'baidu', 'aliyun', '115', 'tianyi', '123pan', 'uc', 'xunlei', 'pikpak', 'magnet', 'ed2k', 'others'];
          const sortedTypes = typeOrder.filter(ct => byType[ct] && byType[ct].length > 0);
          // Sort by total count descending so types with more results get more validation slots
          sortedTypes.sort((a, b) => (byType[b]?.length || 0) - (byType[a]?.length || 0));

          const validationQueue: { item: any; type: string }[] = [];
          const maxLen = Math.max(...sortedTypes.map(ct => byType[ct].length), 0);
          for (let i = 0; i < maxLen; i++) {
            for (const ct of sortedTypes) {
              if (i < byType[ct].length) {
                validationQueue.push({ item: byType[ct][i], type: ct });
              }
            }
          }

          const validResults: any[] = [];
          const perTypeValid: Record<string, number> = {};
          let totalValid = 0;
          let totalInvalid = 0;
          let totalChecked = 0;
          // NOTE(review): declared number[] but item.id is a string like
          // 'search_0' (item is `any`, so the checker does not catch it);
          // annotation should probably be string[].
          const unknownItemIds: number[] = []; // IDs that got 'unknown' from PanSou

          // Pass 1: PanSou-only validation.
          // Counters are mutated from concurrent pool tasks; correctness relies
          // on Node's single-threaded event loop (no parallel mutation).
          const tasks = validationQueue.map(({ item, type }) => pool.run(async () => {
            // Stop if we've hit overall cap or per-type cap
            if (totalValid >= MAX_TOTAL_VALID) return;
            if ((perTypeValid[type] || 0) >= MAX_VALID_PER_TYPE) return;

            totalChecked++;
            try {
              const vr = await validator.validate(item.share_url, item.cloud_type);
              // 'unknown' = PanSou couldn't determine → treat as valid for now
              if (vr.status === 'valid' || vr.status === 'unknown') {
                if (vr.status === 'unknown') {
                  unknownItemIds.push(item.id);
                }
                if (totalValid < MAX_TOTAL_VALID && (perTypeValid[type] || 0) < MAX_VALID_PER_TYPE) {
                  validResults.push(item);
                  perTypeValid[type] = (perTypeValid[type] || 0) + 1;
                  totalValid++;
                  res.write(JSON.stringify({ type: 'result', id: item.id, valid: true, message: vr.message }) + '\n');
                }
              } else {
                totalInvalid++;
                res.write(JSON.stringify({ type: 'result', id: item.id, valid: false, message: vr.message }) + '\n');
              }
            } catch {
              // Validator error → optimistic: keep the item if caps allow.
              // NOTE(review): the 'result' event below is emitted even when the
              // caps prevented the push, so streamed events can exceed the
              // final validResults — confirm the frontend tolerates this.
              if (totalValid < MAX_TOTAL_VALID && (perTypeValid[type] || 0) < MAX_VALID_PER_TYPE) {
                validResults.push(item);
                perTypeValid[type] = (perTypeValid[type] || 0) + 1;
                totalValid++;
              }
              res.write(JSON.stringify({ type: 'result', id: item.id, valid: true }) + '\n');
            }
          }));
          await Promise.all(tasks);

          // Pass 2: If PanSou didn't provide enough valid results, validate
          // uncertain items with local fallback (external API calls)
          if (totalValid < MAX_TOTAL_VALID && unknownItemIds.length > 0) {
            const unknownItems = validationQueue.filter(({ item }) => unknownItemIds.includes(item.id));
            for (const { item, type } of unknownItems) {
              if (totalValid >= MAX_TOTAL_VALID) break;
              // NOTE(review): `break` aborts the whole pass once ONE type hits
              // its cap, skipping unknown items of other types — `continue`
              // may have been intended; confirm.
              if ((perTypeValid[type] || 0) >= MAX_VALID_PER_TYPE) break;
              try {
                const vr = await validator.validateWithLocalFallback(item.share_url, item.cloud_type);
                if (vr.status === 'valid') {
                  // Already in validResults from pass 1, just count it again.
                  // NOTE(review): this increments perTypeValid/totalValid a
                  // second time for the same item, inflating the counters and
                  // ending validation earlier — confirm this is intentional.
                  perTypeValid[type] = (perTypeValid[type] || 0) + 1;
                  totalValid++;
                  res.write(JSON.stringify({ type: 'result', id: item.id, valid: true, message: vr.message + ' (本地确认)' }) + '\n');
                } else if (vr.status === 'invalid') {
                  // Remove from validResults — was previously included as unknown
                  const idx = validResults.findIndex(r => r.id === item.id);
                  if (idx >= 0) {
                    validResults.splice(idx, 1);
                    perTypeValid[type] = Math.max(0, (perTypeValid[type] || 1) - 1);
                    totalValid--;
                  }
                  totalInvalid++;
                  res.write(JSON.stringify({ type: 'result', id: item.id, valid: false, message: vr.message + ' (本地确认失效)' }) + '\n');
                }
              } catch {
                // Keep as-is (already treated as valid from pass 1)
              }
            }
          }

          // Items never examined because the caps short-circuited pass 1.
          const skippedCount = validationQueue.length - totalChecked;

          res.write(JSON.stringify({
            type: 'complete',
            results: validResults,
            channels: groupResultsByChannel(validResults, (item: any) => item.cloud_type),
            total: validResults.length,
            filtered: totalInvalid,
            per_type: perTypeValid,
            skipped: skippedCount,
          }) + '\n');
        } else {
          // No validation - just send all results
          res.write(JSON.stringify({
            type: 'complete',
            results: formatted,
            channels: groupResultsByChannel(formatted, (item: any) => item.cloud_type),
            total: formatted.length,
            filtered: 0,
          }) + '\n');
        }

        res.end();
        break;
      }
      case 'VIDEO_PARSE': {
        const videoInfo = await parseVideo(intent.cleanInput);
        res.json({ intent: intent.type, platform: intent.platform, data: videoInfo });
        break;
      }
      case 'CLOUD_SAVE': {
        const result = await saveFromShare(intent.cleanInput, intent.platform || '', undefined, req.ip);
        res.json({ intent: intent.type, platform: intent.platform, ...result });
        break;
      }
      default:
        res.status(400).json({ error: 'Unknown intent type' });
    }
  } catch (err: any) {
    // NOTE(review): if streaming already started, these headers were sent and
    // res.status(...).json(...) will throw/log — acceptable only pre-stream.
    console.error('[Query] Error:', err);
    res.status(500).json({ error: err.message || 'Internal server error' });
  }
});
|
||||
|
||||
/**
|
||||
* GET /api/search
|
||||
* Search with optional link validation filtering
|
||||
*/
|
||||
router.get('/search', searchLimiter, async (req: Request, res: Response) => {
|
||||
try {
|
||||
const keyword = (req.query.q || req.query.kw) as string;
|
||||
const page = parseInt(req.query.page as string || '1', 10);
|
||||
const ip = req.ip || req.socket.remoteAddress || '';
|
||||
|
||||
if (!keyword) {
|
||||
res.status(400).json({ error: 'Query parameter "q" is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const result = await search(keyword, page, ip);
|
||||
|
||||
// Pass through: use all results
|
||||
const allResults = result.results || [];
|
||||
|
||||
// Transform to frontend format
|
||||
let formatted = (allResults || []).map((item: any) => ({
|
||||
id: item.id || '',
|
||||
title: filterTitle(item.title || item.content || ''),
|
||||
description: item.content || item.snippet || '',
|
||||
share_url: item.url || '',
|
||||
cloud_type: detectCloudType(item.url || ''),
|
||||
file_size: '',
|
||||
source: item.source || '',
|
||||
datetime: item.datetime || '',
|
||||
cover: Array.isArray(item.images) && item.images.length > 0 ? item.images[0] : '',
|
||||
password: item.password || '',
|
||||
}));
|
||||
|
||||
// Filter out expired/invalid links
|
||||
const expiredCount = formatted.filter(r => r.share_url && isExpiredShareLink(r.share_url)).length;
|
||||
formatted = formatted.filter(r => !r.share_url || !isExpiredShareLink(r.share_url));
|
||||
|
||||
// Filter by enabled cloud types (admin-configurable per-type toggle)
|
||||
const enabledSet = getEnabledCloudTypeSet();
|
||||
formatted = formatted.filter(r => !r.cloud_type || enabledSet.has(r.cloud_type));
|
||||
|
||||
// Return results immediately without blocking validation
|
||||
const channels = groupResultsByChannel(formatted, (item: any) =>
|
||||
detectCloudType(item.url || '')
|
||||
);
|
||||
|
||||
res.json({
|
||||
results: formatted,
|
||||
channels,
|
||||
total: formatted.length,
|
||||
filtered: expiredCount,
|
||||
link_validation: false,
|
||||
});
|
||||
} catch (err: any) {
|
||||
console.error('[Search] Error:', err);
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Load title filter rules from DB and apply to a title.
|
||||
*/
|
||||
function filterTitle(title: string): string {
|
||||
const rules = getSystemConfig('title_filter_rules') || '';
|
||||
return applyTitleFilter(title, rules);
|
||||
}
|
||||
|
||||
// detectCloudType is imported from config/cloud-labels
|
||||
|
||||
// 检测失效的分享链接(支持多种模式)
|
||||
function isExpiredShareLink(url: string): boolean {
|
||||
if (!url) return false;
|
||||
|
||||
// 空链接/纯片段(无实际链接内容)
|
||||
if (url.startsWith('#') || url.length < 10) return true;
|
||||
|
||||
// PanSou 有时返回残缺链接如 "/s/xxx" 或只有 "#/list/share"
|
||||
if (url.startsWith('/') && !url.startsWith('//') && !url.startsWith('http')) return true;
|
||||
|
||||
// 夸克链接格式校验
|
||||
if (url.includes('pan.quark.cn')) {
|
||||
const baseUrl = url.split('#')[0]; // 去掉 hash 路由片段
|
||||
// 有效格式必须是 pan.quark.cn/s/xxxxxx
|
||||
if (!/pan\.quark\.cn\/s\/[a-zA-Z0-9]+/.test(baseUrl)) return true;
|
||||
}
|
||||
|
||||
// 百度网盘常见失效格式
|
||||
if (url.includes('pan.baidu.com') && /share\/init\?surl=$/.test(url)) return true;
|
||||
|
||||
// 阿里云盘失效格式(短到异常的链接)
|
||||
if (url.includes('aliyundrive.com') && url.length < 30) return true;
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Group search results into channels by cloud type.
|
||||
* Each channel: { cloud_type, label, color, count, items }
|
||||
*/
|
||||
|
||||
function groupResultsByChannel(results: any[], getCloudType?: (item: any) => string): any[] {
|
||||
const groups: Record<string, any[]> = {};
|
||||
const order: Record<string, number> = {
|
||||
quark: 1, baidu: 2, aliyun: 3, '115': 4,
|
||||
tianyi: 5, '123pan': 6, uc: 7, xunlei: 8,
|
||||
pikpak: 9, magnet: 10, ed2k: 11, others: 12,
|
||||
};
|
||||
for (const item of results) {
|
||||
const ct = getCloudType ? getCloudType(item) : (item.source || detectCloudType(item.url || '') || 'others');
|
||||
if (!groups[ct]) groups[ct] = [];
|
||||
groups[ct].push(item);
|
||||
}
|
||||
return Object.entries(groups)
|
||||
.sort((a, b) => (order[a[0]] ?? 99) - (order[b[0]] ?? 99))
|
||||
.map(([cloud_type, items]) => ({
|
||||
cloud_type,
|
||||
label: (CLOUD_LABELS as any)[cloud_type] || cloud_type,
|
||||
color: (CLOUD_COLORS as any)[cloud_type] || '#95a5a6',
|
||||
count: items.length,
|
||||
items,
|
||||
}));
|
||||
}
|
||||
|
||||
// ============ Video ============
|
||||
|
||||
/**
|
||||
* POST /api/video/parse
|
||||
* Parse a video URL
|
||||
*/
|
||||
router.post('/video/parse', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { url } = req.body;
|
||||
if (!url) {
|
||||
res.status(400).json({ error: 'URL is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const videoInfo = await parseVideo(url);
|
||||
res.json(videoInfo);
|
||||
} catch (err: any) {
|
||||
console.error('[Video] Parse error:', err);
|
||||
res.status(500).json({ error: err.message || 'Failed to parse video' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============ Cloud Save ============
|
||||
|
||||
/**
|
||||
* POST /api/save
|
||||
* Save a share link to a specific cloud
|
||||
*/
|
||||
router.post('/save', saveLimiter, async (req: Request, res: Response) => {
|
||||
try {
|
||||
// Support both formats:
|
||||
// 1. Backend-style: { url, cloudType }
|
||||
// 2. Frontend-style: { source: { share_url }, target_cloud }
|
||||
const url = req.body.url || req.body.source?.share_url || req.body.source?.url;
|
||||
const cloudType = req.body.cloudType || req.body.target_cloud || (req.body.source as any)?.cloud_type;
|
||||
const sourceTitle = req.body.source_title || req.body.source?.title || req.body.title;
|
||||
if (!url || !cloudType) {
|
||||
res.status(400).json({ error: 'URL and cloudType/cloud_type are required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const ip = req.ip || (req.headers['x-forwarded-for'] as string)?.split(',')[0]?.trim() || '';
|
||||
const result = await saveFromShare(url, cloudType, sourceTitle, ip);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
console.error('[Save] Error:', err);
|
||||
res.status(500).json({ error: err.message || 'Failed to save to cloud' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/video/save-to-cloud
|
||||
* Save a video to cloud
|
||||
*/
|
||||
router.post('/video/save-to-cloud', saveLimiter, async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { videoUrl, cloudType, title } = req.body;
|
||||
if (!videoUrl || !cloudType) {
|
||||
res.status(400).json({ error: 'videoUrl and cloudType are required' });
|
||||
return;
|
||||
}
|
||||
|
||||
const ip = req.ip || (req.headers['x-forwarded-for'] as string)?.split(',')[0]?.trim() || '';
|
||||
const result = await saveFromShare(videoUrl, cloudType, title, ip);
|
||||
res.json(result);
|
||||
} catch (err: any) {
|
||||
console.error('[Video] Save-to-cloud error:', err);
|
||||
res.status(500).json({ error: err.message || 'Failed to save video to cloud' });
|
||||
}
|
||||
});
|
||||
|
||||
// ============ Rankings ============
|
||||
|
||||
/**
|
||||
* GET /api/rankings
|
||||
* Get search rankings
|
||||
*/
|
||||
router.get('/rankings', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const rankings = await getRankings();
|
||||
res.json(rankings);
|
||||
} catch (err: any) {
|
||||
console.error('[Rankings] Error:', err);
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/rankings/hot
|
||||
* Get hot keywords
|
||||
*/
|
||||
router.get('/rankings/hot', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const keywords = await getHotKeywords();
|
||||
res.json(keywords);
|
||||
} catch (err: any) {
|
||||
console.error('[Hot] Error:', err);
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/rankings/categorized
|
||||
* Get categorized rankings (hot + newest per category), cached for 12h
|
||||
*/
|
||||
router.get('/rankings/categorized', async (_req: Request, res: Response) => {
|
||||
try {
|
||||
const data = await getCategorizedRankings();
|
||||
res.json(data);
|
||||
} catch (err: any) {
|
||||
console.error('[Categorized] Error:', err);
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/site-config
|
||||
* Public site configuration (no auth required).
|
||||
*/
|
||||
router.get('/site-config', (_req: Request, res: Response) => {
|
||||
try {
|
||||
const siteLogo = getSystemConfig('site_logo') || '';
|
||||
const siteName = getSystemConfig('site_name') || 'CloudSearch';
|
||||
const fallbackImage = getSystemConfig('search_fallback_image') || '';
|
||||
const siteDisclaimer = getSystemConfig('site_disclaimer') || '';
|
||||
const siteMarquee = getSystemConfig('site_marquee') || '';
|
||||
res.json({ site_logo: siteLogo, site_name: siteName, search_fallback_image: fallbackImage, site_disclaimer: siteDisclaimer, site_marquee: siteMarquee });
|
||||
} catch (err: any) {
|
||||
res.status(500).json({ error: err.message || 'Internal server error' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/me
|
||||
* Get current user info from token (public, no auth middleware).
|
||||
*/
|
||||
router.get('/me', (req: Request, res: Response) => {
|
||||
try {
|
||||
const authHeader = req.headers.authorization;
|
||||
if (!authHeader || !authHeader.startsWith('Bearer ')) {
|
||||
res.json({ loggedIn: false });
|
||||
return;
|
||||
}
|
||||
const token = authHeader.split(' ')[1];
|
||||
const payload = verifyToken(token);
|
||||
if (!payload) {
|
||||
res.json({ loggedIn: false });
|
||||
return;
|
||||
}
|
||||
res.json({ loggedIn: true, id: payload.id, username: payload.username });
|
||||
} catch (err: any) {
|
||||
res.json({ loggedIn: false });
|
||||
}
|
||||
});
|
||||
|
||||
// ============ Admin ============
|
||||
|
||||
|
||||
/**
|
||||
* Extract genre tags from search result titles.
|
||||
*/
|
||||
function extractTagsFromResults(results: any[], keyword: string): string[] {
|
||||
const tags: string[] = [];
|
||||
if (keyword) tags.push(keyword);
|
||||
|
||||
const genreKeywords: Record<string, string> = {
|
||||
'动画': '动画', '动漫': '动画', '国漫': '国漫',
|
||||
'剧场版': '剧场版', '年番': '年番',
|
||||
'动作': '动作', '奇幻': '奇幻', '玄幻': '玄幻',
|
||||
'仙侠': '仙侠', '古装': '古装', '爱情': '爱情',
|
||||
'科幻': '科幻', '喜剧': '喜剧', '悬疑': '悬疑',
|
||||
'恐怖': '恐怖', '惊悚': '惊悚', '剧情': '剧情',
|
||||
'冒险': '冒险', '战争': '战争', '武侠': '武侠',
|
||||
'纪录': '纪录片', '真人': '真人秀', '短片': '短片',
|
||||
};
|
||||
|
||||
const seen = new Set<string>();
|
||||
for (const r of results) {
|
||||
const title = (r.title || r.note || '') as string;
|
||||
for (const [key, val] of Object.entries(genreKeywords)) {
|
||||
if (title.includes(key) && !seen.has(val)) {
|
||||
seen.add(val);
|
||||
tags.push(val);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return tags;
|
||||
}
|
||||
|
||||
/**
|
||||
* Query DB for previously saved resources that match the keyword.
|
||||
* Returns formatted results for immediate streaming before external API call.
|
||||
*/
|
||||
/**
 * Query DB for previously saved resources that match the keyword.
 * Returns formatted results for immediate streaming before external API call.
 *
 * Matches successful save_records by LIKE on source_title OR source_url,
 * newest first, capped at 20 rows. Any DB error is logged and degrades to an
 * empty array — this path must never break the streaming search response.
 * NOTE(review): the keyword is interpolated into LIKE patterns as a bound
 * parameter (safe from injection), but '%'/'_' inside the keyword act as
 * wildcards — confirm that is acceptable.
 */
function getSavedResources(keyword: string): any[] {
  try {
    const db = getDb();
    const rows = db.prepare(`
      SELECT source_url, source_title, target_cloud, share_url, created_at
      FROM save_records
      WHERE status = 'success'
      AND (source_title LIKE ? OR source_url LIKE ?)
      ORDER BY created_at DESC
      LIMIT 20
    `).all(`%${keyword}%`, `%${keyword}%`) as any[];

    // Shape rows like regular search results so the frontend renders them
    // uniformly; ids are positional ('saved_<idx>') within this response.
    return rows.map((row: any, idx: number) => ({
      id: `saved_${idx}`,
      title: row.source_title || row.source_url || '',
      description: '',
      share_url: row.share_url || row.source_url || '',
      cloud_type: detectCloudType(row.share_url || row.source_url || ''),
      file_size: '',
      update_time: row.created_at || '',
      source: 'local',
      file_id: '',
      cover: '',
      password: '',
    }));
  } catch (err) {
    console.error('[SavedResources] DB query error:', err);
    return [];
  }
}
|
||||
|
||||
export default router;
|
||||
125
packages/backend/src/routes/upload.routes.ts
Normal file
125
packages/backend/src/routes/upload.routes.ts
Normal file
@@ -0,0 +1,125 @@
|
||||
import { Router, Request, Response } from 'express';
|
||||
import multer from 'multer';
|
||||
import sharp from 'sharp';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { authMiddleware } from '../admin/auth.service';
|
||||
import { updateSystemConfig } from '../admin/system-config.service';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// ============ Upload ============
|
||||
|
||||
/**
|
||||
* POST /api/admin/upload-fallback-image
|
||||
* Upload a fallback cover image for search results without covers.
|
||||
* Recommended: 320×180 JPEG/PNG (16:9), max 2MB.
|
||||
*/
|
||||
// Directory holding the fallback cover image; created at module load if
// missing. NOTE(review): hard-coded container path — assumes the app runs
// with /app as its root (Docker); confirm for non-container deployments.
const uploadDir = path.resolve('/app/uploads/fallback');
if (!fs.existsSync(uploadDir)) {
  fs.mkdirSync(uploadDir, { recursive: true });
}

// Multer disk storage writing every upload to one fixed temp filename.
// NOTE(review): concurrent uploads overwrite each other (last writer wins) —
// acceptable for a single-admin setting, confirm otherwise.
const fallbackStorage = multer.diskStorage({
  destination: (_req, _file, cb) => cb(null, uploadDir),
  filename: (_req, _file, cb) => {
    const ext = '.jpg';
    cb(null, `fallback_cover_tmp${ext}`);
  },
});

const upload = multer({
  storage: fallbackStorage,
  limits: { fileSize: 2 * 1024 * 1024 }, // 2MB max
  fileFilter: (_req, file, cb) => {
    // Accept any image/* mimetype (client-declared; sharp re-decodes below).
    if (file.mimetype.startsWith('image/')) {
      cb(null, true);
    } else {
      cb(new Error('仅支持图片文件(JPEG/PNG)'));
    }
  },
});

// POST /api/admin/upload-fallback-image (auth required): receive one image,
// recompress it, persist its public URL in system config.
router.post('/admin/upload-fallback-image', authMiddleware, upload.single('image'), async (req: Request, res: Response) => {
  try {
    if (!req.file) {
      res.status(400).json({ error: '请选择要上传的图片' });
      return;
    }
    // Compress: max width 320px, JPEG quality 80
    const outPath = path.resolve(uploadDir, 'fallback_cover.jpg');
    await sharp(req.file.path)
      .resize(320, undefined, { fit: 'inside', withoutEnlargement: true })
      .jpeg({ quality: 80 })
      .toFile(outPath);
    // Delete the raw uploaded temp file (if the path differs); fire-and-forget.
    if (req.file.path !== outPath) {
      fs.unlink(req.file.path, () => {});
    }
    const url = `/api/uploads/fallback/fallback_cover.jpg`;
    updateSystemConfig('search_fallback_image', url);
    const stat = fs.statSync(outPath);
    res.json({ success: true, url, message: `✅ 兜底图已压缩上传 (${(stat.size / 1024).toFixed(1)}KB)` });
  } catch (err: any) {
    res.status(500).json({ error: err.message || '上传失败' });
  }
});
|
||||
|
||||
/**
|
||||
* POST /api/admin/upload-logo
|
||||
* Upload a site logo image displayed on search page (home link) and homepage.
|
||||
* Recommended: 320×60 or similar wide/banner ratio, JPEG/PNG/WebP, max 2MB.
|
||||
*/
|
||||
// Directory holding the site logo; created at module load if missing.
// NOTE(review): hard-coded container path — see uploadDir note above applies
// here too; confirm for non-container deployments.
const logoUploadDir = path.resolve('/app/uploads/logo');
if (!fs.existsSync(logoUploadDir)) {
  fs.mkdirSync(logoUploadDir, { recursive: true });
}

// Multer disk storage writing every logo upload to one fixed temp filename
// (same last-writer-wins caveat as the fallback image storage).
const logoStorage = multer.diskStorage({
  destination: (_req, _file, cb) => cb(null, logoUploadDir),
  filename: (_req, _file, cb) => {
    cb(null, `site_logo_tmp.png`);
  },
});

const logoUpload = multer({
  storage: logoStorage,
  limits: { fileSize: 2 * 1024 * 1024 }, // 2MB max
  fileFilter: (_req, file, cb) => {
    // Accept any image/* mimetype (client-declared; sharp re-decodes below).
    if (file.mimetype.startsWith('image/')) {
      cb(null, true);
    } else {
      cb(new Error('仅支持图片文件(JPEG/PNG/WebP)'));
    }
  },
});

// POST /api/admin/upload-logo (auth required): receive one image, recompress
// to PNG (max width 640px), persist its public URL in system config.
router.post('/admin/upload-logo', authMiddleware, logoUpload.single('image'), async (req: Request, res: Response) => {
  try {
    if (!req.file) {
      res.status(400).json({ error: '请选择要上传的图片' });
      return;
    }
    // Compress: max width 640px, PNG output
    const outPath = path.resolve(logoUploadDir, 'site_logo.png');
    await sharp(req.file.path)
      .resize(640, undefined, { fit: 'inside', withoutEnlargement: true })
      .png({ compressionLevel: 9 })
      .toFile(outPath);
    // Delete the raw uploaded temp file (if the path differs); fire-and-forget.
    if (req.file.path !== outPath) {
      fs.unlink(req.file.path, () => {});
    }
    const url = `/api/uploads/logo/site_logo.png`;
    updateSystemConfig('site_logo', url);
    const stat = fs.statSync(outPath);
    res.json({ success: true, url, message: `✅ 站点图标已压缩上传 (${(stat.size / 1024).toFixed(1)}KB)` });
  } catch (err: any) {
    res.status(500).json({ error: err.message || '上传失败' });
  }
});
|
||||
|
||||
import { startQrLogin, getQrLoginStatus, cancelQrLogin } from '../cloud/qr-login.service';
|
||||
|
||||
// ===== 夸克扫码登录 (不需要 auth,用户未登录时也需要能用) =====
|
||||
|
||||
export default router;
|
||||
351
packages/backend/src/search/rankings.service.ts
Executable file
351
packages/backend/src/search/rankings.service.ts
Executable file
@@ -0,0 +1,351 @@
|
||||
// Native fetch available in Node 20+
|
||||
import { getDb } from '../database/database';
|
||||
import { getTimezone, formatLocalDateTime } from '../utils/time';
|
||||
|
||||
/** One entry in a ranking list. */
export interface RankingItem {
  keyword: string;       // display title / searchable keyword
  searchCount: number;   // popularity metric (source-specific: views, hot score, …)
  updatedAt: string;     // free-form freshness text (episode label, desc, …)
  rating?: number;       // 0 or absent when the source provides no rating
}

/** One category's ranking: a "hot" list and a "newest" list. */
export interface CategorizedRanking {
  category: string;  // machine key, see CATEGORY_ORDER
  label: string;     // human-readable (Chinese) display label
  hot: RankingItem[];
  newest: RankingItem[];
}

/** Full categorized-rankings payload returned to the frontend. */
export interface CategorizedResponse {
  fetchedAt: string;
  categories: CategorizedRanking[];
}

// ===== Bilibili PGC ranking configuration =====
interface BiliPgcDef {
  category: string;
  label: string;
  season_type: number; // 1=bangumi, 2=movie, 3=documentary, 4=Chinese animation, 5=TV drama, 7=variety
}

const BILI_PGC_CATEGORIES: BiliPgcDef[] = [
  // Chinese animation (guochuang): official domestic series
  { category: 'donghua', label: '国产动漫', season_type: 4 },
  // Bangumi: Japanese and other global animation
  { category: 'global_anime', label: '热门动漫', season_type: 1 },
];

// ===== Baidu hot-search board configuration =====
interface BaiduBoardDef {
  category: string;
  label: string;
  tab: string; // movie = movie hot search, teleplay = TV-drama hot search
}

const BAIDU_BOARDS: BaiduBoardDef[] = [
  // Baidu movie board: real-time domestic movie popularity
  { category: 'movie', label: '国内电影', tab: 'movie' },
  // Baidu TV-drama board: domestic series popularity
  { category: 'tv', label: '热门剧集', tab: 'teleplay' },
];

// ===== TMDB category configuration (Western and niche content) =====
interface TmdbCategoryDef {
  category: string;
  label: string;
  hotUrl: string;     // discover endpoint sorted by rating
  newestUrl: string;  // discover endpoint sorted by release/air date
}

const TMDB_CATEGORIES: TmdbCategoryDef[] = [
  {
    category: 'western_movie', label: '欧美电影',
    hotUrl: 'https://api.themoviedb.org/3/discover/movie?with_origin_country=US&sort_by=vote_average.desc&vote_count.gte=10',
    newestUrl: 'https://api.themoviedb.org/3/discover/movie?with_origin_country=US&sort_by=release_date.desc&vote_count.gte=1',
  },
  {
    category: 'western_tv', label: '欧美剧集',
    hotUrl: 'https://api.themoviedb.org/3/discover/tv?with_origin_country=US&sort_by=vote_average.desc&vote_count.gte=10',
    newestUrl: 'https://api.themoviedb.org/3/discover/tv?with_origin_country=US&sort_by=first_air_date.desc&vote_count.gte=10',
  },
  {
    // "Hidden gems": well-rated titles with a capped vote count
    category: 'niche', label: '冷门佳片',
    hotUrl: 'https://api.themoviedb.org/3/discover/movie?sort_by=vote_average.desc&vote_count.gte=10&vote_count.lte=500',
    newestUrl: 'https://api.themoviedb.org/3/discover/movie?sort_by=release_date.desc&vote_count.gte=1&vote_count.lte=500',
  },
];

// ===== Display order of categories in the combined response =====
const CATEGORY_ORDER: Record<string, number> = {
  donghua: 1,
  movie: 2,
  tv: 3,
  global_anime: 4,
  western_movie: 5,
  western_tv: 6,
  niche: 7,
  hotsite: 8,
};
|
||||
|
||||
// ===== 12小时缓存 =====
|
||||
let cache: { data: CategorizedResponse; time: number } | null = null;
|
||||
const CACHE_TTL = 12 * 60 * 60 * 1000;
|
||||
|
||||
function isCacheValid(): boolean {
|
||||
return cache !== null && (Date.now() - cache.time) < CACHE_TTL;
|
||||
}
|
||||
|
||||
// ===== Bilibili PGC API =====

/**
 * Fetch the Bilibili PGC ranking (bangumi / Chinese animation).
 *
 * @param season_type Bilibili season type (see BiliPgcDef comment)
 * @returns up to 20 RankingItem entries; any failure (HTTP, API code,
 *          timeout) is logged and degrades to an empty array.
 */
async function fetchFromBiliPgc(season_type: number): Promise<RankingItem[]> {
  try {
    const url = `https://api.bilibili.com/pgc/web/rank/list?season_type=${season_type}&day=7`;
    // Browser-like headers + Referer: the endpoint rejects plain clients.
    const resp = await fetch(url, {
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Referer': 'https://www.bilibili.com/',
        'Accept': 'application/json, text/plain, */*',
        'Accept-Language': 'zh-CN,zh;q=0.9',
      },
      signal: AbortSignal.timeout(8000),
    });
    if (!resp.ok) {
      console.error(`[BiliPGC] HTTP ${resp.status} for season_type=${season_type}`);
      return [];
    }
    const json = await resp.json() as any;
    // Bilibili convention: code 0 = success; payload under result.list.
    if (json.code !== 0 || !json.result?.list) {
      console.error(`[BiliPGC] API error code=${json.code} for season_type=${season_type}`);
      return [];
    }
    return json.result.list.slice(0, 20).map((item: any) => {
      const stat = item.stat || {};
      const viewCount = stat.view || 0;
      const followCount = stat.follow || 0;
      // Prefer view count; fall back to follower count when views are absent.
      const searchCount = viewCount > 0 ? viewCount : followCount;

      // Rating may arrive as a string like "9.8分" — extract the number.
      let rating = 0;
      if (item.rating) {
        const m = String(item.rating).match(/([\d.]+)/);
        if (m) rating = parseFloat(m[1]);
      }

      return {
        keyword: item.title || '',
        searchCount,
        // Latest-episode label doubles as the freshness text.
        updatedAt: item.new_ep?.index_show || item.new_ep?.cover || '',
        rating,
      };
    });
  } catch (err) {
    console.error(`[BiliPGC] Fetch error for season_type=${season_type}:`, (err as Error).message);
    return [];
  }
}
|
||||
|
||||
// ===== 百度热搜榜 API =====
|
||||
|
||||
/**
|
||||
* 抓取百度热搜榜
|
||||
* tab: movie=电影, teleplay=电视剧
|
||||
*/
|
||||
async function fetchFromBaidu(tab: string): Promise<RankingItem[]> {
|
||||
try {
|
||||
const url = `https://top.baidu.com/api/board?tab=${tab}`;
|
||||
const resp = await fetch(url, {
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Referer': 'https://top.baidu.com/board',
|
||||
},
|
||||
signal: AbortSignal.timeout(8000),
|
||||
});
|
||||
if (!resp.ok) {
|
||||
console.error(`[Baidu] HTTP ${resp.status} for tab=${tab}`);
|
||||
return [];
|
||||
}
|
||||
const json = await resp.json() as any;
|
||||
if (!json.success || !json.data?.cards) {
|
||||
console.error(`[Baidu] API error for tab=${tab}`);
|
||||
return [];
|
||||
}
|
||||
|
||||
const results: RankingItem[] = [];
|
||||
for (const card of json.data.cards) {
|
||||
for (const item of (card.content || [])) {
|
||||
results.push({
|
||||
keyword: item.word || '',
|
||||
// hotScore can be like "96438", parse as number
|
||||
searchCount: parseInt(item.hotScore || '0', 10) || 0,
|
||||
updatedAt: item.desc || '',
|
||||
rating: 0,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return results.slice(0, 20);
|
||||
} catch (err) {
|
||||
console.error(`[Baidu] Fetch error for tab=${tab}:`, (err as Error).message);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// ===== TMDB =====
|
||||
|
||||
function getTmdbToken(): string {
|
||||
const db = getDb();
|
||||
return (db.prepare('SELECT value FROM system_configs WHERE key = ?').get('tmdb_api_token') as any)?.value || '';
|
||||
}
|
||||
|
||||
function tmdbResultToRanking(item: any): RankingItem {
|
||||
const title = item.title || item.name || '';
|
||||
const date = item.release_date || item.first_air_date || '';
|
||||
const rating = item.vote_average ? Math.round(item.vote_average * 10) / 10 : 0;
|
||||
return {
|
||||
keyword: title,
|
||||
searchCount: item.vote_count || 0,
|
||||
updatedAt: date,
|
||||
rating,
|
||||
};
|
||||
}
|
||||
|
||||
async function tmdbFetch(url: string, token: string): Promise<any[]> {
|
||||
const fullUrl = `${url}${url.includes('?') ? '&' : '?'}language=zh-CN`;
|
||||
try {
|
||||
const resp = await fetch(fullUrl, {
|
||||
headers: { 'Authorization': `Bearer ${token}` },
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
if (!resp.ok) {
|
||||
console.error(`[TMDB] HTTP ${resp.status} for ${url}`);
|
||||
return [];
|
||||
}
|
||||
const data = await resp.json() as any;
|
||||
return (data.results || []).slice(0, 20);
|
||||
} catch (err) {
|
||||
console.error(`[TMDB] Fetch error for ${url}:`, err);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// ===== Main pipeline =====

/**
 * Build the full categorized-rankings payload by fanning out to all providers
 * (Bilibili PGC, Baidu boards, TMDB, and this site's own hot-keywords table)
 * in parallel, then merging and ordering the category list.
 *
 * @returns fetch timestamp plus all categories sorted by CATEGORY_ORDER
 */
async function fetchRankings(): Promise<CategorizedResponse> {
  const fetchedAt = formatLocalDateTime();

  // 1. Fetch Bilibili PGC boards in parallel (donghua / anime).
  const biliPromises = BILI_PGC_CATEGORIES.map(async (cat) => {
    const results = await fetchFromBiliPgc(cat.season_type);
    // Split the single board down the middle: first half "hot", second half "newest".
    const mid = Math.ceil(results.length / 2);
    return {
      category: cat.category,
      label: cat.label,
      hot: results.slice(0, mid),
      newest: results.slice(mid),
    };
  });

  // 2. Fetch Baidu trending boards in parallel (movies, TV series).
  // Baidu only has a hot board (no "newest"), so everything goes into hot.
  const baiduPromises = BAIDU_BOARDS.map(async (board) => {
    const results = await fetchFromBaidu(board.tab);
    return {
      category: board.category,
      label: board.label,
      hot: results,
      newest: [],
    };
  });

  // 3. Fetch TMDB categories in parallel — only when an API token is configured.
  const token = getTmdbToken();
  let tmdbResults: CategorizedRanking[] = [];
  if (token) {
    const tmdbPromises = TMDB_CATEGORIES.map(async (cat) => {
      // Each TMDB category has separate "hot" and "newest" endpoints.
      const [hotResults, newestResults] = await Promise.all([
        tmdbFetch(cat.hotUrl, token),
        tmdbFetch(cat.newestUrl, token),
      ]);
      return {
        category: cat.category,
        label: cat.label,
        hot: hotResults.map(tmdbResultToRanking),
        newest: newestResults.map(tmdbResultToRanking),
      };
    });
    tmdbResults = await Promise.all(tmdbPromises);
  }

  // 4. This site's own hot searches, from the local hot_keywords table.
  const db = getDb();
  const rows = db.prepare(
    'SELECT keyword, search_count as searchCount, updated_at as updatedAt FROM hot_keywords ORDER BY search_count DESC LIMIT 20'
  ).all() as RankingItem[];
  const newestRows = db.prepare(
    'SELECT keyword, search_count as searchCount, updated_at as updatedAt FROM hot_keywords ORDER BY updated_at DESC LIMIT 20'
  ).all() as RankingItem[];

  const hotsiteCategory: CategorizedRanking = {
    category: 'hotsite',
    label: '本站热搜',
    hot: rows,
    newest: newestRows,
  };

  // 5. Await and merge all provider results.
  const [biliResults, baiduResults] = await Promise.all([
    Promise.all(biliPromises),
    Promise.all(baiduPromises),
  ]);
  const allCategories = [...biliResults, ...baiduResults, ...tmdbResults, hotsiteCategory];

  // Order categories by CATEGORY_ORDER; unknown categories sink to the end (99).
  allCategories.sort((a, b) => (CATEGORY_ORDER[a.category] || 99) - (CATEGORY_ORDER[b.category] || 99));

  return { fetchedAt, categories: allCategories };
}
|
||||
|
||||
/**
 * Public entry point for categorized rankings, with a 12-hour cache.
 *
 * On a cache miss it refetches everything; if the refresh fails it serves the
 * stale cache when available, otherwise degrades to a single "hotsite"
 * category built from the local hot_keywords table.
 */
export async function getCategorizedRankings(): Promise<CategorizedResponse> {
  if (isCacheValid()) {
    return cache!.data; // isCacheValid() guarantees cache is non-null here
  }

  try {
    const data = await fetchRankings();
    cache = { data, time: Date.now() };
    return data;
  } catch (err) {
    console.error('[Rankings] Fetch error:', err);
    // Prefer stale data over an empty answer.
    if (cache) return cache.data;
    // Last resort: local hot keywords only.
    const db = getDb();
    const rows = db.prepare(
      'SELECT keyword, search_count as searchCount, updated_at as updatedAt FROM hot_keywords ORDER BY search_count DESC LIMIT 20'
    ).all() as RankingItem[];
    return {
      fetchedAt: formatLocalDateTime(),
      categories: [{
        category: 'hotsite', label: '本站热搜',
        hot: rows,
        // Re-sort a copy by update time (string compare, newest first) for "newest".
        newest: [...rows].sort((a, b) => b.updatedAt.localeCompare(a.updatedAt)).slice(0, 20),
      }],
    };
  }
}
|
||||
|
||||
export async function getRankings(): Promise<RankingItem[]> {
|
||||
const db = getDb();
|
||||
const rows = db.prepare(
|
||||
'SELECT keyword, search_count as searchCount, updated_at as updatedAt FROM hot_keywords ORDER BY search_count DESC LIMIT 20'
|
||||
).all() as RankingItem[];
|
||||
return rows;
|
||||
}
|
||||
|
||||
export async function getHotKeywords(): Promise<string[]> {
|
||||
const db = getDb();
|
||||
const rows = db.prepare(
|
||||
'SELECT keyword FROM hot_keywords ORDER BY search_count DESC LIMIT 20'
|
||||
).all() as { keyword: string }[];
|
||||
return rows.map(r => r.keyword);
|
||||
}
|
||||
125
packages/backend/src/search/search-optimizer.ts
Executable file
125
packages/backend/src/search/search-optimizer.ts
Executable file
@@ -0,0 +1,125 @@
|
||||
/**
|
||||
* Search Results Optimizer
|
||||
*
|
||||
* For each cloud type, keep only the top N most relevant results.
|
||||
* Order groups by priority: real cloud storage > other providers > magnet/others.
|
||||
*
|
||||
* Goal: give users a manageable, high-quality result set instead of overwhelming them
|
||||
* with hundreds of results dominated by magnet links.
|
||||
*/
|
||||
|
||||
import { detectCloudType } from '../config/cloud-labels';
|
||||
|
||||
/** Minimal result shape the optimizer needs; extra provider fields pass through untouched. */
interface OptimizableResult {
  title?: string;
  url?: string;    // used for cloud-type detection
  source?: string;
  score?: number;  // relevance score; missing scores are treated as 0
  [key: string]: any;
}
|
||||
|
||||
/** Priority tiers for result ordering */
|
||||
const CLOUD_PRIORITY: Record<string, number> = {
|
||||
// Tier 1: Major cloud storage (most useful for save-to-cloud feature)
|
||||
baidu: 10,
|
||||
quark: 10,
|
||||
aliyun: 10,
|
||||
// Tier 2: Other cloud storage
|
||||
'115': 20,
|
||||
tianyi: 20,
|
||||
'123pan': 20,
|
||||
uc: 20,
|
||||
xunlei: 20,
|
||||
pikpak: 20,
|
||||
// Tier 3: Mobile/app links (not very useful)
|
||||
mobile: 50,
|
||||
// Tier 4: Direct links (lowest utility for cloud saving)
|
||||
magnet: 100,
|
||||
ed2k: 100,
|
||||
others: 100,
|
||||
};
|
||||
|
||||
const DEFAULT_PRIORITY = 50;
|
||||
|
||||
/** Get cloud type for a result, with an extra check for tracker URLs */
|
||||
function getCloudType(result: OptimizableResult): string {
|
||||
const url = result.url;
|
||||
// Check for tracker/private-site URLs not covered by shared detection
|
||||
if (url && /mteam|hdarea|hdsky/i.test(url)) return 'others';
|
||||
return detectCloudType(url);
|
||||
}
|
||||
|
||||
function getPriority(cloudType: string): number {
|
||||
return CLOUD_PRIORITY[cloudType] ?? DEFAULT_PRIORITY;
|
||||
}
|
||||
|
||||
/** Output of optimizeSearchResults(). */
export interface OptimizationResult {
  results: OptimizableResult[];
  /** Per-type stats for display */
  perType: Array<{ type: string; count: number; total: number }>;
  /** How many items were kept vs filtered */
  keptCount: number;
  filteredCount: number;
}
|
||||
|
||||
/**
|
||||
* Optimize search results:
|
||||
* 1. Group by cloud type
|
||||
* 2. Sort by score descending within each group
|
||||
* 3. Keep only top `maxPerType` results per type
|
||||
* 4. Order groups by priority (cloud storage first)
|
||||
*/
|
||||
export function optimizeSearchResults(
|
||||
items: OptimizableResult[],
|
||||
maxPerType: number = 20
|
||||
): OptimizationResult {
|
||||
// Step 1: Group by cloud type
|
||||
const grouped: Record<string, OptimizableResult[]> = {};
|
||||
const typeTotals: Record<string, number> = {};
|
||||
|
||||
for (const item of items) {
|
||||
const ct = getCloudType(item);
|
||||
if (!grouped[ct]) {
|
||||
grouped[ct] = [];
|
||||
}
|
||||
grouped[ct].push(item);
|
||||
typeTotals[ct] = (typeTotals[ct] || 0) + 1;
|
||||
}
|
||||
|
||||
// Step 2 & 3: Sort each group by score desc, take top N
|
||||
const kept: OptimizableResult[] = [];
|
||||
const perType: Array<{ type: string; count: number; total: number }> = [];
|
||||
|
||||
for (const [ct, groupItems] of Object.entries(grouped)) {
|
||||
// Sort by score descending (higher score = more relevant)
|
||||
groupItems.sort((a, b) => (b.score || 0) - (a.score || 0));
|
||||
|
||||
const top = groupItems.slice(0, maxPerType);
|
||||
kept.push(...top);
|
||||
|
||||
perType.push({
|
||||
type: ct,
|
||||
count: top.length,
|
||||
total: typeTotals[ct],
|
||||
});
|
||||
}
|
||||
|
||||
// Step 4: Sort kept results by cloud priority, then by score within same priority
|
||||
kept.sort((a, b) => {
|
||||
const pa = getPriority(getCloudType(a));
|
||||
const pb = getPriority(getCloudType(b));
|
||||
if (pa !== pb) return pa - pb;
|
||||
return (b.score || 0) - (a.score || 0);
|
||||
});
|
||||
|
||||
const keptCount = kept.length;
|
||||
const filteredCount = items.length - keptCount;
|
||||
|
||||
return {
|
||||
results: kept,
|
||||
perType,
|
||||
keptCount,
|
||||
filteredCount,
|
||||
};
|
||||
}
|
||||
348
packages/backend/src/search/search.service.ts
Executable file
348
packages/backend/src/search/search.service.ts
Executable file
@@ -0,0 +1,348 @@
|
||||
// Native fetch available in Node 20+
|
||||
import config from '../config';
|
||||
import { getDb } from '../database/database';
|
||||
import { localTimestamp } from '../utils/time';
|
||||
import { proxiedFetch } from '../utils/proxy-agent';
|
||||
|
||||
/** One normalized search hit, regardless of which provider produced it. */
export interface SearchResult {
  title: string;
  url: string;
  content: string;
  score?: number;          // relevance score when the provider supplies one
  source?: string;         // provider name ('pansou', a cloud type, or an API source name)
  password?: string;       // share-link extraction code, if any
  datetime?: string;       // publish/update time as reported by the provider
  responseTimeMs?: number; // latency of the provider that returned this hit
}

/** Paged response returned by search(). */
export interface SearchResponse {
  results: SearchResult[];
  total: number;
  page: number;
  pageSize: number;
}

/** Admin-configured external search API (stored as JSON under 'api_search_sources'). */
export interface ApiSearchSource {
  name: string;
  url: string;
  method?: string; // GET | POST (default POST)
  headers?: Record<string, string>;
  body?: string; // JSON body template, supports {keyword} {page}
  resultPath: string; // dot-notation path to results array (e.g. "data.list")
  fieldMap: { // maps SearchResult fields to JSON response fields
    title?: string;
    url?: string;
    content?: string;
    password?: string;
    datetime?: string;
  };
  timeout?: number; // per-source timeout (ms), default 10000
}
|
||||
|
||||
/** Simple dot/bracket notation JSON path accessor. */
|
||||
function jsonPathGet(obj: any, path: string): any {
|
||||
if (!obj || !path) return undefined;
|
||||
const parts = path
|
||||
.replace(/\[(\d+)\]/g, '.$1') // items[0] → items.0
|
||||
.split('.')
|
||||
.filter(Boolean);
|
||||
let current = obj;
|
||||
for (const part of parts) {
|
||||
if (current == null) return undefined;
|
||||
current = current[part];
|
||||
}
|
||||
return current;
|
||||
}
|
||||
|
||||
/** Parse configured API search sources from system config. */
|
||||
function getApiSearchSources(): ApiSearchSource[] {
|
||||
try {
|
||||
const db = getDb();
|
||||
const raw = (db.prepare("SELECT value FROM system_configs WHERE key = 'api_search_sources'").get() as any)?.value;
|
||||
if (!raw) return [];
|
||||
const parsed = JSON.parse(raw);
|
||||
if (!Array.isArray(parsed)) return [];
|
||||
return parsed.filter((s: any) => s.url && s.resultPath);
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Query a single API search source and return its results with timing.
 *
 * @param source   source definition (URL, method, body template, field map)
 * @param keyword  user query; substituted into the {keyword} template slot (URL-encoded)
 * @param page     page number; substituted into the {page} template slot
 * @param proxyUrl optional proxy to route the request through
 * @returns per-source results plus elapsed ms; errors are captured in `error`, never thrown
 */
async function queryApiSource(
  source: ApiSearchSource,
  keyword: string,
  page: number,
  proxyUrl?: string,
): Promise<{ source: string; results: SearchResult[]; responseTimeMs: number; error?: string }> {
  const startTime = Date.now();
  const timeout = source.timeout || 10000;
  const method = (source.method || 'POST').toUpperCase();

  try {
    let url = source.url;
    const headers: Record<string, string> = { ...source.headers };

    // Fill the body template's {keyword}/{page} placeholders, if a template exists.
    let body: string | undefined;
    if (source.body) {
      body = source.body
        .replace(/\{keyword\}/g, encodeURIComponent(keyword))
        .replace(/\{page\}/g, String(page));
    }

    // Shared request options; the body / query string is attached per-method below.
    const fetchOptions: RequestInit = {
      method,
      headers: { 'Content-Type': 'application/json', ...headers },
      signal: AbortSignal.timeout(timeout),
    };

    // For GET requests, append query params; for POST, use body
    if (method === 'GET') {
      // Parse body as JSON and convert to query string.
      // NOTE(review): {keyword} was already URL-encoded above and URLSearchParams
      // encodes values again — double encoding for GET sources; confirm templates expect this.
      if (body) {
        try {
          const params = JSON.parse(body);
          const qs = new URLSearchParams(params).toString();
          url += (url.includes('?') ? '&' : '?') + qs;
        } catch {
          // If body is not JSON, append as raw query
          url += (url.includes('?') ? '&' : '?') + body;
        }
      }
    } else {
      // POST: send the template body, or a default {keyword, page} JSON payload.
      (fetchOptions as any).body = body || JSON.stringify({ keyword, page });
    }

    const response = await proxiedFetch(url, fetchOptions, proxyUrl);
    const responseTimeMs = Date.now() - startTime; // time to headers
    
    if (!response.ok) {
      return { source: source.name, results: [], responseTimeMs, error: `HTTP ${response.status}` };
    }

    const data = await response.json();
    const resultTimeMs = Date.now() - startTime; // time including body download/parse

    // Extract results array using the configured JSON path.
    const items = jsonPathGet(data, source.resultPath);
    if (!Array.isArray(items)) {
      return { source: source.name, results: [], responseTimeMs: resultTimeMs, error: 'resultPath not found or not an array' };
    }

    // Map provider fields onto SearchResult via fieldMap, with common-name fallbacks.
    const fm = source.fieldMap || {};
    const results: SearchResult[] = items.map((item: any) => ({
      title: (fm.title ? item[fm.title] : item.title) || item.name || '',
      url: (fm.url ? item[fm.url] : item.url) || item.link || '',
      content: (fm.content ? item[fm.content] : item.content) || item.snippet || '',
      password: (fm.password ? item[fm.password] : item.password) || '',
      datetime: (fm.datetime ? item[fm.datetime] : item.datetime) || item.date || '',
      source: source.name,
      responseTimeMs: resultTimeMs,
    }));

    return { source: source.name, results, responseTimeMs: resultTimeMs };
  } catch (err: any) {
    const responseTimeMs = Date.now() - startTime;
    return { source: source.name, results: [], responseTimeMs, error: err.message };
  }
}
|
||||
|
||||
/** Query all configured API search sources in parallel. */
|
||||
async function searchApiSources(keyword: string, page: number, proxyUrl?: string): Promise<{
|
||||
results: SearchResult[];
|
||||
sourceStats: { name: string; count: number; responseTimeMs: number; error?: string }[];
|
||||
}> {
|
||||
const sources = getApiSearchSources();
|
||||
if (sources.length === 0) return { results: [], sourceStats: [] };
|
||||
|
||||
const promises = sources.map(s => queryApiSource(s, keyword, page, proxyUrl));
|
||||
const allResults = await Promise.all(promises);
|
||||
|
||||
const sourceStats = allResults.map(r => ({
|
||||
name: r.source,
|
||||
count: r.results.length,
|
||||
responseTimeMs: r.responseTimeMs,
|
||||
error: r.error,
|
||||
}));
|
||||
|
||||
// Merge all results, tag with source name, sort by response time (fastest first)
|
||||
const results = allResults
|
||||
.flatMap(r => r.results)
|
||||
.sort((a, b) => (a.responseTimeMs || 99999) - (b.responseTimeMs || 99999));
|
||||
|
||||
return { results, sourceStats };
|
||||
}
|
||||
|
||||
/**
 * Run a search across PanSou and all configured API sources.
 *
 * Flow: query PanSou and API sources in parallel → normalize PanSou's several
 * possible payload shapes → merge + dedupe by URL → sort → record stats and
 * hot keywords → return a SearchResponse.
 *
 * @param keyword user query string
 * @param page    1-based page number forwarded to providers
 * @param ip      client IP, stored with search stats when given
 */
export async function search(keyword: string, page: number = 1, ip?: string): Promise<SearchResponse> {
  const db = getDb();
  // Per-request settings come from system_configs, falling back to static config.
  const pansouUrl = (db.prepare('SELECT value FROM system_configs WHERE key = ?').get('pansou_url') as any)?.value || config.pansouUrl;
  const proxyEnabled = (db.prepare('SELECT value FROM system_configs WHERE key = ?').get('search_proxy_enabled') as any)?.value === 'true';
  const proxyUrl = (db.prepare('SELECT value FROM system_configs WHERE key = ?').get('search_proxy_url') as any)?.value || '';
  const effectiveProxy = proxyEnabled ? proxyUrl : undefined;

  // ── Run PanSou and API sources in parallel ──
  const pansouPromise = (async () => {
    const url = `${pansouUrl}/api/search`;
    const fetchOptions: RequestInit = {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ kw: keyword, page }),
      signal: AbortSignal.timeout(10000),
    };
    const pansouStart = Date.now();
    const response = await proxiedFetch(url, fetchOptions, effectiveProxy);
    if (!response.ok) throw new Error(`PanSou API error: ${response.status}`);
    const data = await response.json() as any;
    return { data, responseTimeMs: Date.now() - pansouStart };
  })();

  const apiSourcesPromise = searchApiSources(keyword, page, effectiveProxy);

  // NOTE(review): if PanSou fails, this rejects the whole search even when API
  // sources succeeded — confirm that is intended.
  const [pansouResult, apiSourcesResult] = await Promise.all([pansouPromise, apiSourcesPromise]);

  const { data, responseTimeMs: pansouTime } = pansouResult;

  // ── Parse PanSou results (three possible payload shapes) ──
  let items: any[] = [];
  let total = 0;

  if (data.data?.merged_by_type) {
    // Shape 1: results grouped per cloud type under data.merged_by_type.
    for (const [cloudType, cloudItems] of Object.entries(data.data.merged_by_type)) {
      if (Array.isArray(cloudItems)) {
        items.push(...cloudItems.map((item: any) => ({
          ...item,
          _cloud_type: cloudType, // remember which group the item came from
        })));
      }
    }
    total = data.data.total || items.length;
  } else if (Array.isArray(data.data)) {
    // Shape 2: flat array under data.data.
    items = data.data;
    total = data.total || items.length;
  } else if (Array.isArray(data.results)) {
    // Shape 3: flat array under data.results.
    items = data.results;
    total = data.total || items.length;
  }

  const pansouResults: SearchResult[] = items.map((item: any) => ({
    title: item.note || item.title || '',
    url: item.url || item.link || '',
    content: item.content || item.snippet || item.note || '',
    score: item.score || 0,
    source: item.source || item._cloud_type || 'pansou',
    password: item.password || '',
    datetime: item.datetime || '',
    responseTimeMs: pansouTime,
    images: item.images || [],
  }));

  // ── Merge PanSou + API sources, sort by response time (fastest first) ──
  const allResults = [...apiSourcesResult.results, ...pansouResults]
    .sort((a, b) => (a.responseTimeMs || 99999) - (b.responseTimeMs || 99999));

  // Deduplicate by URL within merged results (first occurrence wins).
  const seenUrls = new Set<string>();
  const results: SearchResult[] = [];
  for (const r of allResults) {
    if (r.url && !seenUrls.has(r.url)) {
      seenUrls.add(r.url);
      results.push(r);
    } else if (!r.url) {
      results.push(r); // keep results without URLs (unlikely but safe)
    }
  }

  // NOTE(review): total is overwritten with this page's deduped merged count,
  // discarding the provider-reported total — confirm that is intended.
  total = results.length;

  // Order by datetime descending; results without a datetime sink to the end.
  // NOTE(review): this full re-sort overrides the response-time ordering
  // established above — it does NOT preserve response-time groups.
  results.sort((a: any, b: any) => {
    const ta = a.datetime || '';
    const tb = b.datetime || '';
    if (!ta && !tb) return 0;
    if (!ta) return 1;
    if (!tb) return -1;
    return tb.localeCompare(ta);
  });

  // Record search statistics (best-effort; never throws).
  recordSearchStats(keyword, results.length, ip);

  // Update hot keywords (best-effort; never throws).
  updateHotKeywords(keyword);

  return {
    results,
    total,
    page,
    pageSize: data.pageSize || 10,
  };
}
|
||||
|
||||
/**
|
||||
* Apply title filter rules to clean up search result titles.
|
||||
* Rules format (one per line):
|
||||
* # comment lines are ignored (hash must be followed by space)
|
||||
* /pattern/flags → regex: matched content is deleted from title
|
||||
* plain text → literal text: exact text is deleted from title wherever it appears
|
||||
*/
|
||||
export function applyTitleFilter(title: string, rules: string): string {
|
||||
if (!title || !rules) return title;
|
||||
const lines = rules.split('\n');
|
||||
let result = title;
|
||||
for (const rawLine of lines) {
|
||||
const line = rawLine.trim();
|
||||
if (!line || line.startsWith('# ')) continue;
|
||||
try {
|
||||
if (line.startsWith('/') && line.lastIndexOf('/') > 0) {
|
||||
const lastSlashIdx = line.lastIndexOf('/');
|
||||
const pattern = line.substring(1, lastSlashIdx);
|
||||
const flags = line.substring(lastSlashIdx + 1);
|
||||
const anchored = pattern.startsWith('^') ? pattern : '^' + pattern;
|
||||
const re = new RegExp(anchored, flags);
|
||||
const match = re.exec(result);
|
||||
if (match && match.index === 0) {
|
||||
result = result.slice(match[0].length);
|
||||
}
|
||||
} else {
|
||||
if (result.startsWith(line)) {
|
||||
result = result.slice(line.length);
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return result.trim();
|
||||
}
|
||||
|
||||
function recordSearchStats(keyword: string, resultCount: number, ip?: string): void {
|
||||
try {
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
'INSERT INTO search_stats (keyword, intent, result_count, ip_address, created_at) VALUES (?, ?, ?, ?, ?)'
|
||||
).run(keyword, 'SEARCH', resultCount, ip || '', localTimestamp());
|
||||
} catch (err) {
|
||||
console.error('[Search] Failed to record stats:', err);
|
||||
}
|
||||
}
|
||||
|
||||
function updateHotKeywords(keyword: string): void {
|
||||
try {
|
||||
const db = getDb();
|
||||
const existing = db.prepare('SELECT id FROM hot_keywords WHERE keyword = ?').get(keyword) as any;
|
||||
|
||||
if (existing) {
|
||||
db.prepare(
|
||||
"UPDATE hot_keywords SET search_count = search_count + 1, updated_at = ? WHERE keyword = ?"
|
||||
).run(localTimestamp(), keyword);
|
||||
} else {
|
||||
db.prepare(
|
||||
"INSERT INTO hot_keywords (keyword, search_count, updated_at) VALUES (?, 1, ?)"
|
||||
).run(keyword, localTimestamp());
|
||||
}
|
||||
} catch (err) {
|
||||
console.error('[Search] Failed to update hot keywords:', err);
|
||||
}
|
||||
}
|
||||
111
packages/backend/src/utils/crypto.ts
Normal file
111
packages/backend/src/utils/crypto.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
/**
|
||||
* AES-256-GCM 加解密工具
|
||||
*
|
||||
* 用于保护数据库中存储的网盘 Cookie。
|
||||
* 加密密钥从环境变量 COOKIE_ENCRYPTION_KEY 读取,
|
||||
 * 未设置时使用内置默认密钥(跨重启保持稳定,但安全性较低)。
|
||||
*
|
||||
* 生产环境必须设置 COOKIE_ENCRYPTION_KEY!
|
||||
*/
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
const ALGORITHM = 'aes-256-gcm';
|
||||
const IV_LENGTH = 12; // 96-bit nonce for GCM
|
||||
const TAG_LENGTH = 16; // 128-bit auth tag
|
||||
const KEY_LENGTH = 32; // 256-bit key
|
||||
|
||||
let ENCRYPTION_KEY: Buffer | null = null;
|
||||
|
||||
function getKey(): Buffer {
|
||||
if (ENCRYPTION_KEY) return ENCRYPTION_KEY;
|
||||
|
||||
const envKey = process.env.COOKIE_ENCRYPTION_KEY;
|
||||
if (envKey && envKey.length >= 32) {
|
||||
// Use SHA-256 to derive a consistent 32-byte key from any length input
|
||||
ENCRYPTION_KEY = crypto.createHash('sha256').update(envKey).digest();
|
||||
console.log('[Crypto] Cookie encryption enabled (key from COOKIE_ENCRYPTION_KEY)');
|
||||
} else if (envKey) {
|
||||
// Short key: still use SHA-256
|
||||
ENCRYPTION_KEY = crypto.createHash('sha256').update(envKey).digest();
|
||||
console.log('[Crypto] Cookie encryption enabled (key from COOKIE_ENCRYPTION_KEY, SHA-256 derived)');
|
||||
} else {
|
||||
// Default stable key (not ephemeral) — data survives container restart
|
||||
ENCRYPTION_KEY = crypto.createHash('sha256').update('cloudsearch-cookie-key-v1').digest();
|
||||
console.log('[Crypto] Cookie encryption enabled (built-in default key — set COOKIE_ENCRYPTION_KEY in .env for extra security)');
|
||||
}
|
||||
return ENCRYPTION_KEY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encrypt plaintext. Returns base64-encoded ciphertext (includes IV + auth tag).
|
||||
*/
|
||||
export function encrypt(plaintext: string): string {
|
||||
if (!plaintext) return '';
|
||||
const key = getKey();
|
||||
const iv = crypto.randomBytes(IV_LENGTH);
|
||||
const cipher = crypto.createCipheriv(ALGORITHM, key, iv);
|
||||
|
||||
const encrypted = Buffer.concat([
|
||||
cipher.update(plaintext, 'utf8'),
|
||||
cipher.final(),
|
||||
]);
|
||||
const tag = cipher.getAuthTag();
|
||||
|
||||
// Format: iv (12) + tag (16) + ciphertext
|
||||
const combined = Buffer.concat([iv, tag, encrypted]);
|
||||
return combined.toString('base64');
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrypt base64-encoded ciphertext. Returns original plaintext.
|
||||
* Returns empty string if decryption fails (corrupted data or wrong key).
|
||||
*/
|
||||
export function decrypt(encoded: string): string {
|
||||
if (!encoded) return '';
|
||||
try {
|
||||
const key = getKey();
|
||||
const combined = Buffer.from(encoded, 'base64');
|
||||
|
||||
if (combined.length < IV_LENGTH + TAG_LENGTH + 1) {
|
||||
console.warn('[Crypto] Ciphertext too short, returning as-is (possibly unencrypted legacy data)');
|
||||
// Legacy data: stored as plaintext before encryption was added
|
||||
return encoded;
|
||||
}
|
||||
|
||||
const iv = combined.subarray(0, IV_LENGTH);
|
||||
const tag = combined.subarray(IV_LENGTH, IV_LENGTH + TAG_LENGTH);
|
||||
const ciphertext = combined.subarray(IV_LENGTH + TAG_LENGTH);
|
||||
|
||||
const decipher = crypto.createDecipheriv(ALGORITHM, key, iv);
|
||||
decipher.setAuthTag(tag);
|
||||
|
||||
const decrypted = Buffer.concat([
|
||||
decipher.update(ciphertext),
|
||||
decipher.final(),
|
||||
]);
|
||||
return decrypted.toString('utf8');
|
||||
} catch (err: any) {
|
||||
// If it looks like base64 but decryption fails, it might be legacy plaintext
|
||||
// stored before encryption was enabled. Try returning as-is.
|
||||
if (err.message?.includes('unsupported state') || err.message?.includes('authentication')) {
|
||||
console.warn('[Crypto] Decryption failed (possibly legacy plaintext), returning as-is');
|
||||
return encoded;
|
||||
}
|
||||
console.error('[Crypto] Decryption error:', err.message);
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a string appears to be encrypted (base64 with IV+tag prefix).
|
||||
* Used for migration: re-encrypt legacy plaintext cookies.
|
||||
*/
|
||||
export function isEncrypted(value: string): boolean {
|
||||
if (!value) return false;
|
||||
try {
|
||||
const combined = Buffer.from(value, 'base64');
|
||||
return combined.length > IV_LENGTH + TAG_LENGTH;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
73
packages/backend/src/utils/logger.ts
Normal file
73
packages/backend/src/utils/logger.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
/**
|
||||
* 结构化日志工具
|
||||
*
|
||||
* 统一日志格式,支持请求追踪。
|
||||
*/
|
||||
/** Supported log severities, ordered from most to least verbose. */
export type LogLevel = 'debug' | 'info' | 'warn' | 'error';

/** One structured log record before formatting. */
interface LogEntry {
  level: LogLevel;
  message: string;
  timestamp: string; // ISO-8601 timestamp
  module?: string;   // logical subsystem name, e.g. 'db'
  duration?: number; // elapsed milliseconds, for perf logs
  error?: string;    // stack trace or message text
}
|
||||
|
||||
const LOG_LEVELS: Record<LogLevel, number> = {
|
||||
debug: 0,
|
||||
info: 1,
|
||||
warn: 2,
|
||||
error: 3,
|
||||
};
|
||||
|
||||
let currentLevel: LogLevel =
|
||||
(process.env.LOG_LEVEL as LogLevel) ||
|
||||
(process.env.NODE_ENV === 'production' ? 'info' : 'debug');
|
||||
|
||||
function shouldLog(level: LogLevel): boolean {
|
||||
return LOG_LEVELS[level] >= LOG_LEVELS[currentLevel];
|
||||
}
|
||||
|
||||
function formatLog(entry: LogEntry): string {
|
||||
const parts = [
|
||||
`[${entry.timestamp}]`,
|
||||
`[${entry.level.toUpperCase()}]`,
|
||||
];
|
||||
if (entry.module) parts.push(`[${entry.module}]`);
|
||||
parts.push(entry.message);
|
||||
if (entry.duration !== undefined) parts.push(`(${entry.duration}ms)`);
|
||||
if (entry.error) parts.push(`\n ${entry.error}`);
|
||||
return parts.join(' ');
|
||||
}
|
||||
|
||||
function log(level: LogLevel, message: string, module?: string, extra?: Record<string, any>): void {
|
||||
if (!shouldLog(level)) return;
|
||||
|
||||
const entry: LogEntry = {
|
||||
level,
|
||||
message,
|
||||
timestamp: new Date().toISOString(),
|
||||
module,
|
||||
...extra,
|
||||
};
|
||||
|
||||
const formatted = formatLog(entry);
|
||||
switch (level) {
|
||||
case 'error': console.error(formatted); break;
|
||||
case 'warn': console.warn(formatted); break;
|
||||
default: console.log(formatted); break;
|
||||
}
|
||||
}
|
||||
|
||||
export const logger = {
|
||||
debug: (msg: string, module?: string) => log('debug', msg, module),
|
||||
info: (msg: string, module?: string) => log('info', msg, module),
|
||||
warn: (msg: string, module?: string) => log('warn', msg, module),
|
||||
error: (msg: string, module?: string, err?: Error) =>
|
||||
log('error', msg, module, err ? { error: err.stack || err.message } : undefined),
|
||||
|
||||
/** Log with duration (for performance tracking) */
|
||||
perf: (msg: string, durationMs: number, module?: string) =>
|
||||
log('info', msg, module, { duration: durationMs }),
|
||||
};
|
||||
143
packages/backend/src/utils/proxy-agent.ts
Normal file
143
packages/backend/src/utils/proxy-agent.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
/**
 * Unified proxy utility — supports HTTP/HTTPS/SOCKS5/SOCKS5h schemes.
 *
 * Node 20+ native fetch() uses the undici Dispatcher interface, which
 * socks-proxy-agent does not implement. Workaround: use the classic
 * http.Agent interface with http/https.request() instead.
 */

// Agent constructors are resolved lazily and tolerantly: both packages are
// optional at runtime, and each has shipped both named and default exports
// across major versions, hence the nested require fallbacks.
let HttpsProxyAgent: any;
let SocksProxyAgent: any;

try {
  HttpsProxyAgent = require('https-proxy-agent').HttpsProxyAgent;
} catch {
  try { HttpsProxyAgent = require('https-proxy-agent'); } catch {}
}

try {
  SocksProxyAgent = require('socks-proxy-agent').SocksProxyAgent;
} catch {
  try { SocksProxyAgent = require('socks-proxy-agent'); } catch {}
}
|
||||
|
||||
/** Create an http.Agent for the given proxy URL (works with https.request) */
|
||||
function createProxyAgent(proxyUrl: string): any | null {
|
||||
if (!proxyUrl || typeof proxyUrl !== 'string') return null;
|
||||
const trimmed = proxyUrl.trim();
|
||||
if (!trimmed) return null;
|
||||
const lower = trimmed.toLowerCase();
|
||||
|
||||
try {
|
||||
if (lower.startsWith('socks5://') || lower.startsWith('socks5h://')) {
|
||||
if (!SocksProxyAgent) {
|
||||
console.warn('[Proxy] socks-proxy-agent not installed');
|
||||
return null;
|
||||
}
|
||||
return new SocksProxyAgent(trimmed);
|
||||
}
|
||||
if (lower.startsWith('http://') || lower.startsWith('https://')) {
|
||||
if (!HttpsProxyAgent) {
|
||||
console.warn('[Proxy] No HTTP proxy agent available');
|
||||
return null;
|
||||
}
|
||||
return new HttpsProxyAgent(trimmed);
|
||||
}
|
||||
// Unknown scheme — try as HTTP proxy
|
||||
if (HttpsProxyAgent) return new HttpsProxyAgent(trimmed);
|
||||
return null;
|
||||
} catch (err: any) {
|
||||
console.error(`[Proxy] Failed to create proxy agent: ${err.message}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Fetch with proxy support.
 * Uses native fetch() when no proxy, or http/https.request() with agent when proxy is set.
 *
 * Limitations of the proxied path (NOTE(review) — confirm acceptable for callers):
 * - the whole response body is buffered in memory before resolving;
 * - HTTP redirects are NOT followed (native fetch follows them);
 * - only string/Buffer/ArrayBuffer bodies are supported; streams are stringified.
 */
export async function proxiedFetch(
  url: string,
  init?: RequestInit,
  proxyUrl?: string,
): Promise<Response> {
  // No proxy configured → plain native fetch.
  if (!proxyUrl) return fetch(url, init);

  // Proxy configured but agent creation failed → degrade to a direct fetch.
  const agent = createProxyAgent(proxyUrl);
  if (!agent) return fetch(url, init);

  const parsedUrl = new URL(url);
  const mod = parsedUrl.protocol === 'https:' ? require('https') : require('http');

  return new Promise((resolve, reject) => {
    // Normalize init.headers (Headers instance or plain object) to a plain map.
    const headers: Record<string, string> = {};
    if (init?.headers) {
      const h = init.headers as any;
      if (h instanceof Headers) {
        h.forEach((v, k) => { headers[k] = v; });
      } else if (typeof h === 'object') {
        Object.assign(headers, h);
      }
    }

    const options: any = {
      hostname: parsedUrl.hostname,
      port: parsedUrl.port || (parsedUrl.protocol === 'https:' ? 443 : 80),
      path: parsedUrl.pathname + parsedUrl.search,
      method: init?.method || 'GET',
      headers,
      agent,
    };

    const req = mod.request(options, (res: any) => {
      // Buffer the entire body, then wrap it in a WHATWG Response so callers
      // can use the same .json()/.text() API as with native fetch.
      const chunks: Buffer[] = [];
      res.on('data', (c: Buffer) => chunks.push(c));
      res.on('end', () => {
        const body = Buffer.concat(chunks);
        resolve(new Response(body, {
          // 502 fallback if Node ever omits statusCode (defensive).
          status: res.statusCode || 502,
          statusText: res.statusMessage || '',
          headers: new Headers(res.headers || {}),
        }));
      });
    });

    req.on('error', reject);

    // Propagate AbortSignal cancellation by tearing down the socket.
    // NOTE(review): an already-aborted signal will not fire 'abort' — confirm
    // callers never pass one.
    if (init?.signal) {
      init.signal.addEventListener('abort', () => req.destroy());
    }

    if (init?.body) {
      req.write(
        typeof init.body === 'string' ? init.body :
        init.body instanceof Buffer ? init.body :
        init.body instanceof ArrayBuffer ? Buffer.from(init.body) :
        Buffer.from(String(init.body))
      );
    }
    req.end();
  });
}
|
||||
|
||||
export async function testProxyConnection(
|
||||
proxyUrl: string,
|
||||
testUrl?: string,
|
||||
): Promise<{ ok: boolean; latency: number; info: string }> {
|
||||
const target = testUrl || 'https://www.baidu.com';
|
||||
const start = Date.now();
|
||||
try {
|
||||
const res = await proxiedFetch(target, {
|
||||
signal: AbortSignal.timeout(10000),
|
||||
}, proxyUrl);
|
||||
const latency = Date.now() - start;
|
||||
return { ok: true, latency, info: `连接成功 (${res.status})` };
|
||||
} catch (err: any) {
|
||||
return { ok: false, latency: Date.now() - start, info: `代理连接失败: ${err.message}` };
|
||||
}
|
||||
}
|
||||
|
||||
// Legacy compat — no longer returns dispatcher, kept for type compatibility
|
||||
export function createProxyDispatcher(proxyUrl: string): { agent?: any } | null {
|
||||
const agent = createProxyAgent(proxyUrl);
|
||||
return agent ? { agent } : null;
|
||||
}
|
||||
407
packages/backend/src/utils/qr-login.service.ts
Executable file
407
packages/backend/src/utils/qr-login.service.ts
Executable file
@@ -0,0 +1,407 @@
|
||||
import { chromium, BrowserContext, Page } from 'playwright';
|
||||
import jsQR from 'jsqr';
|
||||
import { getDb } from '../database/database';
|
||||
import { escapeLike } from '../utils/time';
|
||||
|
||||
/** In-memory state for one headless-browser QR login attempt. */
interface QrSession {
  id: string;                      // random id handed back to the API client
  browserContext: BrowserContext;  // Playwright context owning the login page
  page: Page;                      // the pan.quark.cn page rendering the QR code
  createdAt: number;               // epoch ms; session expires after SESSION_TTL
  cookieSnapshot: string;          // latest "name=value; ..." cookie string captured
  lastPollAt: number;              // epoch ms of the most recent background poll
  qrUrl: string;                   // decoded QR payload shown to the user
  // 'scanned' is declared but not set anywhere in this file — TODO confirm it
  // is still reachable or remove it from the union.
  status: 'pending' | 'scanned' | 'logged_in' | 'expired' | 'error';
  error?: string;                  // human-readable detail when status === 'error'
}
|
||||
|
||||
// Registry of live QR login sessions, keyed by session id.
const SESSIONS = new Map<string, QrSession>();
const SESSION_TTL = 5 * 60 * 1000; // 5 minutes
const COOKIE_CHECK_INTERVAL = 1500; // 1.5s between cookie checks

// System Chromium binary used by Playwright; overridable via CHROMIUM_PATH.
const CHROMIUM_PATH = process.env.CHROMIUM_PATH || '/usr/bin/chromium-browser';
|
||||
|
||||
// Clean up old sessions periodically
|
||||
setInterval(() => {
|
||||
const now = Date.now();
|
||||
for (const [id, session] of SESSIONS.entries()) {
|
||||
if (now - session.createdAt > SESSION_TTL) {
|
||||
cleanupSession(id);
|
||||
}
|
||||
}
|
||||
}, 60000);
|
||||
|
||||
function cleanupSession(id: string) {
|
||||
const session = SESSIONS.get(id);
|
||||
if (session) {
|
||||
try {
|
||||
session.browserContext.close().catch(() => {});
|
||||
} catch {}
|
||||
SESSIONS.delete(id);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract QR code URL from the login page canvas using jsQR.
|
||||
* The actual login QR code is Canvas #0 (anonymous, 177x177), NOT #react-qrcode-logo.
|
||||
*/
|
||||
async function extractQrUrl(page: Page): Promise<string> {
|
||||
// Run inside Playwright's browser context (as a string to avoid Node TS type errors)
|
||||
const raw = await page.evaluate(`(() => {
|
||||
const canvases = document.querySelectorAll('canvas');
|
||||
var results = [];
|
||||
for (var i = 0; i < canvases.length; i++) {
|
||||
try {
|
||||
var c = canvases[i];
|
||||
var ctx = c.getContext('2d');
|
||||
if (!ctx) continue;
|
||||
var imageData = ctx.getImageData(0, 0, c.width, c.height);
|
||||
results.push({
|
||||
index: i,
|
||||
w: c.width,
|
||||
h: c.height,
|
||||
data: Array.from(imageData.data)
|
||||
});
|
||||
} catch(e) {}
|
||||
}
|
||||
return results;
|
||||
})()`) as unknown as { index: number; w: number; h: number; data: number[] }[];
|
||||
|
||||
if (!raw || raw.length === 0) {
|
||||
throw new Error('页面没有可用的 canvas');
|
||||
}
|
||||
|
||||
// Try to decode each canvas, preferring the one with su.quark.cn URL
|
||||
let bestUrl = '';
|
||||
let bestResult: { index: number; w: number; h: number; data: number[] } | null = null;
|
||||
|
||||
for (const canvas of raw) {
|
||||
const code = jsQR(new Uint8ClampedArray(canvas.data), canvas.w, canvas.h);
|
||||
if (code && code.data) {
|
||||
// If this is the login QR code (has su.quark.cn), use it immediately
|
||||
if (code.data.includes('su.quark.cn')) {
|
||||
return code.data;
|
||||
}
|
||||
// Otherwise keep it as fallback
|
||||
if (!bestUrl) {
|
||||
bestUrl = code.data;
|
||||
bestResult = canvas;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (bestUrl) {
|
||||
return bestUrl;
|
||||
}
|
||||
|
||||
throw new Error('无法解析二维码内容');
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a QR code login session.
|
||||
* Launches headless Chromium, navigates to Quark login page, extracts QR code URL.
|
||||
*/
|
||||
export async function startQrLogin(): Promise<{
|
||||
sessionId: string;
|
||||
qrUrl: string;
|
||||
expiresIn: number;
|
||||
}> {
|
||||
// Clean up any existing expired sessions
|
||||
for (const [id, session] of SESSIONS.entries()) {
|
||||
if (Date.now() - session.createdAt > SESSION_TTL) {
|
||||
cleanupSession(id);
|
||||
}
|
||||
}
|
||||
|
||||
const browser = await chromium.launch({
|
||||
executablePath: CHROMIUM_PATH,
|
||||
headless: true,
|
||||
args: [
|
||||
'--no-sandbox',
|
||||
'--disable-setuid-sandbox',
|
||||
'--disable-dev-shm-usage',
|
||||
'--disable-gpu',
|
||||
'--no-first-run',
|
||||
'--no-zygote',
|
||||
],
|
||||
});
|
||||
|
||||
const browserContext = await browser.newContext({
|
||||
userAgent:
|
||||
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
viewport: { width: 1280, height: 800 },
|
||||
locale: 'zh-CN',
|
||||
});
|
||||
|
||||
const page = await browserContext.newPage();
|
||||
const sessionId = Date.now().toString(36) + Math.random().toString(36).slice(2, 8);
|
||||
|
||||
try {
|
||||
// Navigate to Quark login page (now the homepage itself has QR login)
|
||||
await page.goto('https://pan.quark.cn/', {
|
||||
waitUntil: 'commit',
|
||||
timeout: 30000,
|
||||
});
|
||||
|
||||
// Wait for the QR code canvas to appear
|
||||
await page.waitForSelector('canvas', { timeout: 15000 });
|
||||
|
||||
// Extra wait for the QR code to fully render
|
||||
await page.waitForTimeout(2000);
|
||||
|
||||
// Extract the QR code URL from the canvas
|
||||
const qrUrl = await extractQrUrl(page);
|
||||
|
||||
// Take initial cookie snapshot
|
||||
const cookies = await browserContext.cookies();
|
||||
const cookieSnapshot = cookies.map(c => `${c.name}=${c.value}`).join('; ');
|
||||
|
||||
const session: QrSession = {
|
||||
id: sessionId,
|
||||
browserContext,
|
||||
page,
|
||||
createdAt: Date.now(),
|
||||
cookieSnapshot,
|
||||
lastPollAt: Date.now(),
|
||||
qrUrl,
|
||||
status: 'pending',
|
||||
};
|
||||
|
||||
SESSIONS.set(sessionId, session);
|
||||
|
||||
// Start background polling for login detection
|
||||
pollLoginStatus(session);
|
||||
|
||||
// Handle page navigation (like redirect after login)
|
||||
page.on('framenavigated', async (frame) => {
|
||||
if (frame === page.mainFrame()) {
|
||||
const url = frame.url();
|
||||
if (url === 'about:blank') {
|
||||
await checkAndCaptureCookies(session);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Handle popups/dialogs
|
||||
page.on('popup', async (popup) => {
|
||||
try {
|
||||
await popup.waitForLoadState('networkidle', { timeout: 10000 });
|
||||
await checkAndCaptureCookies(session);
|
||||
} catch {}
|
||||
});
|
||||
|
||||
return {
|
||||
sessionId,
|
||||
qrUrl,
|
||||
expiresIn: SESSION_TTL / 1000,
|
||||
};
|
||||
} catch (err: any) {
|
||||
// Clean up on failure
|
||||
try { await browserContext.close(); } catch {}
|
||||
try { browser.close().catch(() => {}); } catch {}
|
||||
SESSIONS.delete(sessionId);
|
||||
throw new Error(`启动扫码登录失败: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Poll login status in background.
 * Checks cookies every COOKIE_CHECK_INTERVAL ms for new session tokens.
 *
 * The interval self-terminates on: TTL expiry, detected login, or any error
 * (which typically means the page/context was already closed).
 */
async function pollLoginStatus(session: QrSession) {
  const checkInterval = setInterval(async () => {
    try {
      const now = Date.now();

      // Check if expired — stop polling and release the browser.
      if (now - session.createdAt > SESSION_TTL) {
        clearInterval(checkInterval);
        session.status = 'expired';
        cleanupSession(session.id);
        return;
      }

      session.lastPollAt = now;

      // Check cookies
      const cookies = await session.browserContext.cookies();
      const cookieStr = cookies.map(c => `${c.name}=${c.value}`).join('; ');

      // Check for session cookies indicating login.
      // NOTE(review): '__st'/'pus'/'__pus'/'__ktd' are presumed Quark
      // auth-session cookie names — verify against current Quark behavior.
      const hasSessionCookie = cookies.some(
        c => (c.name === '__st' || c.name === 'pus' || c.name === '__pus' || c.name === '__ktd')
      );

      if (hasSessionCookie) {
        session.cookieSnapshot = cookieStr;
        session.status = 'logged_in';
        clearInterval(checkInterval);
        return;
      }

      // Check URL change as alternative indicator: any navigation away from
      // the login/home page may mean the scan completed without the cookies
      // above being visible yet.
      const url = session.page.url();
      if (!url.includes('login') && !url.includes('qrcode') && url !== 'about:blank' && url !== 'https://pan.quark.cn/' && url.length > 10) {
        await checkAndCaptureCookies(session);
      }
    } catch (err: any) {
      // Page might have been closed
      clearInterval(checkInterval);
    }
  }, COOKIE_CHECK_INTERVAL);
}
|
||||
|
||||
/**
 * Check cookies after navigation/redirect and capture them if login succeeded.
 *
 * Two detection paths:
 * 1. A known session cookie is present → mark logged_in directly.
 * 2. Otherwise, if "enough" non-tracking cookies exist, probe the account-info
 *    endpoint from inside the page; a nickname in the response confirms login.
 * All failures are swallowed — this is a best-effort probe called from event
 * handlers and the poll loop.
 */
async function checkAndCaptureCookies(session: QrSession) {
  try {
    const cookies = await session.browserContext.cookies();
    const cookieStr = cookies.map(c => `${c.name}=${c.value}`).join('; ');
    // NOTE(review): presumed Quark auth cookie names — keep in sync with
    // pollLoginStatus, which uses the same list.
    const hasSessionCookie = cookies.some(
      c => (c.name === '__st' || c.name === 'pus' || c.name === '__pus' || c.name === '__ktd')
    );

    if (hasSessionCookie) {
      session.cookieSnapshot = cookieStr;
      session.status = 'logged_in';
    } else if (cookies.length > 3) {
      // Heuristic: more than the anonymous baseline of tracking cookies.
      const newCookies = cookies.filter(
        c => !['ctoken', 'b-user-id', '__wpkreporterwid_'].includes(c.name)
      );
      if (newCookies.length > 0) {
        session.cookieSnapshot = cookieStr;
        try {
          // Fetch from within the page so httpOnly cookies and any JS request
          // signing apply automatically.
          const resp = await session.page.evaluate(async () => {
            const r = await fetch('https://pan.quark.cn/account/info', {
              credentials: 'include',
            });
            return await r.text();
          });
          const data = JSON.parse(resp);
          if (data?.data?.nickname) {
            session.status = 'logged_in';
          }
        } catch {}
      }
    }
  } catch {}
}
|
||||
|
||||
/**
|
||||
* Get the login status for a session.
|
||||
*/
|
||||
export async function getQrLoginStatus(sessionId: string): Promise<{
|
||||
status: string;
|
||||
cookie?: string;
|
||||
nickname?: string;
|
||||
storage_used?: string;
|
||||
storage_total?: string;
|
||||
autoUpdated?: boolean;
|
||||
updatedConfigId?: number;
|
||||
}> {
|
||||
const session = SESSIONS.get(sessionId);
|
||||
if (!session) {
|
||||
return { status: 'expired' };
|
||||
}
|
||||
|
||||
// Check if expired
|
||||
if (Date.now() - session.createdAt > SESSION_TTL) {
|
||||
session.status = 'expired';
|
||||
cleanupSession(sessionId);
|
||||
return { status: 'expired' };
|
||||
}
|
||||
|
||||
if (session.status === 'logged_in') {
|
||||
// Try to get nickname too
|
||||
let nickname = '';
|
||||
try {
|
||||
const resp = await session.page.evaluate(async () => {
|
||||
const r = await fetch('https://pan.quark.cn/account/info', {
|
||||
credentials: 'include',
|
||||
});
|
||||
return await r.text();
|
||||
});
|
||||
const data = JSON.parse(resp);
|
||||
nickname = data?.data?.nickname || '';
|
||||
} catch {}
|
||||
|
||||
// Fetch capacity info from within the browser context (has full JS signing)
|
||||
let storageTotal = '';
|
||||
let storageUsed = '';
|
||||
try {
|
||||
const capResp = await session.page.evaluate(async () => {
|
||||
const r = await fetch(
|
||||
'https://pan.quark.cn/1/clouddrive/capacity/detail?pr=ucpro&fr=pc',
|
||||
{ credentials: 'include' }
|
||||
);
|
||||
return await r.text();
|
||||
});
|
||||
const capData = JSON.parse(capResp);
|
||||
if (capData.status === 200 && capData.data?.capacity_summary) {
|
||||
const summary = capData.data.capacity_summary;
|
||||
const total = summary.sum_capacity || 0;
|
||||
storageTotal = formatBytes(total);
|
||||
storageUsed = '0 B'; // capacity/detail doesn't return used_size
|
||||
}
|
||||
} catch {}
|
||||
|
||||
// Build full cookie string including httpOnly cookies
|
||||
const cookies = await session.browserContext.cookies();
|
||||
const cookieStr = cookies.map(c => `${c.name}=${c.value}`).join('; ');
|
||||
|
||||
// Extract __uid from cookie for duplicate detection
|
||||
const uidMatch = cookieStr.match(/(?<!\\w)__uid=([a-f0-9-]+)/);
|
||||
let autoUpdated = false;
|
||||
let updatedConfigId: number | undefined;
|
||||
|
||||
if (uidMatch) {
|
||||
const uid = uidMatch[1];
|
||||
try {
|
||||
const db = getDb();
|
||||
const existing = db.prepare(
|
||||
`SELECT id, nickname FROM cloud_configs WHERE cloud_type = 'quark' AND cookie LIKE ?`
|
||||
).get(`%${escapeLike(uid)}%`) as { id: number; nickname: string } | undefined;
|
||||
|
||||
if (existing) {
|
||||
// Same account → auto-update cookie with capacity info too
|
||||
const localTimestamp = new Date().toISOString().replace('T', ' ').slice(0, 19);
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET cookie = ?, storage_used = ?, storage_total = ?, updated_at = ? WHERE id = ?`
|
||||
).run(cookieStr, storageUsed || null, storageTotal || null, localTimestamp, existing.id);
|
||||
autoUpdated = true;
|
||||
updatedConfigId = existing.id;
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
// Clean up session after successful login
|
||||
cleanupSession(sessionId);
|
||||
|
||||
return {
|
||||
status: 'logged_in',
|
||||
cookie: cookieStr,
|
||||
nickname,
|
||||
storage_used: storageUsed,
|
||||
storage_total: storageTotal,
|
||||
autoUpdated,
|
||||
updatedConfigId,
|
||||
};
|
||||
}
|
||||
|
||||
return { status: session.status };
|
||||
}
|
||||
|
||||
function formatBytes(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
return parseFloat((bytes / Math.pow(1024, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/**
|
||||
* Cancel a QR login session.
|
||||
*/
|
||||
export async function cancelQrLogin(sessionId: string): Promise<void> {
|
||||
cleanupSession(sessionId);
|
||||
}
|
||||
27
packages/backend/src/utils/response.ts
Executable file
27
packages/backend/src/utils/response.ts
Executable file
@@ -0,0 +1,27 @@
|
||||
import { Response } from 'express';
|
||||
|
||||
/**
 * Send a successful JSON response.
 * NOTE: despite the historical "{ error: null, data }" description, this
 * SPREADS data's own properties into the top level: { error: null, ...data }.
 * Callers pass an object whose keys become the response body fields; passing
 * a non-object would spread its index properties — avoid that.
 */
export function sendSuccess<T>(res: Response, data: T, status: number = 200) {
  res.status(status).json({ error: null, ...(data as any) });
}
|
||||
|
||||
/**
|
||||
* Send an error JSON response.
|
||||
* Uses the standard format: { error: string }
|
||||
* All routes should use this for consistent frontend error handling.
|
||||
*/
|
||||
export function sendError(res: Response, status: number, message: string) {
|
||||
res.status(status).json({ error: message });
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a 500 error response from a caught exception.
|
||||
* Prevents leaking stack traces in production.
|
||||
*/
|
||||
export function sendServerError(res: Response, err: unknown, fallbackMessage: string = 'Internal server error') {
|
||||
const message = err instanceof Error ? err.message : fallbackMessage;
|
||||
res.status(500).json({ error: message || fallbackMessage });
|
||||
}
|
||||
116
packages/backend/src/utils/time.ts
Executable file
116
packages/backend/src/utils/time.ts
Executable file
@@ -0,0 +1,116 @@
|
||||
import { getDb } from '../database/database';
|
||||
|
||||
/**
|
||||
* Get the current timezone from DB config, with fallback.
|
||||
*/
|
||||
export function getTimezone(): string {
|
||||
try {
|
||||
const db = getDb();
|
||||
const row = db.prepare('SELECT value FROM system_configs WHERE key = ?').get('timezone') as any;
|
||||
return row?.value || 'Asia/Shanghai';
|
||||
} catch {
|
||||
return 'Asia/Shanghai';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns current local time as an ISO 8601 string with timezone offset.
|
||||
* Example: "2026-05-02T14:32:23+08:00"
|
||||
* This format is reliably parsed by JavaScript Date() in all browsers.
|
||||
*/
|
||||
export function localTimestamp(): string {
|
||||
const tz = getTimezone();
|
||||
const now = new Date();
|
||||
// Format as local time string with timezone offset
|
||||
try {
|
||||
const parts = new Intl.DateTimeFormat('sv-SE', {
|
||||
timeZone: tz,
|
||||
year: 'numeric', month: '2-digit', day: '2-digit',
|
||||
hour: '2-digit', minute: '2-digit', second: '2-digit',
|
||||
hour12: false,
|
||||
}).formatToParts(now);
|
||||
const get = (type: string) => parts.find(p => p.type === type)?.value || '00';
|
||||
const dateStr = `${get('year')}-${get('month')}-${get('day')}T${get('hour')}:${get('minute')}:${get('second')}`;
|
||||
// Calculate timezone offset for the configured timezone
|
||||
// Use getTimezoneOffset difference between UTC and the target timezone
|
||||
const utcMs = now.getTime();
|
||||
const localStr = dateStr.replace('T', ' ');
|
||||
// Get offset in minutes for the configured timezone
|
||||
const formatter = new Intl.DateTimeFormat('sv-SE', {
|
||||
timeZone: tz,
|
||||
timeZoneName: 'longOffset',
|
||||
});
|
||||
const tzName = formatter.formatToParts(now).find(p => p.type === 'timeZoneName')?.value || '';
|
||||
// tzName is like "GMT+8" or "GMT-05:00"
|
||||
let offset = '+00:00';
|
||||
if (tzName) {
|
||||
const match = tzName.match(/GMT([+-])(\d+)(?::(\d+))?/);
|
||||
if (match) {
|
||||
const sign = match[1];
|
||||
const hours = match[2].padStart(2, '0');
|
||||
const mins = (match[3] || '00').padStart(2, '0');
|
||||
offset = `${sign}${hours}:${mins}`;
|
||||
}
|
||||
}
|
||||
return dateStr + offset;
|
||||
} catch {
|
||||
// Fallback: use UTC
|
||||
return now.toISOString();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns today's (or given date's) date string in the configured timezone.
|
||||
* Example: "2026-05-04"
|
||||
*/
|
||||
export function formatLocalDate(date?: Date): string {
|
||||
const tz = getTimezone();
|
||||
const d = date || new Date();
|
||||
try {
|
||||
const parts = new Intl.DateTimeFormat('sv-SE', {
|
||||
timeZone: tz,
|
||||
year: 'numeric', month: '2-digit', day: '2-digit',
|
||||
}).formatToParts(d);
|
||||
const get = (type: string) => parts.find(p => p.type === type)?.value || '00';
|
||||
return `${get('year')}-${get('month')}-${get('day')}`;
|
||||
} catch {
|
||||
const y = d.getFullYear();
|
||||
const m = String(d.getMonth() + 1).padStart(2, '0');
|
||||
const day = String(d.getDate()).padStart(2, '0');
|
||||
return `${y}-${m}-${day}`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Escape SQL LIKE wildcards (% and _) in user input to prevent unintended pattern matching.
|
||||
*/
|
||||
export function escapeLike(str: string): string {
|
||||
return str.replace(/[%_\\]/g, '\\$&');
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a local datetime string in the configured timezone.
|
||||
* Example: "2026-05-04 14:32:23" — intentionally space-separated (no T) for DB compatibility.
|
||||
*/
|
||||
export function formatLocalDateTime(date?: Date): string {
|
||||
const tz = getTimezone();
|
||||
const d = date || new Date();
|
||||
try {
|
||||
const parts = new Intl.DateTimeFormat('sv-SE', {
|
||||
timeZone: tz,
|
||||
year: 'numeric', month: '2-digit', day: '2-digit',
|
||||
hour: '2-digit', minute: '2-digit', second: '2-digit',
|
||||
hour12: false,
|
||||
}).formatToParts(d);
|
||||
const get = (type: string) => parts.find(p => p.type === type)?.value || '00';
|
||||
return `${get('year')}-${get('month')}-${get('day')} ${get('hour')}:${get('minute')}:${get('second')}`;
|
||||
} catch {
|
||||
const y = d.getFullYear();
|
||||
const mo = String(d.getMonth() + 1).padStart(2, '0');
|
||||
const da = String(d.getDate()).padStart(2, '0');
|
||||
const h = String(d.getHours()).padStart(2, '0');
|
||||
const mi = String(d.getMinutes()).padStart(2, '0');
|
||||
const s = String(d.getSeconds()).padStart(2, '0');
|
||||
return `${y}-${mo}-${da} ${h}:${mi}:${s}`;
|
||||
}
|
||||
}
|
||||
49
packages/backend/src/validation/bounded-pool.ts
Executable file
49
packages/backend/src/validation/bounded-pool.ts
Executable file
@@ -0,0 +1,49 @@
|
||||
export class BoundedPool {
|
||||
private concurrency: number;
|
||||
private running: number;
|
||||
private queue: Array<() => Promise<void>>;
|
||||
|
||||
constructor(concurrency: number = 10) {
|
||||
this.concurrency = concurrency;
|
||||
this.running = 0;
|
||||
this.queue = [];
|
||||
}
|
||||
|
||||
async run<T>(fn: () => Promise<T>): Promise<T> {
|
||||
return new Promise<T>((resolve, reject) => {
|
||||
const task = async () => {
|
||||
this.running++;
|
||||
try {
|
||||
const result = await fn();
|
||||
resolve(result);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
} finally {
|
||||
this.running--;
|
||||
this.processQueue();
|
||||
}
|
||||
};
|
||||
|
||||
if (this.running < this.concurrency) {
|
||||
task();
|
||||
} else {
|
||||
this.queue.push(task);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private processQueue(): void {
|
||||
while (this.running < this.concurrency && this.queue.length > 0) {
|
||||
const task = this.queue.shift();
|
||||
if (task) task();
|
||||
}
|
||||
}
|
||||
|
||||
get pending(): number {
|
||||
return this.queue.length;
|
||||
}
|
||||
|
||||
get active(): number {
|
||||
return this.running;
|
||||
}
|
||||
}
|
||||
375
packages/backend/src/validation/link-validator.service.ts
Executable file
375
packages/backend/src/validation/link-validator.service.ts
Executable file
@@ -0,0 +1,375 @@
|
||||
// Native fetch available in Node 20+
|
||||
import config from '../config';
|
||||
import { RedisClient } from '../middleware/cache';
|
||||
import { BoundedPool } from './bounded-pool';
|
||||
import { BaiduDriver } from '../cloud/drivers/baidu.driver';
|
||||
import { AliyunDriver } from '../cloud/drivers/aliyun.driver';
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
|
||||
// 'unknown' means the validator could not reach a verdict (PanSou unreachable
// or inconclusive) — it is NOT the same as 'invalid'.
export type LinkStatus = 'valid' | 'invalid' | 'unknown';

/** Outcome of validating one share link. */
export interface ValidationResult {
  url: string;            // the share link that was checked
  status: LinkStatus;     // verdict (see LinkStatus note above)
  cloudType: string;      // e.g. 'quark', 'baidu', 'aliyun'
  checkedAt: string;      // ISO timestamp of when the check ran
  message?: string;       // human-readable detail (often PanSou's summary)
}
|
||||
|
||||
/**
|
||||
* 从系统配置加载自定义关键词列表(一行一条)
|
||||
*/
|
||||
function loadCustomKeywords(configKey: string): string[] {
|
||||
try {
|
||||
const rules = getSystemConfig(configKey);
|
||||
if (rules) {
|
||||
return rules.split('\n').map(k => k.trim()).filter(k => k.length > 0);
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
export class LinkValidator {
|
||||
private cache: RedisClient;
|
||||
private pool: BoundedPool;
|
||||
|
||||
constructor(concurrency?: number) {
|
||||
this.cache = new RedisClient();
|
||||
this.pool = new BoundedPool(concurrency || config.validation.concurrency);
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a single share link — PanSou only, no local fallback.
|
||||
*/
|
||||
async validate(url: string, cloudType: string): Promise<ValidationResult> {
|
||||
// Check cache first
|
||||
const cacheKey = `link:valid:${cloudType}:${Buffer.from(url).toString('base64').slice(0, 64)}`;
|
||||
|
||||
try {
|
||||
const cached = await this.cache.get(cacheKey);
|
||||
if (cached) {
|
||||
const parsed = JSON.parse(cached);
|
||||
return parsed as ValidationResult;
|
||||
}
|
||||
} catch {
|
||||
// ignore cache errors
|
||||
}
|
||||
|
||||
// Try PanSou's /api/check/links
|
||||
const pansouResult = await this.validateViaPansou(url, cloudType);
|
||||
if (pansouResult) {
|
||||
if (pansouResult.status === 'valid' || pansouResult.status === 'invalid') {
|
||||
// Cache definitive result
|
||||
const ttl = pansouResult.status === 'valid' ? config.validation.cacheTtlValid : config.validation.cacheTtlInvalid;
|
||||
try { await this.cache.setEx(cacheKey, ttl, JSON.stringify(pansouResult)); } catch {}
|
||||
return pansouResult;
|
||||
}
|
||||
// PanSou returned locked/unsupported/uncertain → return unknown, no local fallback
|
||||
return pansouResult;
|
||||
}
|
||||
|
||||
// PanSou unreachable → return unknown
|
||||
return { url, status: 'unknown' as LinkStatus, cloudType, checkedAt: new Date().toISOString(), message: '盘搜不可达' };
|
||||
}
|
||||
|
||||
/**
|
||||
* Full validation with local fallback when PanSou can't determine.
|
||||
*/
|
||||
async validateWithLocalFallback(url: string, cloudType: string): Promise<ValidationResult> {
|
||||
// Check cache first
|
||||
const cacheKey = `link:valid:${cloudType}:${Buffer.from(url).toString('base64').slice(0, 64)}`;
|
||||
|
||||
try {
|
||||
const cached = await this.cache.get(cacheKey);
|
||||
if (cached) {
|
||||
const parsed = JSON.parse(cached);
|
||||
return parsed as ValidationResult;
|
||||
}
|
||||
} catch {
|
||||
// ignore cache errors
|
||||
}
|
||||
|
||||
// Try PanSou
|
||||
const pansouResult = await this.validateViaPansou(url, cloudType);
|
||||
if (pansouResult) {
|
||||
if (pansouResult.status === 'valid' || pansouResult.status === 'invalid') {
|
||||
const ttl = pansouResult.status === 'valid' ? config.validation.cacheTtlValid : config.validation.cacheTtlInvalid;
|
||||
try { await this.cache.setEx(cacheKey, ttl, JSON.stringify(pansouResult)); } catch {}
|
||||
return pansouResult;
|
||||
}
|
||||
// PanSou uncertain → fall through to local validation
|
||||
}
|
||||
|
||||
// Fall back to own validation
|
||||
let result: ValidationResult;
|
||||
|
||||
switch (cloudType) {
|
||||
case 'quark':
|
||||
result = await this.validateQuark(url);
|
||||
break;
|
||||
case 'baidu':
|
||||
result = await this.validateBaidu(url);
|
||||
break;
|
||||
case 'aliyun':
|
||||
result = await this.validateAliyun(url);
|
||||
break;
|
||||
default:
|
||||
result = await this.validateByHtml(url, cloudType);
|
||||
}
|
||||
|
||||
const ttl = result.status === 'valid' ? config.validation.cacheTtlValid : config.validation.cacheTtlInvalid;
|
||||
try { await this.cache.setEx(cacheKey, ttl, JSON.stringify(result)); } catch {}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Try PanSou's /api/check/links for validation.
|
||||
* Returns null if PanSou is unreachable.
|
||||
*
|
||||
* Judgment order:
|
||||
* 1. summary "链接有效" → valid (PanSou's own OK signal)
|
||||
* 2. summary 含自定义确认关键词 → valid (from DB link_valid_keywords)
|
||||
* 3. summary 含自定义失效关键词 → invalid (from DB link_invalid_keywords)
|
||||
* 4. 其他 → unknown
|
||||
*/
|
||||
private async validateViaPansou(url: string, cloudType: string): Promise<ValidationResult | null> {
|
||||
const checkedAt = new Date().toISOString();
|
||||
try {
|
||||
const pansouApiUrl = `${config.pansouUrl}/api/check/links`;
|
||||
const response = await fetch(pansouApiUrl, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
items: [{ disk_type: cloudType, url }],
|
||||
}),
|
||||
signal: AbortSignal.timeout(10000),
|
||||
});
|
||||
|
||||
if (!response.ok) return null;
|
||||
|
||||
const data = await response.json() as any;
|
||||
const pansouResult = data.results?.[0];
|
||||
if (!pansouResult) return null;
|
||||
|
||||
const summary = pansouResult.summary || '';
|
||||
|
||||
// 1. PanSou 明确返回"链接有效"
|
||||
if (summary.includes('链接有效')) {
|
||||
return { url, status: 'valid', cloudType, checkedAt, message: summary };
|
||||
}
|
||||
|
||||
// 2. 自定义确认关键词(用户配置的"有效"信号)
|
||||
const validKeywords = loadCustomKeywords('link_valid_keywords');
|
||||
if (validKeywords.some(kw => summary.includes(kw))) {
|
||||
return { url, status: 'valid', cloudType, checkedAt, message: summary };
|
||||
}
|
||||
|
||||
// 3. 自定义失效关键词(用户配置的"失效"信号)
|
||||
const invalidKeywords = loadCustomKeywords('link_invalid_keywords');
|
||||
if (invalidKeywords.some(kw => summary.includes(kw))) {
|
||||
return { url, status: 'invalid', cloudType, checkedAt, message: summary };
|
||||
}
|
||||
|
||||
// 4. 其余全部返回 unknown
|
||||
return { url, status: 'unknown', cloudType, checkedAt, message: summary || '盘搜无法确认' };
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a Quark share link using the public share token API.
|
||||
*/
|
||||
private async validateQuark(url: string): Promise<ValidationResult> {
|
||||
const checkedAt = new Date().toISOString();
|
||||
|
||||
try {
|
||||
const cleanUrl = url.split('#')[0];
|
||||
const urlObj = new URL(cleanUrl);
|
||||
const pathParts = urlObj.pathname.split('/');
|
||||
const shareToken = pathParts[pathParts.length - 1] || pathParts[pathParts.length - 2];
|
||||
|
||||
if (!shareToken) {
|
||||
return { url, status: 'unknown', cloudType: 'quark', checkedAt, message: '无法解析分享链接 token' };
|
||||
}
|
||||
|
||||
const tokenUrl = 'https://drive-pc.quark.cn/1/clouddrive/share/sharepage/token?pr=ucpro&fr=pc';
|
||||
const response = await fetch(tokenUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
|
||||
'Content-Type': 'application/json',
|
||||
'Accept': 'application/json',
|
||||
'Origin': 'https://pan.quark.cn',
|
||||
'Referer': 'https://pan.quark.cn/',
|
||||
},
|
||||
body: JSON.stringify({ pwd_id: shareToken, passcode: '' }),
|
||||
signal: AbortSignal.timeout(15000),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const msg = response.status === 403 ? '分享已过期或需要密码' : `HTTP ${response.status}`;
|
||||
return { url, status: 'invalid', cloudType: 'quark', checkedAt, message: msg };
|
||||
}
|
||||
|
||||
const data = await response.json() as any;
|
||||
if (data.status === 200 && data.data?.stoken) {
|
||||
const title = data.data?.title || '';
|
||||
const author = data.data?.author?.nick_name || '';
|
||||
const expiredAt = data.data?.expired_at || 0;
|
||||
const expireDate = expiredAt > 0 ? new Date(expiredAt).toISOString().slice(0, 10) : '';
|
||||
return {
|
||||
url,
|
||||
status: 'valid',
|
||||
cloudType: 'quark',
|
||||
checkedAt,
|
||||
message: expireDate ? `有效链接,过期时间: ${expireDate}` : '有效链接',
|
||||
};
|
||||
}
|
||||
|
||||
// API 返回了 200 但无 stoken — 可能是临时异常,保守判 unknown
|
||||
return { url, status: 'unknown', cloudType: 'quark', checkedAt, message: 'API 返回异常(无 stoken),不做失效判定' };
|
||||
} catch (err: any) {
|
||||
return {
|
||||
url,
|
||||
status: 'unknown',
|
||||
cloudType: 'quark',
|
||||
checkedAt,
|
||||
message: `校验异常: ${err.message?.slice(0, 50) || '未知错误'}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private async validateBaidu(url: string): Promise<ValidationResult> {
|
||||
const checkedAt = new Date().toISOString();
|
||||
|
||||
try {
|
||||
const driver = new BaiduDriver();
|
||||
const result = await driver.validateShareLink(url);
|
||||
|
||||
return {
|
||||
url,
|
||||
status: result.valid ? 'valid' : 'invalid',
|
||||
cloudType: 'baidu',
|
||||
checkedAt,
|
||||
message: result.message,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return {
|
||||
url,
|
||||
status: 'unknown',
|
||||
cloudType: 'baidu',
|
||||
checkedAt,
|
||||
message: `校验失败: ${err.message || err}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private async validateAliyun(url: string): Promise<ValidationResult> {
|
||||
const checkedAt = new Date().toISOString();
|
||||
|
||||
try {
|
||||
const driver = new AliyunDriver();
|
||||
const result = await driver.validateShareLink(url);
|
||||
|
||||
return {
|
||||
url,
|
||||
status: result.valid ? 'valid' : 'invalid',
|
||||
cloudType: 'aliyun',
|
||||
checkedAt,
|
||||
message: result.message,
|
||||
};
|
||||
} catch (err: any) {
|
||||
return {
|
||||
url,
|
||||
status: 'unknown',
|
||||
cloudType: 'aliyun',
|
||||
checkedAt,
|
||||
message: `校验失败: ${err.message || err}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fallback: validate by fetching the share page as HTML and checking for
|
||||
* custom failure keywords from DB config. Used for providers without a
|
||||
* dedicated API (115, tianyi, 123pan, etc.).
|
||||
*/
|
||||
private async validateByHtml(url: string, cloudType: string): Promise<ValidationResult> {
|
||||
let status: LinkStatus = 'valid';
|
||||
const checkedAt = new Date().toISOString();
|
||||
let message = '';
|
||||
|
||||
try {
|
||||
const controller = new AbortController();
|
||||
const timeoutId = setTimeout(() => controller.abort(), config.validation.timeout);
|
||||
|
||||
const response = await fetch(url, {
|
||||
signal: controller.signal as any,
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
|
||||
'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
|
||||
},
|
||||
redirect: 'follow',
|
||||
});
|
||||
|
||||
clearTimeout(timeoutId);
|
||||
|
||||
const text = await response.text();
|
||||
const keywords = loadCustomKeywords('link_invalid_keywords');
|
||||
|
||||
const isHttpError = response.status >= 400;
|
||||
if (isHttpError) {
|
||||
status = 'invalid';
|
||||
message = `HTTP ${response.status} ${response.statusText}`;
|
||||
} else {
|
||||
const matched = keywords.find(kw => text.includes(kw));
|
||||
if (matched) {
|
||||
status = 'invalid';
|
||||
message = `页面包含自定义失效关键词: "${matched}"`;
|
||||
} else {
|
||||
message = 'HTML 页面可访问,未检测到失效关键词';
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
// On timeout or network error, conservatively mark as valid
|
||||
status = 'valid';
|
||||
message = `网络校验超时,保守标记为有效`;
|
||||
}
|
||||
|
||||
return { url, status, cloudType, checkedAt, message };
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch validate multiple links with bounded concurrency.
|
||||
*/
|
||||
async validateBatch(urls: Array<{ url: string; cloudType: string }>): Promise<ValidationResult[]> {
|
||||
const tasks = urls.map(item => () => this.validate(item.url, item.cloudType));
|
||||
const results: ValidationResult[] = [];
|
||||
|
||||
for (const task of tasks) {
|
||||
try {
|
||||
const result = await this.pool.run(task);
|
||||
results.push(result);
|
||||
} catch (err) {
|
||||
results.push({
|
||||
url: '',
|
||||
status: 'unknown',
|
||||
cloudType: '',
|
||||
checkedAt: new Date().toISOString(),
|
||||
message: '校验执行异常',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
 * Thin alias that delegates directly to {@link validateBatch}.
 * NOTE(review): presumably retained for backward compatibility with older
 * callers of the pool-specific entry point — confirm before removing.
 */
async validateBatchWithPool(urls: Array<{ url: string; cloudType: string }>): Promise<ValidationResult[]> {
  return this.validateBatch(urls);
}
|
||||
}
|
||||
12
packages/backend/src/version.ts
Normal file
12
packages/backend/src/version.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
/**
 * CloudSearch application version number.
 *
 * Versioning rules:
 * - Each small improvement/fix: patch +1 (0.0.1 → 0.0.2)
 * - After 20 patches: minor +1, patch resets to 0 (0.1.0)
 * - After 10 minors: major +1, minor resets to 0 (1.0.0)
 *
 * When changing this file, also update the `version` field in the
 * backend package.json to match.
 */

export const APP_VERSION = "0.0.2";
|
||||
37
packages/backend/src/video/video.service.ts
Executable file
37
packages/backend/src/video/video.service.ts
Executable file
@@ -0,0 +1,37 @@
|
||||
// Native fetch available in Node 20+
|
||||
import config from '../config';
|
||||
|
||||
/**
 * Normalized metadata for a parsed video share link, as assembled by
 * {@link parseVideo} from the external video-parser service's response.
 * String fields default to '' when the parser omits them.
 */
export interface VideoInfo {
  // Video title.
  title: string;
  // Cover/thumbnail image URL (parser field `coverUrl` or `cover`).
  coverUrl: string;
  // Direct video URL (parser field `videoUrl`, `url`, or `video`).
  videoUrl: string;
  // Uploader name (parser field `author` or `nickname`).
  author: string;
  // Source platform identifier as reported by the parser.
  platform: string;
  // Duration if reported; format is whatever the parser returns — TODO confirm.
  duration?: string;
}
|
||||
|
||||
export async function parseVideo(url: string): Promise<VideoInfo> {
|
||||
const apiUrl = `${config.videoParserUrl}/parse`;
|
||||
|
||||
const response = await fetch(apiUrl, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ url }),
|
||||
signal: AbortSignal.timeout(30000),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Video parser API error: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
|
||||
const data = await response.json() as any;
|
||||
|
||||
return {
|
||||
title: data.title || '',
|
||||
coverUrl: data.coverUrl || data.cover || '',
|
||||
videoUrl: data.videoUrl || data.url || data.video || '',
|
||||
author: data.author || data.nickname || '',
|
||||
platform: data.platform || '',
|
||||
duration: data.duration || '',
|
||||
};
|
||||
}
|
||||
19
packages/backend/tsconfig.json
Executable file
19
packages/backend/tsconfig.json
Executable file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "commonjs",
|
||||
"lib": ["ES2022"],
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"resolveJsonModule": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"sourceMap": true
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
Reference in New Issue
Block a user