chore: initial commit - CloudSearch v0.0.2
This commit is contained in:
323
packages/backend/src/cloud/cloud.service.ts
Normal file
323
packages/backend/src/cloud/cloud.service.ts
Normal file
@@ -0,0 +1,323 @@
|
||||
import { getDb } from '../database/database';
|
||||
import { localTimestamp, formatLocalDateTime } from '../utils/time';
|
||||
import { getSystemConfig } from '../admin/system-config.service';
|
||||
import { QuarkDriver } from './drivers/quark.driver';
|
||||
import { BaiduDriver } from './drivers/baidu.driver';
|
||||
import { CloudConfig, getAndValidateCredential, getActiveCloudConfigs } from './credential.service';
|
||||
import { lookupIpLocation } from './ip-lookup';
|
||||
|
||||
/**
 * In-flight save dedup: maps "cloudType:shareUrl" → the running save's
 * promise, so concurrent requests for the same URL await one transfer
 * instead of racing (see saveFromShare for set/delete lifecycle).
 */
const inFlightSaves = new Map<string, Promise<SaveResult>>();
|
||||
|
||||
/**
 * Result of a cloud save operation, returned to API callers.
 * The share link is exposed under both camelCase and snake_case keys so
 * that consumers expecting either naming convention keep working.
 */
export interface SaveResult {
  // true when the save succeeded or an earlier successful save was reused
  success: boolean;
  // resulting share link (camelCase variant)
  shareUrl?: string;
  // same share link, snake_case variant — kept for backward compatibility
  share_url?: string;
  // extraction password for the share; '' when the share has none
  sharePwd?: string;
  folderName?: string;
  // human-readable status message (user-facing, Chinese)
  message: string;
  file_count?: number;
  folder_count?: number;
  // wall-clock duration of the transfer; 0 for deduped/reused results
  duration_ms?: number;
}
|
||||
|
||||
/**
 * Row shape of the save_records table — one row per save attempt,
 * including failed attempts and reuse hits.
 */
export interface SaveRecord {
  id: number;
  // cloud type the request targeted (written with the same value as target_cloud)
  source_type: string;
  source_title: string | null;
  // the original share URL the user asked to transfer
  source_url: string;
  target_cloud: string;
  // resulting share link; null when the save failed
  share_url: string | null;
  share_pwd: string | null;
  file_size: string | null;
  file_count: number;
  folder_count: number;
  duration_ms: number;
  // one of 'success' | 'failed' | 'reused' (see doSaveFromShare)
  status: string;
  error_message: string | null;
  folder_name: string | null;
  original_folder_name: string | null;
  ip_address: string | null;
  ip_location: string | null;
  // local-time timestamp string produced by localTimestamp()
  created_at: string;
}
|
||||
|
||||
/** Core save logic extracted so inFlight dedup can wrap it */
|
||||
async function doSaveFromShare(shareUrl: string, cloudType: string, sourceTitle?: string, ipAddress?: string): Promise<SaveResult> {
|
||||
const db = getDb();
|
||||
const ipLocation = await lookupIpLocation(ipAddress || '');
|
||||
|
||||
// ── Short-term dedup: prevent duplicate saves of the same URL within 60 seconds ──
|
||||
const DEDUP_WINDOW_SEC = 60;
|
||||
let dedupCutoff = '';
|
||||
try {
|
||||
const recentCutoff = db.prepare(
|
||||
`SELECT datetime('now','localtime', '-${DEDUP_WINDOW_SEC} seconds') as cutoff`
|
||||
).get() as { cutoff: string };
|
||||
dedupCutoff = recentCutoff.cutoff;
|
||||
|
||||
const recentRecord = db.prepare(
|
||||
`SELECT share_url, share_pwd, status, error_message, folder_name, original_folder_name FROM save_records
|
||||
WHERE source_url = ? AND created_at >= ?
|
||||
ORDER BY created_at DESC LIMIT 1`
|
||||
).get(shareUrl, dedupCutoff) as {
|
||||
share_url: string | null; share_pwd: string | null; status: string;
|
||||
error_message: string | null; folder_name: string | null; original_folder_name: string | null;
|
||||
} | undefined;
|
||||
|
||||
if (recentRecord) {
|
||||
const alreadySaved = recentRecord.status === 'success' || recentRecord.status === 'reused';
|
||||
if (alreadySaved && recentRecord.share_url) {
|
||||
console.log(`[Share] 🛡️ Dedup: ${shareUrl} was saved ${DEDUP_WINDOW_SEC}s ago (status=${recentRecord.status}), returning existing share link`);
|
||||
db.prepare(
|
||||
`INSERT INTO save_records (source_type, source_title, source_url, target_cloud, share_url, share_pwd, file_size, file_count, folder_count, duration_ms, status, error_message, folder_name, original_folder_name, ip_address, ip_location, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
|
||||
).run(
|
||||
cloudType, sourceTitle || null, shareUrl, cloudType,
|
||||
recentRecord.share_url, recentRecord.share_pwd || null,
|
||||
null, 0, 0, 0, 'reused', null,
|
||||
recentRecord.folder_name || null, recentRecord.original_folder_name || null,
|
||||
ipAddress || null, ipLocation, localTimestamp(),
|
||||
);
|
||||
return {
|
||||
success: true,
|
||||
message: `🛡️ 此资源刚在 ${DEDUP_WINDOW_SEC} 秒内转存过,直接返回已有分享链接`,
|
||||
share_url: recentRecord.share_url, shareUrl: recentRecord.share_url,
|
||||
sharePwd: recentRecord.share_pwd || '', folderName: '',
|
||||
file_count: 0, folder_count: 0, duration_ms: 0,
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.log(`[Share] Dedup check failed: ${err.message}, proceeding with normal save`);
|
||||
}
|
||||
|
||||
// ── Share link reuse: if same source URL was already saved successfully, validate and reuse ──
|
||||
const reuseEnabled = getSystemConfig('save_reuse_enabled');
|
||||
if (reuseEnabled !== 'false') {
|
||||
try {
|
||||
const existing = db.prepare(
|
||||
`SELECT share_url, share_pwd, folder_name, original_folder_name FROM save_records
|
||||
WHERE source_url = ? AND status IN ('success', 'reused') AND share_url IS NOT NULL AND share_url != ''
|
||||
ORDER BY created_at DESC LIMIT 1`
|
||||
).get(shareUrl) as { share_url: string; share_pwd: string; folder_name: string | null; original_folder_name: string | null } | undefined;
|
||||
|
||||
if (existing?.share_url) {
|
||||
const { LinkValidator } = await import('../validation/link-validator.service');
|
||||
const validator = new LinkValidator();
|
||||
const validation = await validator.validate(existing.share_url, 'quark');
|
||||
if (validation.status === 'valid') {
|
||||
const isFirstReuse = dedupCutoff ? !db.prepare(
|
||||
`SELECT 1 FROM save_records WHERE source_url = ? AND created_at >= ? AND status = 'reused' LIMIT 1`
|
||||
).get(shareUrl, dedupCutoff) : true;
|
||||
const reuseStatus = isFirstReuse ? 'success' : 'reused';
|
||||
const reuseMsg = isFirstReuse
|
||||
? `♻️ 检测到此资源之前已转存过,直接复用已存在的分享链接`
|
||||
: `♻️ 短时间内重复请求,复用已有分享链接`;
|
||||
|
||||
console.log(`[Share] ♻️ Reusing existing share link for ${shareUrl}: ${existing.share_url} (firstReuse=${isFirstReuse})`);
|
||||
db.prepare(
|
||||
`INSERT INTO save_records (source_type, source_title, source_url, target_cloud, share_url, share_pwd, file_size, file_count, folder_count, duration_ms, status, error_message, folder_name, original_folder_name, ip_address, ip_location, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
|
||||
).run(
|
||||
cloudType, sourceTitle || null, shareUrl, cloudType,
|
||||
existing.share_url, existing.share_pwd || null,
|
||||
null, 0, 0, 0, reuseStatus, null,
|
||||
existing.folder_name || null, existing.original_folder_name || null,
|
||||
ipAddress || null, ipLocation, localTimestamp(),
|
||||
);
|
||||
return {
|
||||
success: true, message: reuseMsg,
|
||||
share_url: existing.share_url, shareUrl: existing.share_url,
|
||||
sharePwd: existing.share_pwd || '', folderName: '',
|
||||
file_count: 0, folder_count: 0, duration_ms: 0,
|
||||
};
|
||||
}
|
||||
console.log(`[Share] Existing share link for ${shareUrl} is invalid/expired, will re-save`);
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.log(`[Share] Link reuse check failed: ${err.message}, proceeding with normal save`);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Unified credential validation ──
|
||||
const credential = await getAndValidateCredential(cloudType);
|
||||
if (!credential.valid || !credential.config) {
|
||||
return { success: false, message: credential.message };
|
||||
}
|
||||
const config = credential.config;
|
||||
|
||||
// ── Check transfer enabled ──
|
||||
if (config.is_transfer_enabled === 0) {
|
||||
return { success: false, message: `${config.nickname || cloudType} 的转存功能已关闭,请先在后台开启` };
|
||||
}
|
||||
|
||||
const startTime = Date.now();
|
||||
|
||||
try {
|
||||
let driverResult: { success: boolean; message: string; shareUrl?: string; sharePwd?: string; folderName?: string; fileCount?: number; folderCount?: number; originalFolderName?: string };
|
||||
|
||||
switch (cloudType) {
|
||||
case 'quark': {
|
||||
const driver = new QuarkDriver({ cookie: config.cookie!, nickname: config.nickname });
|
||||
driverResult = await driver.saveFromShare(shareUrl, sourceTitle);
|
||||
break;
|
||||
}
|
||||
case 'baidu': {
|
||||
const driver = new BaiduDriver({ cookie: config.cookie!, nickname: config.nickname });
|
||||
driverResult = await driver.saveFromShare(shareUrl, sourceTitle);
|
||||
break;
|
||||
}
|
||||
case 'aliyun':
|
||||
return { success: false, message: '阿里云盘保存功能暂未实现' };
|
||||
default:
|
||||
return { success: false, message: `暂不支持 ${cloudType} 的保存功能` };
|
||||
}
|
||||
|
||||
const durationMs = Date.now() - startTime;
|
||||
|
||||
if (driverResult.success) {
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET last_used_at = datetime('now','localtime'), total_saves = total_saves + 1, consecutive_failures = 0 WHERE id = ?`
|
||||
).run(config.id);
|
||||
} else if ((driverResult as any).cookieExpired) {
|
||||
// Cookie expired — don't count as failure, user needs to re-login
|
||||
} else {
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET consecutive_failures = consecutive_failures + 1 WHERE id = ?`
|
||||
).run(config.id);
|
||||
}
|
||||
|
||||
db.prepare(
|
||||
`INSERT INTO save_records (source_type, source_title, source_url, target_cloud, share_url, share_pwd, file_size, file_count, folder_count, duration_ms, status, error_message, folder_name, original_folder_name, ip_address, ip_location, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
|
||||
).run(
|
||||
cloudType, sourceTitle || driverResult.folderName || null, shareUrl, cloudType,
|
||||
driverResult.shareUrl || null, driverResult.sharePwd || null,
|
||||
null, driverResult.fileCount || 0, driverResult.folderCount || 0,
|
||||
durationMs, driverResult.success ? 'success' : 'failed',
|
||||
driverResult.success ? null : driverResult.message,
|
||||
driverResult.folderName || null, driverResult.originalFolderName || null,
|
||||
ipAddress || null, ipLocation, localTimestamp(),
|
||||
);
|
||||
|
||||
return {
|
||||
success: driverResult.success,
|
||||
message: driverResult.message,
|
||||
share_url: driverResult.shareUrl || '',
|
||||
shareUrl: driverResult.shareUrl,
|
||||
sharePwd: (driverResult as any).sharePwd || '',
|
||||
folderName: driverResult.folderName || '',
|
||||
file_count: driverResult.fileCount || 0,
|
||||
folder_count: driverResult.folderCount || 0,
|
||||
duration_ms: durationMs,
|
||||
};
|
||||
} catch (err: any) {
|
||||
const durationMs = Date.now() - startTime;
|
||||
const errorMessage = err.message || 'Failed to save to cloud';
|
||||
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET consecutive_failures = consecutive_failures + 1 WHERE id = ?`
|
||||
).run(config.id);
|
||||
|
||||
db.prepare(
|
||||
`INSERT INTO save_records (source_type, source_url, target_cloud, duration_ms, status, error_message, ip_address, ip_location, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`
|
||||
).run(cloudType, shareUrl, cloudType, durationMs, 'failed', errorMessage, ipAddress || null, ipLocation, localTimestamp());
|
||||
|
||||
return { success: false, message: errorMessage };
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveFromShare(shareUrl: string, cloudType: string, sourceTitle?: string, ipAddress?: string): Promise<SaveResult> {
|
||||
const key = `${cloudType}:${shareUrl}`;
|
||||
|
||||
const inflight = inFlightSaves.get(key);
|
||||
if (inflight) {
|
||||
console.log(`[Share] ⏳ In-flight: ${shareUrl} — another save is already running, awaiting result`);
|
||||
return inflight;
|
||||
}
|
||||
|
||||
const promise = doSaveFromShare(shareUrl, cloudType, sourceTitle, ipAddress);
|
||||
inFlightSaves.set(key, promise);
|
||||
try {
|
||||
return await promise;
|
||||
} finally {
|
||||
inFlightSaves.delete(key);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Save Records ──────────────────────────────────────────────────
|
||||
|
||||
export function getSaveRecords(page: number = 1, pageSize: number = 20, startDate?: string, endDate?: string, status?: string, sourceType?: string, keyword?: string): { total: number; records: SaveRecord[]; summary?: { total: number; success: number; failed: number; reused: number } } {
|
||||
const db = getDb();
|
||||
const offset = (page - 1) * pageSize;
|
||||
const conditions: string[] = [];
|
||||
const params: any[] = [];
|
||||
const summaryConditions: string[] = [];
|
||||
const summaryParams: any[] = [];
|
||||
if (startDate) {
|
||||
conditions.push('created_at >= ?'); params.push(startDate);
|
||||
summaryConditions.push('created_at >= ?'); summaryParams.push(startDate);
|
||||
}
|
||||
if (endDate) {
|
||||
conditions.push('created_at < ?'); params.push(endDate);
|
||||
summaryConditions.push('created_at < ?'); summaryParams.push(endDate);
|
||||
}
|
||||
if (status) { conditions.push('status = ?'); params.push(status); }
|
||||
if (sourceType) {
|
||||
conditions.push('source_type = ?'); params.push(sourceType);
|
||||
summaryConditions.push('source_type = ?'); summaryParams.push(sourceType);
|
||||
}
|
||||
if (keyword) { conditions.push('source_title LIKE ?'); params.push(`%${keyword}%`); }
|
||||
const where = conditions.length > 0 ? 'WHERE ' + conditions.join(' AND ') : '';
|
||||
const total = (db.prepare(`SELECT COUNT(*) as count FROM save_records ${where}`).get(...params) as any).count;
|
||||
const records = db.prepare(
|
||||
`SELECT * FROM save_records ${where} ORDER BY created_at DESC LIMIT ? OFFSET ?`
|
||||
).all(...params, pageSize, offset) as SaveRecord[];
|
||||
|
||||
const summaryWhere = summaryConditions.length > 0 ? 'WHERE ' + summaryConditions.join(' AND ') : '';
|
||||
const summaryRows = db.prepare(
|
||||
`SELECT status, COUNT(*) as cnt FROM save_records ${summaryWhere} GROUP BY status`
|
||||
).all(...summaryParams) as { status: string; cnt: number }[];
|
||||
let sumTotal = 0, sumSuccess = 0, sumFailed = 0, sumReused = 0;
|
||||
for (const r of summaryRows) {
|
||||
sumTotal += r.cnt;
|
||||
if (r.status === 'success') sumSuccess = r.cnt;
|
||||
else if (r.status === 'failed') sumFailed = r.cnt;
|
||||
else if (r.status === 'reused') sumReused = r.cnt;
|
||||
}
|
||||
const summary = { total: sumTotal, success: sumSuccess, failed: sumFailed, reused: sumReused };
|
||||
|
||||
return { total, records, summary };
|
||||
}
|
||||
|
||||
export function cleanupOldSaveRecords(): void {
|
||||
const db = getDb();
|
||||
const cutoff = formatLocalDateTime(new Date(Date.now() - 60 * 24 * 60 * 60 * 1000));
|
||||
const deleted = db.prepare('DELETE FROM save_records WHERE created_at < ?').run(cutoff);
|
||||
console.log(`[Cleanup] Deleted ${deleted.changes} save records older than 60 days (before ${cutoff})`);
|
||||
}
|
||||
|
||||
// ── Storage Refresh ───────────────────────────────────────────────
|
||||
|
||||
export async function refreshAllStorageInfo(): Promise<void> {
|
||||
const configs = getActiveCloudConfigs().filter(c => c.cloud_type === 'quark' && c.cookie);
|
||||
if (configs.length === 0) return;
|
||||
|
||||
for (const cfg of configs) {
|
||||
try {
|
||||
const { QuarkDriver } = require('./drivers/quark.driver');
|
||||
const driver = new QuarkDriver({ cookie: cfg.cookie, nickname: cfg.nickname });
|
||||
const storage = await driver.getStorageInfo();
|
||||
if (storage.totalBytes > 0 || storage.usedBytes > 0) {
|
||||
const db = getDb();
|
||||
db.prepare(
|
||||
`UPDATE cloud_configs SET storage_used = ?, storage_total = ? WHERE id = ?`
|
||||
).run(storage.used, storage.total, cfg.id);
|
||||
}
|
||||
} catch (err: any) {
|
||||
console.error(`[Storage] Failed to refresh quark#${cfg.id}:`, err.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user