// File upload routes (上传文件路由)
// Express router providing chunked (resumable) file-upload endpoints.
const express = require('express');
const router = express.Router();
const multer = require('multer');
const fs = require('fs');
const path = require('path');
// Ensure the final upload directory and the temporary chunk-staging
// directory exist. mkdirSync with { recursive: true } is a no-op when the
// directory already exists, so the previous existsSync pre-checks were
// redundant (and racy).
const uploadDir = path.join(__dirname, '../backend/uploads');
const tempDir = path.join(uploadDir, 'temp');
fs.mkdirSync(uploadDir, { recursive: true });
fs.mkdirSync(tempDir, { recursive: true });
// Persistent registry of completed uploads, keyed by content hash
// (hash -> { fileName, filePath, fileSize, uploadDate, hash, algorithm }).
// Loaded once at startup; a corrupt/unreadable file leaves it empty.
const hashFilePath = path.join(__dirname, '../backend/fileHashes.json');
let fileHashes = {};
try {
  if (fs.existsSync(hashFilePath)) {
    const raw = fs.readFileSync(hashFilePath, 'utf8');
    fileHashes = JSON.parse(raw || '{}');
  }
} catch (err) {
  console.error('Error reading hash file:', err);
}
/**
 * Persist the in-memory hash registry to fileHashes.json (pretty-printed).
 * Synchronous write; any I/O error propagates to the caller.
 */
function saveFileHashes() {
  const serialized = JSON.stringify(fileHashes, null, 2);
  fs.writeFileSync(hashFilePath, serialized, 'utf8');
}
// Multer disk storage for individual chunks. fileHash and chunkIndex are
// read from query parameters because multipart body fields are not yet
// parsed when destination/filename callbacks run.
// SECURITY: fileHash is used as a directory name, so it must be restricted
// to hex-like characters to prevent path traversal (e.g. "../../etc").
const chunkStorage = multer.diskStorage({
  destination: (req, file, cb) => {
    const fileHash = req.query.fileHash;
    if (!fileHash) return cb(new Error('Missing file hash'));
    if (!/^[A-Za-z0-9]+$/.test(fileHash)) {
      return cb(new Error('Invalid file hash'));
    }
    const chunkDir = path.join(tempDir, fileHash);
    // recursive mkdir is a no-op when the directory already exists.
    fs.mkdirSync(chunkDir, { recursive: true });
    cb(null, chunkDir);
  },
  filename: (req, file, cb) => {
    // Validate the index so we never create "chunk-undefined"/"chunk-NaN".
    const chunkIndex = Number.parseInt(req.query.chunkIndex, 10);
    if (Number.isNaN(chunkIndex) || chunkIndex < 0) {
      return cb(new Error('Invalid chunk index'));
    }
    cb(null, `chunk-${chunkIndex}`);
  },
});
const uploadChunk = multer({ storage: chunkStorage });
// POST /check — instant-upload / dedup probe: reports whether a file with
// this content hash has already been fully uploaded.
router.post('/check', express.json(), (req, res) => {
  const { fileHash } = req.body;
  if (!fileHash) {
    return res.status(400).json({
      exists: false,
      message: '缺少文件哈希值',
    });
  }
  // Own-property check: a plain truthiness lookup would wrongly report
  // keys like "__proto__" or "constructor" as existing files.
  if (Object.prototype.hasOwnProperty.call(fileHashes, fileHash)) {
    return res.json({
      exists: true,
      message: `文件已存在: ${fileHashes[fileHash].fileName}`,
    });
  }
  res.json({
    exists: false,
    message: '文件不存在,可以上传',
  });
});
// POST /checkChunks — list the chunk indices already received for a file
// hash so the client can resume an interrupted upload.
// NOTE(review): fileHash is joined into a path here without validation;
// read-only, but consider applying the same hash whitelist as the upload.
router.post('/checkChunks', express.json(), (req, res) => {
  const { fileHash, totalChunks } = req.body;
  if (!fileHash) {
    return res.status(400).json({
      message: '缺少文件哈希值',
    });
  }
  const chunkDir = path.join(tempDir, fileHash);
  const uploadedChunks = [];
  if (fs.existsSync(chunkDir)) {
    for (const file of fs.readdirSync(chunkDir)) {
      if (!file.startsWith('chunk-')) continue;
      // Explicit radix; skip files whose suffix is not a number.
      const chunkIndex = Number.parseInt(file.split('-')[1], 10);
      if (!Number.isNaN(chunkIndex)) uploadedChunks.push(chunkIndex);
    }
  }
  res.json({
    uploadedChunks,
    message: `已找到 ${uploadedChunks.length}/${totalChunks} 个切片`,
  });
});
// POST /uploadChunk — receive one chunk (multipart field "file").
// fileHash / chunkIndex / chunkSize arrive as query parameters; the stored
// chunk is size-checked against chunkSize and deleted on any mismatch so a
// truncated chunk is never silently kept.
router.post('/uploadChunk', uploadChunk.single('file'), (req, res) => {
  if (!req.file) {
    return res.status(400).json({
      success: false,
      message: '没有切片被上传',
    });
  }
  const fileHash = req.query.fileHash;
  const chunkIndex = req.query.chunkIndex;
  const expectedSize = Number.parseInt(req.query.chunkSize, 10);
  if (!fileHash || !chunkIndex || Number.isNaN(expectedSize)) {
    // Multer already wrote the chunk to disk; remove it, it can't be validated.
    try {
      if (req.file.path) fs.unlinkSync(req.file.path);
    } catch (err) {
      console.error('删除无效切片失败:', err);
    }
    return res.status(400).json({
      success: false,
      message: '缺少必要参数',
    });
  }
  try {
    // Verify the chunk landed on disk at the advertised size.
    const stats = fs.statSync(req.file.path);
    if (stats.size !== expectedSize) {
      fs.unlinkSync(req.file.path);
      return res.status(400).json({
        success: false,
        message: `切片大小不匹配: 预期 ${expectedSize} 字节, 实际 ${stats.size} 字节`,
        expectedSize,
        actualSize: stats.size,
      });
    }
    res.json({
      success: true,
      message: '切片上传成功',
      chunkIndex: Number.parseInt(chunkIndex, 10),
      fileHash,
    });
  } catch (err) {
    console.error(`切片验证失败: ${req.file.path}`, err);
    try {
      if (fs.existsSync(req.file.path)) fs.unlinkSync(req.file.path);
    } catch (cleanupErr) {
      console.error('删除切片失败:', cleanupErr);
    }
    res.status(500).json({
      success: false,
      message: '切片验证失败',
      error: err.message,
    });
  }
});
// POST /merge — concatenate all uploaded chunks for fileHash into the final
// file (streamed, in order, constant memory), record it in the hash
// registry, and remove the temp chunk directory.
router.post('/merge', express.json(), (req, res) => {
  const { fileHash, fileName, totalChunks } = req.body;
  if (!fileHash || !fileName || totalChunks === undefined) {
    return res.status(400).json({
      success: false,
      message: '缺少必要参数',
    });
  }
  // SECURITY: both values are client-controlled and used to build paths.
  // basename() strips directory components (e.g. fileName = "../../evil");
  // the hash must match the same whitelist used at chunk-upload time.
  if (!/^[A-Za-z0-9]+$/.test(fileHash)) {
    return res.status(400).json({
      success: false,
      message: '非法的文件哈希值',
    });
  }
  const safeName = path.basename(fileName);
  const chunkCount = Number(totalChunks);
  const chunkDir = path.join(tempDir, fileHash);
  const mergedFilePath = path.join(uploadDir, `${fileHash}-${safeName}`);

  // Refuse to merge unless every expected chunk is present.
  for (let i = 0; i < chunkCount; i++) {
    if (!fs.existsSync(path.join(chunkDir, `chunk-${i}`))) {
      return res.status(400).json({
        success: false,
        message: '部分切片缺失,无法合并',
      });
    }
  }

  try {
    const writeStream = fs.createWriteStream(mergedFilePath);
    // Without this handler a disk error would crash the process
    // (unhandled 'error' event on a stream).
    writeStream.on('error', (err) => {
      console.error('合并文件失败:', err);
      if (!res.headersSent) {
        res.status(500).json({
          success: false,
          message: '合并文件失败',
          error: err.message,
        });
      }
    });

    // Append chunks strictly in order; recursion on the 'end' event keeps
    // sequencing without buffering the whole file.
    const mergeChunks = (index) => {
      if (index >= chunkCount) {
        writeStream.end(() => {
          // Merge finished: drop the temp dir, then record the file.
          fs.rm(chunkDir, { recursive: true }, (err) => {
            if (err) console.error('删除临时目录失败:', err);
            try {
              const stats = fs.statSync(mergedFilePath);
              fileHashes[fileHash] = {
                fileName: safeName,
                filePath: mergedFilePath,
                fileSize: stats.size,
                uploadDate: new Date().toISOString(),
                hash: fileHash,
                algorithm: 'MD5',
              };
              saveFileHashes();
              res.json({
                success: true,
                message: '文件合并成功',
                filePath: mergedFilePath,
              });
            } catch (finalErr) {
              // statSync/saveFileHashes can throw; previously this would
              // escape the callback and crash the process.
              console.error('合并文件失败:', finalErr);
              if (!res.headersSent) {
                res.status(500).json({
                  success: false,
                  message: '合并文件失败',
                  error: finalErr.message,
                });
              }
            }
          });
        });
        return;
      }
      const chunkPath = path.join(chunkDir, `chunk-${index}`);
      const readStream = fs.createReadStream(chunkPath);
      readStream.pipe(writeStream, { end: false });
      readStream.on('end', () => {
        // Delete each chunk as soon as it has been appended.
        fs.unlink(chunkPath, (err) => {
          if (err) console.error(`删除切片 ${index} 失败:`, err);
          mergeChunks(index + 1);
        });
      });
      readStream.on('error', (err) => {
        writeStream.close();
        console.error(`读取切片 ${index} 失败:`, err);
        if (!res.headersSent) {
          res.status(500).json({
            success: false,
            message: '合并文件失败',
            error: err.message,
          });
        }
      });
    };
    mergeChunks(0);
  } catch (err) {
    console.error('合并文件失败:', err);
    res.status(500).json({
      success: false,
      message: '合并文件失败',
      error: err.message,
    });
  }
});
module.exports = router;