class FileChunkUploader {
  constructor(file, options = {}) {
    this.sourceFile = file;
    this.config = {
      chunkSize: options.chunkSize || 5 << 20, // 5 MiB per chunk by default
      maxConcurrency: options.threads || 3,
      hashWorkerPath: options.hashWorkerPath || 'hash-worker.js',
      onProgress: options.onProgress || null // optional hash-progress callback, consumed by _updateProgress
    };
    this.uploadState = {
      totalChunks: Math.ceil(file.size / this.config.chunkSize),
      uploadedChunkIndices: new Set(),
      fileHash: null,
      uploadSessionId: this._generateUniqueId()
    };
  }

  async startUpload() {
    try {
      this.uploadState.fileHash = await this._calculateFileHash();
      // Instant upload: skip the transfer entirely if the server already
      // has a file with this hash.
      if (await this._checkForInstantUpload()) {
        return { success: true, skipped: true, reason: 'file already exists' };
      }
      this._restoreLocalProgress();
      await this._fetchUploadProgress();
      return await this._uploadAllChunks();
    } catch (error) {
      console.error('Upload failed:', error);
      throw new UploadError('UPLOAD_FAILED', { cause: error });
    }
  }

  // Compute the file hash off the main thread in a Web Worker so the UI
  // stays responsive while large files are read.
  _calculateFileHash() {
    return new Promise((resolve, reject) => {
      const worker = new Worker(this.config.hashWorkerPath);
      worker.postMessage({ file: this.sourceFile, operation: 'hash' });
      worker.onmessage = (event) => {
        if (event.data.progress) {
          this._updateProgress(event.data.progress);
        } else if (event.data.hash) {
          resolve(event.data.hash);
          worker.terminate();
        }
      };
      worker.onerror = (err) => {
        worker.terminate();
        reject(err);
      };
    });
  }

  // Forward hashing progress (a fraction in (0, 1]) to the optional
  // onProgress callback.
  _updateProgress(fraction) {
    if (typeof this.config.onProgress === 'function') {
      this.config.onProgress(fraction);
    }
  }

  async _checkForInstantUpload() {
    const response = await fetch(`/api/files/check?hash=${this.uploadState.fileHash}`);
    const { exists } = await response.json();
    return exists;
  }

  // Restore the record written by _saveProgressLocally so an interrupted
  // upload can resume after a page reload. The saved record also carries
  // the original session id, which the server-side progress query needs
  // (a freshly generated id would never match a previous session).
  _restoreLocalProgress() {
    try {
      const saved = localStorage.getItem(`uploadProgress_${this.uploadState.fileHash}`);
      if (!saved) return;
      const { sessionId, uploadedChunks } = JSON.parse(saved);
      this.uploadState.uploadSessionId = sessionId;
      uploadedChunks.forEach((index) => this.uploadState.uploadedChunkIndices.add(index));
    } catch {
      // Missing or corrupt local record: continue with a fresh session.
    }
  }

  async _fetchUploadProgress() {
    try {
      const response = await fetch(
        `/api/uploads/progress?sessionId=${this.uploadState.uploadSessionId}`
      );
      const { uploadedChunks } = await response.json();
      uploadedChunks.forEach((index) => {
        this.uploadState.uploadedChunkIndices.add(index);
      });
    } catch (error) {
      console.warn('Failed to fetch upload progress; chunks will be re-uploaded', error);
    }
  }

  async _uploadAllChunks() {
    const pendingChunks = this._getPendingChunks();
    await this._uploadWithConcurrencyControl(pendingChunks);
    return this._finalizeUpload();
  }

  // Indices of chunks not yet confirmed as uploaded.
  _getPendingChunks() {
    return Array.from({ length: this.uploadState.totalChunks }, (_, index) => index)
      .filter((index) => !this.uploadState.uploadedChunkIndices.has(index));
  }

  // Keep at most maxConcurrency chunk uploads in flight: when the pool is
  // full, wait for any one upload to settle before starting the next.
  async _uploadWithConcurrencyControl(chunkIndices) {
    const activeUploads = new Set();
    for (const chunkIndex of chunkIndices) {
      const uploadPromise = this._uploadSingleChunk(chunkIndex)
        .finally(() => activeUploads.delete(uploadPromise));
      activeUploads.add(uploadPromise);
      if (activeUploads.size >= this.config.maxConcurrency) {
        await Promise.race(activeUploads);
      }
    }
    await Promise.all(activeUploads);
  }

  // Upload one chunk, retrying up to maxRetries times before giving up.
  async _uploadSingleChunk(chunkIndex, maxRetries = 3) {
    let attempt = 0;
    while (attempt < maxRetries) {
      try {
        const chunkData = this._getChunkData(chunkIndex);
        await this._sendChunkToServer(chunkIndex, chunkData);
        this.uploadState.uploadedChunkIndices.add(chunkIndex);
        this._saveProgressLocally();
        return;
      } catch (error) {
        attempt++;
        if (attempt >= maxRetries) {
          throw new UploadError('CHUNK_UPLOAD_FAILED', {
            chunkIndex,
            attempts: maxRetries,
            error
          });
        }
      }
    }
  }

  _getChunkData(chunkIndex) {
    const start = chunkIndex * this.config.chunkSize;
    const end = Math.min(start + this.config.chunkSize, this.sourceFile.size);
    return this.sourceFile.slice(start, end);
  }

  async _sendChunkToServer(chunkIndex, chunkData) {
    const formData = new FormData();
    formData.append('file', chunkData);
    formData.append('chunkIndex', chunkIndex);
    formData.append('totalChunks', this.uploadState.totalChunks);
    formData.append('fileHash', this.uploadState.fileHash);
    formData.append('sessionId', this.uploadState.uploadSessionId);
    const response = await fetch('/api/uploads/chunk', {
      method: 'POST',
      body: formData
    });
    if (!response.ok) {
      throw new Error(`Server returned an error: ${response.status}`);
    }
  }

  // Ask the server to merge all uploaded chunks into the final file.
  async _finalizeUpload() {
    const response = await fetch('/api/uploads/complete', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        fileHash: this.uploadState.fileHash,
        sessionId: this.uploadState.uploadSessionId,
        totalChunks: this.uploadState.totalChunks
      })
    });
    if (!response.ok) {
      throw new Error('Failed to merge chunks');
    }
    return { success: true };
  }

  _generateUniqueId() {
    return Math.random().toString(36).substring(2) + Date.now().toString(36);
  }

  _saveProgressLocally() {
    const progressData = {
      sessionId: this.uploadState.uploadSessionId,
      uploadedChunks: Array.from(this.uploadState.uploadedChunkIndices),
      timestamp: Date.now()
    };
    localStorage.setItem(
      `uploadProgress_${this.uploadState.fileHash}`,
      JSON.stringify(progressData)
    );
  }
}
class UploadError extends Error {
  constructor(code, details = {}) {
    super();
    this.name = 'UploadError';
    this.code = code;
    this.details = details;
    this.message = this._getErrorMessage();
  }

  _getErrorMessage() {
    const messages = {
      UPLOAD_FAILED: 'File upload failed',
      CHUNK_UPLOAD_FAILED: `Chunk upload failed (attempts: ${this.details.attempts})`,
      NETWORK_ERROR: 'Network connection problem'
    };
    return messages[this.code] || 'An unknown error occurred';
  }
}
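// Minimal usage sketch wiring the uploader to a file input. The element id
// "file-input" is an assumption for illustration; the endpoints and worker
// path come from the class defaults above.
document.querySelector('#file-input').addEventListener('change', async (event) => {
  const file = event.target.files[0];
  if (!file) return;

  const uploader = new FileChunkUploader(file, {
    threads: 3,
    onProgress: (fraction) => console.log(`Hashing: ${Math.round(fraction * 100)}%`)
  });

  try {
    const result = await uploader.startUpload();
    console.log(result.skipped ? 'Instant upload: file already on server' : 'Upload complete', result);
  } catch (err) {
    if (err instanceof UploadError) {
      console.error(err.code, err.message, err.details);
    } else {
      console.error(err);
    }
  }
});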
// hash-worker.js — dedicated Web Worker (the default hashWorkerPath above)
// that computes an incremental MD5 of the file with spark-md5, posting
// progress messages along the way.
self.importScripts('spark-md5.min.js');

self.onmessage = async (event) => {
  const file = event.data.file;
  const chunkSize = 2 * 1024 * 1024; // hash in 2 MiB slices to bound memory use
  const totalChunks = Math.ceil(file.size / chunkSize);
  const hasher = new self.SparkMD5.ArrayBuffer();

  for (let currentChunk = 0; currentChunk < totalChunks; currentChunk++) {
    const chunkData = await getFileChunk(file, currentChunk * chunkSize, chunkSize);
    hasher.append(chunkData);
    self.postMessage({ progress: (currentChunk + 1) / totalChunks });
  }
  self.postMessage({ hash: hasher.end() });
};

// Read one slice of the file as an ArrayBuffer.
function getFileChunk(file, start, length) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = (loadEvent) => resolve(loadEvent.target.result);
    reader.onerror = () => reject(reader.error);
    reader.readAsArrayBuffer(file.slice(start, start + length));
  });
}