
Commit

feat: 大文件上传示例 (large file upload example)
zh4554 committed Sep 26, 2024
1 parent c7e9241 commit 83a7315
Showing 6 changed files with 852 additions and 0 deletions.
8 changes: 8 additions & 0 deletions src/router/modules/normal.ts
@@ -43,6 +43,14 @@ const routes: Array<RouteRecordRaw> = [{
      title: '文件上传',
    },
  },
  {
    path: 'webworker',
    component: () => import('@/views/base_comp/file_upload/webworker.vue'),
    name: 'webWorker',
    meta: {
      title: '大文件上传',
    },
  },
  {
    path: 'base_table',
    component: () => import('@/views/base_comp/base_table/index.vue'),
20 changes: 20 additions & 0 deletions src/views/base_comp/file_upload/file_utils/createChunk.js
@@ -0,0 +1,20 @@
import SparkMD5 from './spark-md5'
// Create one chunk: slice the file, read the slice into memory, and compute its MD5 hash
export function createChunk(file, index, chunkSize) {
  return new Promise((resolve) => {
    const start = index * chunkSize
    const end = start + chunkSize
    const spark = new SparkMD5.ArrayBuffer()
    const fileReader = new FileReader()
    fileReader.onload = (e) => {
      spark.append(e.target.result)
      resolve({
        start,
        end,
        index,
        hash: spark.end(), // MD5 hex digest of this slice
      })
    }
    fileReader.readAsArrayBuffer(file.slice(start, end))
  })
}
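For reference, createChunk can also be awaited directly on the main thread. A minimal sketch, assuming a plain file input on the page (the selector and handler below are illustrative, not part of this commit):

import { createChunk } from './createChunk'

// Hash the first 5 MB slice of a user-selected file (5 MB mirrors CHUNK_SIZE in cutFile.ts below)
document.querySelector('input[type="file"]').addEventListener('change', async (e) => {
  const [file] = e.target.files
  const { index, start, end, hash } = await createChunk(file, 0, 5 * 1024 * 1024)
  console.log(index, start, end, hash) // incremental MD5 of the bytes in [start, end)
})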
40 changes: 40 additions & 0 deletions src/views/base_comp/file_upload/file_utils/cutFile.ts
@@ -0,0 +1,40 @@
// Size of each chunk (5 MB)
const CHUNK_SIZE = 5 * 1024 * 1024
// Number of logical processors reported by the browser; use them all for parallel hashing
const WORK_COUNT = navigator.hardwareConcurrency || 4
export async function cutFile(file: File, chunkSize: number = CHUNK_SIZE) {
  return new Promise((resolve, reject) => {
    try {
      const result: any = []
      const chunkCount = Math.ceil(file.size / chunkSize)
      // Number of chunks each worker thread has to handle
      const WORK_CHUNK_COUNT = Math.ceil(chunkCount / WORK_COUNT)
      let finishCount = 0
      for (let i = 0; i < WORK_COUNT; i++) {
        const worker = new Worker(new URL('./webworker.js', import.meta.url), {
          type: 'module',
        })
        const startIndex = i * WORK_CHUNK_COUNT
        let endIndex = startIndex + WORK_CHUNK_COUNT
        if (endIndex > chunkCount) {
          endIndex = chunkCount
        }
        worker.postMessage({ file, chunkSize, startIndex, endIndex })
        worker.onmessage = (e) => {
          for (let i = startIndex; i < endIndex; i++) {
            result[i] = e.data[i - startIndex]
          }
          worker.terminate() // this worker's range is done, stop the thread
          finishCount++ // count workers that have reported back
          // Once every worker has finished, resolve with the ordered chunk list
          if (finishCount === WORK_COUNT) {
            resolve(result)
          }
        }
      }
    }
    catch (error) {
      reject(error)
    }
  })
}
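The worker script './webworker.js' that cutFile.ts spawns is among the six changed files but is not rendered in this view. Below is a minimal sketch of what it plausibly contains, inferred only from the message shape posted above ({ file, chunkSize, startIndex, endIndex }) and the ordered array cutFile.ts reads back; treat it as an assumption, not the committed file:

import { createChunk } from './createChunk'

onmessage = async (e) => {
  const { file, chunkSize, startIndex, endIndex } = e.data
  // Hash every chunk in this worker's [startIndex, endIndex) range
  const promises = []
  for (let i = startIndex; i < endIndex; i++) {
    promises.push(createChunk(file, i, chunkSize))
  }
  // Results stay ordered by chunk index, matching e.data[i - startIndex] in cutFile.ts
  postMessage(await Promise.all(promises))
}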
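Likewise, webworker.vue (the view registered in the router above) is not rendered in this view. A hedged sketch of how such a view might call cutFile, with the onFileChange handler name assumed for illustration:

import { cutFile } from './file_utils/cutFile'

// Hand the selected file to cutFile and receive the ordered chunk descriptors
async function onFileChange(e) {
  const [file] = e.target.files
  if (!file)
    return
  // Each entry is { start, end, index, hash } as produced by createChunk
  const chunks = await cutFile(file)
  console.log(chunks)
}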
