Large file upload #42

Open
mtonhuang opened this issue Feb 23, 2024 · 0 comments

mtonhuang commented Feb 23, 2024

```vue
<template>
  <input type="file" @change="handleFileSelect" />
  <div>{{ uploadStatus }}</div>
  <progress v-if="uploadProgress > 0" max="100" :value="uploadProgress">{{ uploadProgress }}%</progress>
  <button v-if="isUploading" @click="handleCancelUpload">Cancel Upload</button>
</template>

<script>
import { ref } from "vue";
import axios from "axios";
import SparkMD5 from "spark-md5";

export default {
  setup() {
    const UPLOAD_ENDPOINT = "your_backend_endpoint";
    const CHUNK_SIZE = 5 * 1024 * 1024; // 5 MB
    const uploadStatus = ref("");
    const uploadProgress = ref(0);
    const isUploading = ref(false);
    const progress = ref({}); // map of chunkIndex -> true for chunks already uploaded
    const fileHash = ref("");
    const fileSize = ref(0); // size of the selected file, used to compute overall progress
    let cancelTokens = []; // axios cancel-token sources for in-flight chunk requests

    // Handle file selection
    const handleFileSelect = async (event) => {
      const file = event.target.files[0];
      if (!file) {
        uploadStatus.value = "No file selected.";
        return;
      }
      isUploading.value = true;
      fileSize.value = file.size;
      // Resume from any previously stored progress for this file
      fileHash.value = await getFileHash(file);
      progress.value = getStoredUploadProgress(fileHash.value) || {};
      uploadStatus.value = "Start uploading...";
      await uploadLargeFile(file);
    };

    // Upload a single chunk
    const uploadChunk = (chunk, chunkIndex, totalChunks) => {
      const formData = new FormData();
      // Package the chunk and its metadata
      formData.append("chunk", chunk);
      formData.append("hash", fileHash.value + "-" + chunkIndex);
      formData.append("name", fileHash.value); // extra info such as the original file name could also be sent here

      // Register a cancel token so handleCancelUpload can abort this request
      const source = axios.CancelToken.source();
      cancelTokens.push(source);

      return axios.post(UPLOAD_ENDPOINT, formData, {
        cancelToken: source.token,
        onUploadProgress: (progressEvent) => {
          const progressPercentage = Math.round((progressEvent.loaded / progressEvent.total) * 100);
          uploadStatus.value = `Uploading chunk ${chunkIndex + 1}/${totalChunks}: ${progressPercentage}%`;
        },
      });
    };

    // Ask the server to merge the uploaded chunks
    const notifyServerToMerge = async () => {
      uploadStatus.value = "Uploading done. Asking server to merge chunks.";
      const response = await axios.post(`${UPLOAD_ENDPOINT}/merge`, {
        name: fileHash.value,
        size: CHUNK_SIZE,
      });
      uploadStatus.value = response.data.message;
      cleanupProgress(fileHash.value); // clear the locally stored progress
    };

    // Upload the file chunk by chunk, at most four chunks in flight at a time
    const uploadLargeFile = async (file) => {
      const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
      let promises = [];
      let batchIndices = [];
      for (let chunkIndex = 0; chunkIndex < totalChunks; chunkIndex++) {
        if (progress.value[chunkIndex]) { // skip chunks that were already uploaded
          continue;
        }
        const chunk = file.slice(chunkIndex * CHUNK_SIZE, Math.min((chunkIndex + 1) * CHUNK_SIZE, file.size));
        promises.push(uploadChunk(chunk, chunkIndex, totalChunks));
        batchIndices.push(chunkIndex);
        if (promises.length >= 4 || chunkIndex === totalChunks - 1) { // upload at most 4 chunks concurrently
          try {
            // wait for the current batch to finish
            await Promise.all(promises);
            batchIndices.forEach((i) => updateProgress(i)); // mark every chunk in the batch as uploaded
          } catch (e) {
            uploadStatus.value = `Failed to upload chunk ${chunkIndex}. ${e}`;
            break;
          }
          promises = [];
          batchIndices = [];
          cancelTokens = []; // requests in this batch have settled, drop their cancel tokens
        }
      }

      if (Object.keys(progress.value).length === totalChunks) {
        await notifyServerToMerge();
        isUploading.value = false;
      }
    };

    // Record that a chunk has been uploaded
    const updateProgress = (chunkIndex) => {
      progress.value[chunkIndex] = true;
      storeUploadProgress(fileHash.value, progress.value);
      calculateTotalProgress();
    };

    // Compute the overall upload progress
    const calculateTotalProgress = () => {
      const uploadedChunks = Object.keys(progress.value).length;
      const totalChunks = Math.ceil(fileSize.value / CHUNK_SIZE);
      uploadProgress.value = Math.floor((uploadedChunks / totalChunks) * 100);
    };

    // Persist the upload progress to localStorage (keyed by the file hash)
    const storeUploadProgress = (fileHash, progress) => {
      localStorage.setItem(fileHash, JSON.stringify(progress));
    };

    // Remove the locally stored upload progress
    const cleanupProgress = (fileHash) => {
      localStorage.removeItem(fileHash);
      progress.value = {};
      uploadProgress.value = 0;
    };

    // Read any previously stored upload progress
    const getStoredUploadProgress = (fileHash) => {
      const progressJSON = localStorage.getItem(fileHash);
      return progressJSON ? JSON.parse(progressJSON) : null;
    };

    // Cancel an in-progress upload
    const handleCancelUpload = () => {
      // The file is split into many chunks uploaded in separate requests, so every
      // in-flight axios request must be cancelled via the cancel-token sources
      // collected in uploadChunk.
      cancelTokens.forEach((source) => {
        source.cancel("Upload cancelled by the user.");
      });
      cancelTokens = [];

      // Update the status and clean up upload-related state such as the stored progress
      isUploading.value = false;
      uploadStatus.value = "Upload cancelled.";
      uploadProgress.value = 0;
      cleanupProgress(fileHash.value);
    };

    // Compute the file's MD5 hash with SparkMD5; it is used as the upload identifier
    const getFileHash = (file) => {
      return new Promise((resolve, reject) => {
        const fileReader = new FileReader();
        const spark = new SparkMD5.ArrayBuffer();
        const slice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
        const chunkSize = 2097152; // hash the file in 2 MB slices (adjust as needed)
        const chunks = Math.ceil(file.size / chunkSize);
        let currentChunk = 0;

        fileReader.onload = (e) => {
          spark.append(e.target.result); // append the ArrayBuffer to the running hash
          currentChunk++;

          if (currentChunk < chunks) {
            loadNext();
          } else {
            resolve(spark.end());
          }
        };

        fileReader.onerror = () => {
          reject(new Error("Unable to read the file."));
        };

        function loadNext() {
          const start = currentChunk * chunkSize;
          const end = Math.min(start + chunkSize, file.size);
          fileReader.readAsArrayBuffer(slice.call(file, start, end));
        }

        loadNext();
      });
    };

    return {
      handleFileSelect,
      uploadStatus,
      uploadProgress,
      isUploading,
      handleCancelUpload,
    };
  },
};
</script>
```
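
For reference, here is a minimal sketch of a matching backend, assuming Node.js with Express and multer. The `/upload` and `/upload/merge` routes, the `chunks/` directory, and the on-disk layout are assumptions chosen to mirror the `chunk` / `hash` / `name` fields the client sends; none of this comes from the component above.

```js
// merge-server-sketch.js: hypothetical backend, not part of the original issue
const express = require("express");
const multer = require("multer");
const fs = require("fs");
const path = require("path");

const app = express();
app.use(express.json()); // the /merge request body is JSON

const CHUNK_DIR = path.join(__dirname, "chunks"); // assumed storage location
const upload = multer({ dest: CHUNK_DIR });       // multer writes each chunk to a temp file

// Receives one chunk per request; the field names match the client's FormData.
app.post("/upload", upload.single("chunk"), (req, res) => {
  const { hash, name } = req.body;                // hash is "<fileHash>-<chunkIndex>"
  const dir = path.join(CHUNK_DIR, name);
  fs.mkdirSync(dir, { recursive: true });
  fs.renameSync(req.file.path, path.join(dir, hash)); // move the temp file into place
  res.json({ message: `Chunk ${hash} received` });
});

// Concatenates all stored chunks for a file, in index order, into one file.
app.post("/upload/merge", (req, res) => {
  const { name } = req.body;                      // the client sends the file hash as "name"
  const dir = path.join(CHUNK_DIR, name);
  const chunkFiles = fs
    .readdirSync(dir)
    .sort((a, b) => Number(a.split("-").pop()) - Number(b.split("-").pop()));

  const out = fs.createWriteStream(path.join(CHUNK_DIR, `${name}.merged`));
  for (const f of chunkFiles) {
    out.write(fs.readFileSync(path.join(dir, f))); // append each chunk in order
  }
  out.end(() => {
    fs.rmSync(dir, { recursive: true, force: true }); // discard the chunk directory
    res.json({ message: "File merged successfully." });
  });
});

app.listen(3000);
```

With this sketch, `UPLOAD_ENDPOINT` on the client would point at something like `http://localhost:3000/upload`. The `size: CHUNK_SIZE` value posted to `/merge` is ignored here; a real server would normally use it (or an expected chunk count) to verify that every chunk arrived before merging.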