MinIO resumable (breakpoint) and multipart/chunked upload

2023-12-18 14:33:52

HomeView.vue
<template>
  <div class="home">
    <el-card style="width: 80%; margin: 80px auto" header="Chunked file upload">
        <el-upload
            class="upload-demo"
            drag
            action="/"
            multiple
            :http-request="handleHttpRequest"
            :on-remove="handleRemoveFile">
            <el-icon class="el-icon--upload"><upload-filled /></el-icon>
            <div class="el-upload__text">
                Drag a file here or <em>click to upload</em>
            </div>
        </el-upload>
    </el-card>
  </div>
</template>
<script>
import md5 from "./../lib/md5";
import { taskInfo, initTask, preSignUrl, merge } from './../lib/api';
import Queue from 'promise-queue-plus';
import axios from 'axios'
// ElNotification and the upload icon are used below and in the template, so import them explicitly
import { ElNotification } from 'element-plus'
import { UploadFilled } from '@element-plus/icons-vue'

export default {
  name: 'HomeView',
  components: { UploadFilled },
  data() {
    return {
      // one chunk-upload queue per file, keyed by the el-upload file uid
      fileUploadChunkQueue: {}
    }
  },
  methods: {
    /**
     * Get the upload task for this file; initialize one if it does not exist yet.
     */
    async getTaskInfo(file) {
      let task
      const identifier = await md5(file)
      const { code, data, msg } = await taskInfo(identifier)
      if (code === 200000) {
        task = data
        if (!task) {
          const initTaskData = {
            identifier,
            fileName: file.name,
            totalSize: file.size,
            chunkSize: 5 * 1024 * 1024
          }
          const { code, data, msg } = await initTask(initTaskData)
          if (code === 200000) {
            task = data
          } else {
            ElNotification.error({
              title: 'File upload error',
              message: msg
            })
          }
        }
      } else {
        ElNotification.error({
          title: 'File upload error',
          message: msg
        })
      }
      return task
    },

    /**
     * Upload handling. If the file has already been uploaded completely
     * (i.e. its chunks have been merged), this method is never reached.
     */
    handleUpload(file, taskRecord, options) {
      let lastUploadedSize = 0 // bytes already on the server from a previous (resumed) upload
      let uploadedSize = 0     // bytes uploaded so far
      const totalSize = file.size || 0 // total file size
      const startMs = new Date().getTime() // time the upload started
      const { exitPartList, chunkSize, chunkNum, fileIdentifier } = taskRecord

      // Average speed (bytes/s) since the upload started
      const getSpeed = () => {
        // total uploaded - size uploaded before the resume = bytes uploaded in this session
        const intervalSize = uploadedSize - lastUploadedSize
        const nowMs = new Date().getTime()
        // elapsed time in seconds
        const intervalTime = (nowMs - startMs) / 1000
        return intervalSize / intervalTime
      }

      const uploadNext = async (partNumber) => {
        const start = Number(chunkSize) * (partNumber - 1)
        const end = start + Number(chunkSize)
        const blob = file.slice(start, end)
        const { code, data, msg } = await preSignUrl({ identifier: fileIdentifier, partNumber: partNumber })
        if (code === 200000 && data) {
          // PUT the chunk directly to the pre-signed MinIO URL
          await axios.request({
            url: data,
            method: 'PUT',
            data: blob,
            headers: { 'Content-Type': 'application/octet-stream' }
          })
          return Promise.resolve({ partNumber: partNumber, uploadedSize: blob.size })
        }
        return Promise.reject(`Chunk ${partNumber}: failed to get a pre-signed upload URL`)
      }

      /**
       * Update the upload progress
       * @param increment number of bytes to add to the uploaded total
       */
      const updateProcess = (increment) => {
        increment = Number(increment)
        const { onProgress } = options
        const factor = 1000 // advance in 1000-byte steps
        let from = 0
        // grow the progress gradually instead of jumping in one step
        while (from <= increment) {
          from += factor
          uploadedSize += factor
          const percent = Number((uploadedSize / totalSize * 100).toFixed(2))
          onProgress({ percent: percent })
        }

        const speed = getSpeed()
        const remainingTime = speed != 0 ? Math.ceil((totalSize - uploadedSize) / speed) + 's' : 'unknown'
        console.log('remaining:', (totalSize - uploadedSize) / 1024 / 1024, 'MB')
        console.log('speed:', (speed / 1024 / 1024).toFixed(2), 'MB/s')
        console.log('estimated time left:', remainingTime)
      }


      return new Promise(resolve => {
        const failArr = []
        const queue = Queue(5, {
          retry: 3,           // retry a failed chunk up to 3 times
          retryIsJump: false, // if true retry immediately, otherwise requeue at the end
          workReject: function (reason, queue) {
            failArr.push(reason)
          },
          queueEnd: function (queue) {
            resolve(failArr)
          }
        })
        this.fileUploadChunkQueue[file.uid] = queue
        for (let partNumber = 1; partNumber <= chunkNum; partNumber++) {
          const exitPart = (exitPartList || []).find(exitPart => exitPart.partNumber == partNumber)
          if (exitPart) {
            // This chunk is already uploaded: count it as done and remember it as the
            // resumed size, which is excluded when computing the upload speed.
            lastUploadedSize += Number(exitPart.size)
            updateProcess(exitPart.size)
          } else {
            queue.push(() => uploadNext(partNumber).then(res => {
              // update the progress only after the whole chunk has been uploaded
              updateProcess(res.uploadedSize)
            }))
          }
        }
        if (queue.getLength() == 0) {
          // every chunk is already uploaded but not merged yet; resolve so the caller can merge
          resolve(failArr)
          return
        }
        queue.start()
      })
    },

    /**
     * Custom upload entry point for el-upload (:http-request)
     */
    async handleHttpRequest(options) {
      const file = options.file
      const task = await this.getTaskInfo(file)
      if (task) {
        const { finished, path, taskRecord } = task
        const { fileIdentifier: identifier } = taskRecord
        if (finished) {
          // the file is already fully uploaded and merged, so return its path directly
          return path
        } else {
          const errorList = await this.handleUpload(file, taskRecord, options)
          if (errorList.length > 0) {
            ElNotification.error({
              title: 'File upload error',
              message: 'Some chunks failed to upload, please try uploading the file again'
            })
            return
          }
          const { code, data, msg } = await merge(identifier)
          if (code === 200000) {
            return path
          } else {
            ElNotification.error({
              title: 'File upload error',
              message: msg
            })
          }
        }
      } else {
        ElNotification.error({
          title: 'File upload error',
          message: 'Failed to get an upload task'
        })
      }
    },

    /**
     * Remove a file from the upload list.
     * If the file still has an upload queue, stop that queue.
     */
    handleRemoveFile(uploadFile, uploadFiles) {
      const queueObject = this.fileUploadChunkQueue[uploadFile.uid]
      if (queueObject) {
        queueObject.stop()
        this.fileUploadChunkQueue[uploadFile.uid] = undefined
      }
    }


    
  }
}
</script>
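
The component above relies on a response contract that is only implied by the code: every endpoint wraps its payload in a { code, data, msg } envelope with 200000 as the success code, and the task payload carries the fields destructured in getTaskInfo and handleUpload. The shape below is a sketch reconstructed from how those fields are consumed; the concrete values are placeholders and the real backend contract may differ.

// Assumed response shape for GET /v1/minio/tasks/{identifier} (and POST /v1/minio/tasks),
// inferred from the fields the component reads; values here are placeholders.
const exampleTaskResponse = {
  code: 200000,            // success code checked throughout the component
  msg: null,               // error message shown via ElNotification on failure
  data: {
    finished: false,       // true => chunks are already merged and `path` is usable
    path: null,            // final object path once the upload has finished
    taskRecord: {
      fileIdentifier: 'e99a18c428cb38d5f260853678922e03', // md5 of the file
      chunkSize: 5 * 1024 * 1024, // chunk size agreed on when the task was initialized
      chunkNum: 13,               // total number of chunks
      exitPartList: [             // chunks the server has already received (used for resuming)
        { partNumber: 1, size: 5 * 1024 * 1024 }
      ]
    }
  }
}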

md5.js

import SparkMD5 from 'spark-md5'
const DEFAULT_SIZE = 20 * 1024 * 1024
const md5 = (file, chunkSize = DEFAULT_SIZE) => {
    return new Promise((resolve, reject) => {
        const startMs = new Date().getTime();
        let blobSlice =
            File.prototype.slice ||
            File.prototype.mozSlice ||
            File.prototype.webkitSlice;
        let chunks = Math.ceil(file.size / chunkSize);
        let currentChunk = 0;
        let spark = new SparkMD5.ArrayBuffer(); // incremental md5 over ArrayBuffer chunks
        let fileReader = new FileReader();      // reads each chunk of the file
        fileReader.onload = function (e) {
            spark.append(e.target.result);
            currentChunk++;
            if (currentChunk < chunks) {
                loadNext();
            } else {
                const md5 = spark.end(); // finish the md5 computation and return the hex digest
                console.log('md5 finished, total time:', (new Date().getTime() - startMs) / 1000, 's')
                resolve(md5);
            }
        };
        fileReader.onerror = function (e) {
            reject(e);
        };

        function loadNext() {
            console.log('current chunk:', currentChunk, 'total chunks:', chunks);
            let start = currentChunk * chunkSize;
            let end = start + chunkSize;
            (end > file.size) && (end = file.size);
            fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
        }
        loadNext();
    });
}

export default md5
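
A minimal usage sketch of this helper; the input element below is hypothetical and only serves to obtain a File object:

// Hypothetical usage: grab a File from an <input type="file"> and hash it.
const input = document.querySelector('input[type="file"]')
input.addEventListener('change', async () => {
    const file = input.files[0]
    const identifier = await md5(file) // hex digest, used as the upload task identifier
    console.log('file identifier:', identifier)
})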

api.js

import axios from 'axios'
import axiosExtra from 'axios-extra'
const baseUrl = 'http://localhost:3080'

const http = axios.create({
    baseURL: baseUrl
})

const httpExtra = axiosExtra.create({
    maxConcurrent: 5, // allow up to 5 concurrent requests
    queueOptions: {
        retry: 3,          // retry a failed request up to 3 times
        retryIsJump: false // if true retry immediately, otherwise append the retry to the end of the queue
    }
})

// unwrap the axios response so callers receive the body ({ code, data, msg }) directly
http.interceptors.response.use(response => {
    return response.data
})

/**
 * Get the unfinished upload task for a file by its md5
 * @param identifier file md5
 * @returns {Promise<AxiosResponse<any>>}
 */
const taskInfo = (identifier) => {
    return http.get(`/v1/minio/tasks/${identifier}`)
}

/**
 * Initialize a multipart upload task
 * @param identifier file md5
 * @param fileName file name
 * @param totalSize total file size
 * @param chunkSize chunk size
 * @returns {Promise<AxiosResponse<any>>}
 */
const initTask = ({ identifier, fileName, totalSize, chunkSize }) => {
    return http.post('/v1/minio/tasks', {identifier, fileName, totalSize, chunkSize})
}

/**
 * Get a pre-signed upload URL for one chunk
 * @param identifier file md5
 * @param partNumber chunk number
 * @returns {Promise<AxiosResponse<any>>}
 */
const preSignUrl = ({ identifier, partNumber }) => {
    return http.get(`/v1/minio/tasks/${identifier}/${partNumber}`)
}

/**
 * Merge the uploaded chunks
 * @param identifier file md5
 * @returns {Promise<AxiosResponse<any>>}
 */
const merge = (identifier) => {
    return http.post(`/v1/minio/tasks/merge/${identifier}`)
}

export {
    taskInfo,
    initTask,
    preSignUrl,
    merge,
    httpExtra
}
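
Putting the pieces together, the flow is: compute the file's md5, look up or initialize an upload task, upload each missing chunk to its pre-signed URL, then ask the backend to merge. Below is a stripped-down, sequential sketch of that flow using only the helpers defined above plus axios; it omits the concurrency, retries, progress reporting and error handling that the Vue component adds, and assumes the { code, data, msg } envelope described earlier.

import axios from 'axios'
import md5 from './md5'
import { taskInfo, initTask, preSignUrl, merge } from './api'

// Sequential sketch of the resumable upload flow (error handling omitted for brevity).
async function uploadFile(file) {
    const identifier = await md5(file)

    // 1. look up an existing task, or create one
    let { data: task } = await taskInfo(identifier)
    if (!task) {
        ({ data: task } = await initTask({
            identifier,
            fileName: file.name,
            totalSize: file.size,
            chunkSize: 5 * 1024 * 1024
        }))
    }
    if (task.finished) return task.path // already uploaded and merged

    const { chunkSize, chunkNum } = task.taskRecord
    const uploadedParts = new Set((task.taskRecord.exitPartList || []).map(p => p.partNumber))

    // 2. upload every chunk the server does not have yet
    for (let partNumber = 1; partNumber <= chunkNum; partNumber++) {
        if (uploadedParts.has(partNumber)) continue // resume: skip chunks already stored
        const blob = file.slice(chunkSize * (partNumber - 1), chunkSize * partNumber)
        const { data: url } = await preSignUrl({ identifier, partNumber })
        await axios.put(url, blob, { headers: { 'Content-Type': 'application/octet-stream' } })
    }

    // 3. ask the backend to merge the chunks into the final object
    await merge(identifier)
    return task.path
}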

Source: https://blog.csdn.net/weixin_44692055/article/details/135061060