This article explains how to implement chunked (multipart) uploads with Node.js. The content is laid out step by step and is easy to follow along with; hopefully you will get something useful out of reading it. Let's take a look.
Uploading a large file takes a lot of time, and the upload can fail partway through. Solving this requires the front end and the back end to work together.
Steps to solve it:
Split the file into chunks, so each request takes less time; if a request fails, only that chunk has to be re-uploaded instead of starting over from the beginning
Notify the server to merge the chunks once they have all been uploaded
Limit the number of concurrent requests to avoid exhausting the browser's memory
If a request fails because of the network or for any other reason, resend it
In JavaScript, the File object is a subclass of Blob, which provides an important method, slice. Using slice we can split a binary file like this:
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Document</title>
    <script src="https://cdn.bootcdn.net/ajax/libs/axios/0.24.0/axios.min.js"></script>
</head>
<body>
    <input type="file" multiple="multiple" id="fileInput" />
    <button onclick="SliceUpload()">Upload</button>
    <script>
        // SliceUpload must be async because it awaits the upload requests
        async function SliceUpload() {
            const file = document.getElementById('fileInput').files[0]
            if (!file) return

            // Split the file into chunks
            let size = 1024 * 50; // 50KB chunk size
            let fileChunks = [];
            let index = 0;        // chunk number
            for (let cur = 0; cur < file.size; cur += size) {
                fileChunks.push({
                    hash: index++,
                    chunk: file.slice(cur, cur + size),
                });
            }

            // Upload each chunk
            const uploadList = fileChunks.map((item) => {
                let formData = new FormData();
                formData.append("filename", file.name);
                formData.append("hash", item.hash);
                formData.append("chunk", item.chunk);
                return axios({
                    method: "post",
                    url: "/upload",
                    data: formData,
                });
            });
            await Promise.all(uploadList);

            // All chunks are uploaded; ask the server to merge them
            await axios({
                method: "get",
                url: "/merge",
                params: {
                    filename: file.name,
                },
            });
            console.log("Upload complete");
        }
    </script>
</body>
</html>
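The 50KB chunk size above is only for demonstration; in practice chunk sizes in the megabyte range are common. The chunk count for a given file follows directly from the chosen size (a quick illustration; the numbers below are ours, not from the original):

// With a larger chunk size, far fewer requests are needed
const size = 5 * 1024 * 1024                    // e.g. 5MB per chunk
const chunkCount = Math.ceil(file.size / size)  // number of chunks for the selected file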
If the file is large, splitting it this way produces a lot of chunks, and the browser will fire a huge number of requests in a short time (a 1GB file split into 50KB chunks comes out to roughly 21,000 requests), which can exhaust memory. So the number of concurrent requests needs to be controlled.
Here we use Promise.race() to limit how many requests are in flight at once and avoid exhausting the browser's memory.
// With concurrency control added
async function SliceUpload() {
    const file = document.getElementById('fileInput').files[0]
    if (!file) return

    // Split the file into chunks
    let size = 1024 * 50; // 50KB chunk size
    let fileChunks = [];
    let index = 0;        // chunk number
    for (let cur = 0; cur < file.size; cur += size) {
        fileChunks.push({
            hash: index++,
            chunk: file.slice(cur, cur + size),
        });
    }

    let pool = []; // concurrency pool
    let max = 3;   // maximum concurrency
    for (let i = 0; i < fileChunks.length; i++) {
        let item = fileChunks[i];
        let formData = new FormData();
        formData.append("filename", file.name);
        formData.append("hash", item.hash);
        formData.append("chunk", item.chunk);

        // Upload the chunk
        let task = axios({
            method: "post",
            url: "/upload",
            data: formData,
        });
        task.then(() => {
            // Remove the finished request from the pool
            let index = pool.findIndex((t) => t === task);
            pool.splice(index, 1);
        });

        // Put the request into the pool; once the pool is full,
        // wait for the fastest in-flight request to settle before starting the next one
        pool.push(task);
        if (pool.length === max) {
            await Promise.race(pool);
        }
    }
    // Wait for the remaining in-flight requests to finish
    await Promise.all(pool);

    // All chunks are uploaded; ask the server to merge them
    await axios({
        method: "get",
        url: "/merge",
        params: {
            filename: file.name,
        },
    });
    console.log("Upload complete");
}
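The pattern above is generic: push each request into a pool, and once the pool is full, await Promise.race(pool) so a new request only starts after the fastest in-flight one settles. As a side note, it can be factored into a reusable helper; the sketch below is our own addition (the asyncPool name and signature are not part of the original code):

// A minimal generic concurrency pool (sketch): run fn(item) for every item,
// with at most `max` promises in flight at any moment.
async function asyncPool(max, items, fn) {
    const pool = []      // in-flight tasks
    const results = []   // one promise per item, in input order
    for (const item of items) {
        const task = Promise.resolve().then(() => fn(item))
        results.push(task)
        // When the task settles (success or failure), free its slot in the pool
        const slot = task.then(() => {}, () => {}).then(() => {
            pool.splice(pool.indexOf(slot), 1)
        })
        pool.push(slot)
        if (pool.length >= max) {
            await Promise.race(pool)
        }
    }
    // Wait for everything and surface any error to the caller
    return Promise.all(results)
}

// Usage with the chunks from above (assumes file and fileChunks are in scope):
// await asyncPool(3, fileChunks, (item) => {
//     const formData = new FormData()
//     formData.append('filename', file.name)
//     formData.append('hash', item.hash)
//     formData.append('chunk', item.chunk)
//     return axios({ method: 'post', url: '/upload', data: formData })
// })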
So far nothing handles a failed request. In the version below, failed chunks are collected into a failure list and uploaded again once every request in the current round has settled:

function SliceUpload() {
    const file = document.getElementById('fileInput').files[0]
    if (!file) return

    // Split the file into chunks
    let size = 1024 * 50; // chunk size
    let fileChunks = [];
    let index = 0;        // chunk number
    for (let cur = 0; cur < file.size; cur += size) {
        fileChunks.push({
            hash: index++,
            chunk: file.slice(cur, cur + size),
        });
    }

    const uploadFileChunks = async function (list) {
        if (list.length === 0) {
            // All chunks are uploaded; ask the server to merge them
            await axios({
                method: 'get',
                url: '/merge',
                params: {
                    filename: file.name
                }
            });
            console.log('Upload complete')
            return
        }

        let pool = []     // concurrency pool
        let max = 3       // maximum concurrency
        let finish = 0    // number of settled requests
        let failList = [] // failed chunks

        for (let i = 0; i < list.length; i++) {
            let item = list[i]
            let formData = new FormData()
            formData.append('filename', file.name)
            formData.append('hash', item.hash)
            formData.append('chunk', item.chunk)

            let task = axios({
                method: 'post',
                url: '/upload',
                data: formData
            })
            task.then(() => {
                // Remove the finished request from the pool
                let index = pool.findIndex(t => t === task)
                pool.splice(index, 1)
            }).catch(() => {
                failList.push(item)
            }).finally(() => {
                finish++
                // Once every request has settled, re-upload the failed chunks
                if (finish === list.length) {
                    uploadFileChunks(failList)
                }
            })

            pool.push(task)
            if (pool.length === max) {
                await Promise.race(pool)
            }
        }
    }

    uploadFileChunks(fileChunks)
}
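One thing to be aware of: this version keeps retrying failed chunks until they all succeed, so if the server stays unreachable it will loop forever. A simple way to bound this is to cap the attempts per chunk; the sketch below is our own addition (uploadChunkWithRetry and MAX_RETRY are not from the original) and assumes the same /upload interface:

const MAX_RETRY = 3 // give up on a chunk after 3 failed attempts

// Upload a single chunk, retrying up to MAX_RETRY times before rethrowing
async function uploadChunkWithRetry(filename, item, attempt = 1) {
    const formData = new FormData()
    formData.append('filename', filename)
    formData.append('hash', item.hash)
    formData.append('chunk', item.chunk)
    try {
        return await axios({ method: 'post', url: '/upload', data: formData })
    } catch (error) {
        if (attempt >= MAX_RETRY) throw error // out of attempts, let the caller decide
        return uploadChunkWithRetry(filename, item, attempt + 1)
    }
}

Calling uploadChunkWithRetry(file.name, item) inside the loop instead of axios(...) directly gives every chunk at most MAX_RETRY attempts before the upload is reported as failed.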
On the server side, we use express to host the page and expose the two interfaces, and multiparty to parse the uploaded chunks. Each chunk is saved into a temporary directory named after the file, and the /merge interface concatenates the chunks into the final file:

const express = require('express')
const multiparty = require('multiparty')
const fs = require('fs')
const path = require('path')
const { Buffer } = require('buffer')

// Directory for merged files
const STATIC_FILES = path.join(__dirname, './static/files')
// Temporary directory for uploaded chunks
const STATIC_TEMPORARY = path.join(__dirname, './static/temporary')

const server = express()
// Static file hosting
server.use(express.static(path.join(__dirname, './dist')))

// Interface for uploading chunks
server.post('/upload', (req, res) => {
    const form = new multiparty.Form();
    form.parse(req, function (err, fields, files) {
        if (err) return res.status(500).send('Failed to parse the upload')
        let filename = fields.filename[0]
        let hash = fields.hash[0]
        let chunk = files.chunk[0]
        let dir = `${STATIC_TEMPORARY}/${filename}`
        try {
            if (!fs.existsSync(dir)) fs.mkdirSync(dir)
            const buffer = fs.readFileSync(chunk.path)
            const ws = fs.createWriteStream(`${dir}/${hash}`)
            ws.end(buffer) // write the chunk and close the stream
            res.send(`${filename}-${hash} chunk uploaded successfully`)
        } catch (error) {
            console.error(error)
            res.status(500).send(`${filename}-${hash} chunk upload failed`)
        }
    })
})

// Interface for merging chunks
server.get('/merge', async (req, res) => {
    const { filename } = req.query
    try {
        let len = 0
        // Chunks are named 0, 1, 2, ... so they are read back in order by index
        const bufferList = fs.readdirSync(`${STATIC_TEMPORARY}/${filename}`).map((hash, index) => {
            const buffer = fs.readFileSync(`${STATIC_TEMPORARY}/${filename}/${index}`)
            len += buffer.length
            return buffer
        });
        // Merge the chunks into the final file
        const buffer = Buffer.concat(bufferList, len);
        const ws = fs.createWriteStream(`${STATIC_FILES}/${filename}`)
        ws.end(buffer);
        res.send(`Chunk merge completed`);
    } catch (error) {
        console.error(error);
        res.status(500).send('Chunk merge failed');
    }
})

server.listen(3000, _ => {
    console.log('http://localhost:3000/')
})
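Two details worth noting about the server code: it assumes ./static/files and ./static/temporary already exist, and the per-file chunk directory is left behind after merging. A minimal sketch of how both could be handled (our own addition; fs.rmSync requires Node.js 14.14 or later):

// Make sure the target directories exist before the server starts listening
for (const dir of [STATIC_FILES, STATIC_TEMPORARY]) {
    if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true })
}

// After a successful merge, the temporary chunk directory can be removed,
// for example at the end of the /merge handler:
// fs.rmSync(`${STATIC_TEMPORARY}/${filename}`, { recursive: true, force: true })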
That's all for how to implement chunked uploads with Node.js. Thanks for reading!