// chunkUploadMD5.js — split a selected file into 2 MB chunks, compute its MD5 with SparkMD5, and upload the chunks via FormData.
onFileChange(e) { var files = e.target.files || e.dataTransfer.files; if (!files.length)return; this.createChunk(files); }, createChunk:function (rfile) { var vm = this; var blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice, file = rfile[0], fileName = rfile[0].name, fileSize = rfile[0].size, chunkSize = 2*1024*1024, // Read in chunks of 2MB = 2097152 chunks = Math.ceil(file.size / chunkSize), // clip file to chunks currentChunk = 0, spark = new SparkMD5.ArrayBuffer(), // return hex hash code fileReader = new FileReader(); // create fileReader obj vm.thefilesize = (file.size/1024/1024).toFixed(2); fileReader.onload = function (e) { vm.hashTEXT = ((currentChunk/chunks)*100).toFixed(2); console.log('read chunk on', currentChunk + 1, 'of', chunks,':',((currentChunk/chunks)*100),'%'); spark.append(e.target.result); // Append array buffer currentChunk++; if (currentChunk < chunks) { loadNext(); // Continue the next chunk } else { vm.fileHASH = spark.end(); // finished sparkHash and return hashcode console.log('finished loading 100%'); console.info('computed hash', vm.fileHASH); // Compute hash vm.hashTEXT = 100; vm.files.push({ "name":file.name, "file":e.target.result, "type":file.type, }); console.log('chunkarr:',vm.chunkarr) } }; fileReader.onerror = function () { console.warn('oops, something went wrong.'); }; function loadNext() { var start = currentChunk * chunkSize; var end = ((start + chunkSize) >= file.size) ? 
file.size : start + chunkSize; var ckitem = { file:blobSlice.call(file, start, end), total_blob_num:chunks, blob_num:(currentChunk+1), file_name:file.name, begin:start, end:end } vm.chunkarr.push(ckitem); // store each chunk if(!/(video)/.test(file.type)){ fileReader.readAsDataURL(file); // create base64 data for image preview }else{ fileReader.readAsArrayBuffer(blobSlice.call(file, start, end)); } } loadNext(); }, /** * do upload file by FormData */ doUploadChunk:function (i) { if($("#fileinput").val()==''){ this.fail("内容不能为空"); return false; } var vm = this; var fd = new FormData(); // 最好是新建一个空的FormData对象 fd.append('file_hash',vm.fileHASH); // 文件哈希码 fd.append('file',vm.chunkarr[i].file); // 文件块 fd.append('total_blob_num',vm.chunkarr[i].total_blob_num); // 总片段数 fd.append('blob_num',vm.chunkarr[i].blob_num); // 当前片段 fd.append('file_name',vm.chunkarr[i].file_name); // 当前片段 var url = this.prefix+'/addArticleUpload/'+vm.$route.params.m_id+'?chunk='+i; axios.post(url,fd).then(res=>{ if(res.data.code==1){ this.loadednum++; console.log('片段',i,'上传完成',this.percent) this.percent = (((this.loadednum)*(100/this.chunkarr[i].total_blob_num)).toFixed(2))+'%'; }else if(res.data.code==3){ console.log('片段',i,'上传失败',this.percent) }else if(res.data.code==2){ this.percent = '100%'; this.success("全部上传成功") } }).catch(err=>{ console.log('片段',i,'上传失败',this.percent) }) },