md5计算javascript中的文件 [英] md5 calculation for files in javascript

查看:74
本文介绍了md5计算javascript中的文件的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

我正在上传文件,我想为每个文件计算md5



我写的函数 calcMD5 运行良好。问题是，当我选择多个文件时，它会先完成上传，然后才开始计算 md5。我也调试过了，在把所有 blob 发送到服务器并全部上传完成之前，代码甚至不会进入该函数，之后才开始计算。



任何人都可以帮助我这个问题



这是我的代码:





I am uploading files and I want to calculate md5 for each file

I have written the function calcMD5 and it is working well. THE PROBLEM is that when I choose more than one file, it first finishes uploading and then it starts calculating the md5. I have also debugged it, but it does not even enter the function until it sends all the blobs to the server and uploads everything; only then does it start calculating.

Can anybody help me with this issue

this is my code:


$(document).ready(function () {
    // Upload handler: for every selected file, kick off MD5 hashing and
    // upload the file to the server in fixed-size fragments.
    $("#btnUpload").click(function (evt) {
        var fl = document.getElementById("files");
        var L = fl.files.length;
        for (var i = 0; i < L; i++) {
            var file = fl.files[i]; // (original declared this twice)
            calcMD5(file);

            // Split the file into 1 MB fragments.
            var bytes_per_chunk = 1024 * 1024; // 1048576
            var blobs = [];                    // was an undeclared implicit global
            var size = file.size;
            var start = 0;
            while (start < size) {
                // Clamp the last (or only) fragment to the file size; the
                // original used an unclamped initial `end` plus a redundant
                // tracking variable `j` with a stray `;;`.
                var end = Math.min(start + bytes_per_chunk, size);
                blobs.push(file.slice(start, end));
                start = end;
            }

            var fileName = file.name;
            var fileType = file.type;
            var fileUp = (100 * bytes_per_chunk) / file.size; // % progress per chunk
            var rec = 0;
            var count = 0;
            var blob;
            while ((blob = blobs.shift())) {
                rec = Math.min(rec + fileUp, 100);
                // NOTE(review): synchronous XHR (third arg `false`) blocks the
                // UI thread — kept for parity with the original, but an async
                // upload queue is preferable. `xhr` was an undeclared global.
                var xhr = new XMLHttpRequest();
                xhr.open('POST', 'FileUploadHandler.ashx', false);
                xhr.setRequestHeader('X_FILE_NAME', fileName);
                xhr.setRequestHeader('Content-Type', fileType);
                xhr.send(blob);
                count++;
            }
        } // closes the for loop — this brace was missing in the original,
          // so the posted code did not even parse
    });
});

/**
 * Incrementally compute the MD5 hash of a File with SparkMD5, reading it
 * in 1 MB chunks via FileReader so large files do not exhaust memory.
 *
 * Reading is asynchronous: this function returns immediately and the hash
 * is only available once all chunks have been read. To sequence work after
 * hashing (e.g. starting the upload — the asker's actual problem), pass an
 * `onComplete` callback; omitting it preserves the original log-only behavior.
 *
 * @param {File} f                     file to hash
 * @param {function(string)=} onComplete optional callback invoked with the
 *                                     hex digest when hashing finishes
 */
function calcMD5(f, onComplete) {
    // MD5 Calculation
    var blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
    file = f,
    chunkSize = 1024 * 1024,                             // read in chunks of 1MB (original comment wrongly said 2MB)
    chunks = Math.ceil(file.size / chunkSize),
    currentChunk = 0,
    spark = new SparkMD5.ArrayBuffer(),
    fileReader = new FileReader();

    fileReader.onload = function (e) {
        console.log('read chunk nr', currentChunk + 1, 'of', chunks);
        spark.append(e.target.result);                   // Append array buffer
        currentChunk++;

        if (currentChunk < chunks) {
            loadNext();
        } else {
            // Compute the digest exactly once — SparkMD5 resets its state
            // on end(), so a second call would not return the same value.
            var hash = spark.end();
            console.log('finished loading');
            console.info('computed hash', hash);
            if (onComplete) {
                onComplete(hash);
            }
        }
    };

    fileReader.onerror = function () {
        console.warn('oops, something went wrong.');
    };

    // Read the next chunk; the final chunk is clamped to the file size.
    function loadNext() {
        var start = currentChunk * chunkSize,
            end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;

        fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
    }
    loadNext();
}

推荐答案

（答案中引用的代码与问题中的代码相同；原文因机器翻译而严重乱码，以下为整理后的清晰版本：）

$(document).ready(function () {
    $("#btnUpload").click(function (evt) {
        var fl = document.getElementById("files");
        var L = fl.files.length;
        for (var i = 0; i < L; i++) {
            var file = fl.files[i];
            calcMD5(file);
            var bytes_per_chunk = 1024 * 1024; // 1048576
            var start = 0;
            var end = bytes_per_chunk;
            var size = file.size;
            var j = bytes_per_chunk;
            while (start < size) {
                // 将片段推送到数组
                blobs.push(file.slice(start, end));
                start = j;
                j = start + bytes_per_chunk;
                end = start + bytes_per_chunk;
                if (end > size) {
                    end = size;
                }
            }
            var fileName = file.name;
            var fileType = file.type;
            var fileUp = (100 * bytes_per_chunk) / file.size;
            var rec = 0;
            var count = 0;
            var temp = 0;
            while (blob = blobs.shift()) {
                rec = fileUp + rec;
                if (rec > 100) {
                    rec = 100;
                }
                xhr.open('POST', 'FileUploadHandler.ashx', false);
                xhr.setRequestHeader('X_FILE_NAME', fileName);
                xhr.setRequestHeader('Content-Type', fileType);
                xhr.send(blob);
                count++;
            }
        }
    });
});

function calcMD5(f) {
    // MD5 计算
    var blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
    file = f,
    chunkSize = 1024 * 1024, // 按 1MB 分块读取
    chunks = Math.ceil(file.size / chunkSize),
    currentChunk = 0,
    spark = new SparkMD5.ArrayBuffer(),
    fileReader = new FileReader();

    fileReader.onload = function (e) {
        console.log('read chunk nr', currentChunk + 1, 'of', chunks);
        spark.append(e.target.result); // 追加数组缓冲区
        currentChunk++;

        if (currentChunk < chunks) {
            loadNext();
        } else {
            console.log('finished loading');
            console.info('computed hash', spark.end()); // 计算哈希
        }
    };

    fileReader.onerror = function () {
        console.warn('oops, something went wrong.');
    };
    function loadNext() {
        var start = currentChunk * chunkSize,
            end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;

        fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
    }
    loadNext();
}


我已经解决了我的问题

解决方案是开始切片文件

I already Solved my problem
the solution is to start slicing the file in
// Modified onload handler from the accepted solution: instead of slicing and
// uploading all blobs up front, the upload (UploadFile) is started only after
// the MD5 hash has been fully computed, so hashing and uploading are sequenced.
// NOTE(review): `i`, `ret` and `UploadFile` are defined outside this fragment;
// `ret` stores the digest for the upload step.
fileReader.onload = function (e) {
    i++;
    console.log('read chunk nr', currentChunk + 1, 'of', chunks);
    spark.append(e.target.result);                   // Append array buffer
    currentChunk++;
    if (currentChunk < chunks) {
        loadNext();
    }else 
    {
         console.log('finished loading');
         console.info('computed hash', spark.end());  // Compute hash
         // NOTE(review): spark.end() is called twice here; SparkMD5 resets its
         // internal state on end(), so `ret` likely does not hold the correct
         // digest — the first result should be stored and reused. Confirm.
         ret = spark.end();
         UploadFile(); // here in upload part we do the file slicing
    }
}







无论如何,谢谢:)




Thanks anyway :)


这篇关于md5计算javascript中的文件的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆