在flow.js上传到node / express服务器后,重新组装二进制文件 [英] reassemble binary after flow.js upload on node/express server
问题描述
我无法弄清楚如何将flow.js库与节点后端一起使用,并将我的代码放在 flow.js github。
获取blob文件,但是我没有建立上传完成后的二进制文件。最后的get不会被触发,或者我的路线是错误的:
// GET /download/:identifier — reassemble the uploaded chunks and stream
// the resulting binary back to the client via flow.write.
// NOTE(review): the scraped original had stray spaces in the path
// ('/ download /:identifier'), which would register the wrong route;
// fixed to match the clean listing later in the article.
app.get('/download/:identifier', function(req, res) {
    console.log('we writin');
    flow.write(req.params.identifier, res);
});
如果有人有这方面的经验并能解答,肯定能赢得大量 StackOverflow 声望,因为在同时使用 node.js 和 flow.js 时这似乎是一个常见问题;这里还有另外两个未解答的相关问题:
我找到了一种方法,但可能不是理想的方法。
这里我在 编辑:我在创建文件后添加了一个清理块的方法。 c>回调发送 File:flow-node.js 如果你想删除块,在flow.write调用flow.clean与onDone。 I can't figure out how to use the flow.js library with a node backend, and basing my code off the sample on the flow.js github. I'm getting the blob files up, but I'm not building the binary afterward the upload completes. The final get isn't getting triggered or my route is wrong: anyone have any experience with this could get like a million stackoverflow pts because this seems to be a common issue when using node.js and flow.js and here are two other unanswered questions: Flowjs file upload - AngularJS and Node
Reassembling file chunks produced in a multi-part upload I've found a method that works but might not be the ideal approach. Here I'm calling Edit: I added a way to clean the chunks after creating the file. I had to modify File: flow-node.js } In flow.write call flow.clean with onDone if you want to remove the chunks.
这篇关于在flow.js上传到node / express服务器后,重新组装二进制文件的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!我在 flow.post 中调用 flow.write,条件是 status 为 done 且 currentTestChunk > numberOfChunks。之所以做"大于"检查,是因为 flow.post 有时会多次发送 status done,正如这里提到的。
// Hand the incoming chunk to flow.post; once every chunk is present,
// assemble the file under ./tmp and delete the chunk files.
// (Reconstructed from the clean English listing later in the article:
// the scraped copy here had dropped `console`, the opening brace, and
// the final "s" of numberOfChunks.)
flow.post(req, function(status, filename, original_filename, identifier, currentTestChunk, numberOfChunks) {
    console.log('POST', status, original_filename, identifier);
    res.send(200);
    // Greater-than check guards against flow.post reporting 'done' twice.
    if (status === 'done' && currentTestChunk > numberOfChunks) {
        var stream = fs.createWriteStream('./tmp/' + filename);
        // EDIT: {end: true} removed (not needed); {onDone: flow.clean}
        // added to delete the chunks after the file is written.
        flow.write(identifier, stream, { onDone: flow.clean });
    }
})
我必须修改 flow.post 的回调,使其传出 currentTestChunk 和 numberOfChunks。
//这里有一些codez,我们可以忽略...
fs.rename(files [$ .fileParameterName] .path,chunkFilename,function(){
//是否有所有的块?
var currentTestChunk = 1;
var numberOfChunks = Math.max(Math .floor(totalSize /(chunkSize * 1.0)),1);
var testChunkExists = function(){
fs.exists(getChunkFilename(currentTestChunk,identifier),function(exists){
if(exists){
currentTestChunk ++;
if(currentTestChunk> numberOfChunks){
//将currentTestChunk和numberOfChunks添加到回调
回调('完成',文件名,original_filename,标识符,currentTestChunk,numberOfChunks);
} else {
//递归
testChunkExists();
}
} else {
//将currentTestChunk和numberOfChunks添加到回调中
$ b callback('partially_done',filename,original_filename,identifier,currentTestChunk,numberOfChunk) ;
}
});
}
testChunkExists();
});
} else {
callback(validation,filename,original_filename,identifier);
code
$ b $ p
$ b
// Reassemble the chunk files for `identifier` by piping them, in order,
// into writableStream.
//   options.end    - end the writable stream after the last chunk (default true)
//   options.onDone - called with `identifier` after all chunks are piped
//                    (e.g. flow.clean, to delete the chunk files)
// (Reconstructed from the clean English listing later in the article; the
// scraped copy here had `函数` in place of `function` and a missing `}`
// on the pipe options, so it was not valid JavaScript.)
$.write = function(identifier, writableStream, options) {
    options = options || {};
    options.end = (typeof options['end'] == 'undefined' ? true : options['end']);
    // Iterate over each chunk
    var pipeChunk = function(number) {
        var chunkFilename = getChunkFilename(number, identifier);
        fs.exists(chunkFilename, function(exists) {
            if (exists) {
                // If the chunk with the current number exists,
                // create a ReadStream from the file and pipe it
                // to the specified writableStream.
                var sourceStream = fs.createReadStream(chunkFilename);
                sourceStream.pipe(writableStream, {
                    end: false
                });
                sourceStream.on('end', function() {
                    // When the chunk is fully streamed, jump to the next one
                    pipeChunk(number + 1);
                });
            } else {
                // When all the chunks have been piped, end the stream
                if (options.end) {
                    writableStream.end();
                }
                // options.onDone contains flow.clean, which deletes the chunk files.
                if (options.onDone) {
                    options.onDone(identifier);
                }
            }
        });
    }
    pipeChunk(1);
}
// GET /download/:identifier — stream the reassembled upload to the response.
app.get('/download/:identifier', function (request, response) {
  console.log('we writin');
  flow.write(request.params.identifier, response);
});
I call flow.write in flow.post if status is 'done' and currentTestChunk > numberOfChunks. I do the greater-than check because sometimes flow.post sends status 'done'
more than once as mentioned here. flow.post(req, function(status, filename, original_filename, identifier, currentTestChunk, numberOfChunks) {
// Hand the incoming chunk to flow.post; the callback fires once per POST.
console.log('POST', status, original_filename, identifier);
// Acknowledge the chunk immediately.
// NOTE(review): res.send(200) is the old Express idiom; modern Express
// treats a number argument as a body and prefers res.sendStatus(200).
res.send(200);
// Greater-than check guards against flow.post reporting 'done' twice
// (see the explanation above in the article).
if (status === 'done' && currentTestChunk > numberOfChunks) {
// All chunks are present: assemble the final file under ./tmp.
var stream = fs.createWriteStream('./tmp/' + filename);
//EDIT: I removed options {end: true} because it isn't needed
//and added {onDone: flow.clean} to remove the chunks after writing
//the file.
flow.write(identifier, stream, { onDone: flow.clean });
}
})
I had to modify flow.post's callback to send currentTestChunk and numberOfChunks
. $.post = function(req, callback){
// Excerpt of the modified $.post from flow-node.js. Code between the
// function header and fs.rename (and the `if` matching the trailing
// `} else {`) has been elided by the author, so this fragment is
// intentionally not brace-balanced on its own.
//There's some codez here that we can overlook...
fs.rename(files[$.fileParameterName].path, chunkFilename, function(){
// Do we have all the chunks?
// Probe chunk files 1..numberOfChunks one at a time.
var currentTestChunk = 1;
var numberOfChunks = Math.max(Math.floor(totalSize/(chunkSize*1.0)), 1);
var testChunkExists = function(){
fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists){
if(exists){
currentTestChunk++;
if(currentTestChunk>numberOfChunks) {
// Every chunk is on disk: report completion.
//Add currentTestChunk and numberOfChunks to the callback
callback('done', filename, original_filename, identifier, currentTestChunk, numberOfChunks);
} else {
// Recursion
testChunkExists();
}
} else {
// A chunk is still missing: report partial progress.
//Add currentTestChunk and numberOfChunks to the callback
callback('partly_done', filename, original_filename, identifier, currentTestChunk, numberOfChunks);
}
});
}
testChunkExists();
});
} else {
// Validation failed for this POST; no chunk counters are passed here.
callback(validation, filename, original_filename, identifier);
}
// Reassemble the chunk files for `identifier` by piping them, in order,
// into writableStream.
//   options.end    - end the writable stream after the last chunk (default true)
//   options.onDone - called with `identifier` after every chunk has been
//                    piped (e.g. flow.clean, to delete the chunk files)
$.write = function(identifier, writableStream, options) {
  options = options || {};
  options.end = (typeof options['end'] == 'undefined' ? true : options['end']);
  // Iterate over each chunk
  var pipeChunk = function(number) {
    var chunkFilename = getChunkFilename(number, identifier);
    // fs.exists is deprecated in Node.js; fs.access signals existence
    // through the err argument of its callback instead.
    fs.access(chunkFilename, function(err) {
      if (!err) {
        // If the chunk with the current number exists,
        // then create a ReadStream from the file
        // and pipe it to the specified writableStream.
        var sourceStream = fs.createReadStream(chunkFilename);
        // end: false keeps the destination open for the next chunk.
        sourceStream.pipe(writableStream, {
          end: false
        });
        sourceStream.on('end', function() {
          // When the chunk is fully streamed,
          // jump to the next one
          pipeChunk(number + 1);
        });
      } else {
        // When all the chunks have been piped, end the stream
        if (options.end) {
          writableStream.end();
        }
        // Options.onDone contains flow.clean, so here I'm deleting all the chunked files.
        if (options.onDone) {
          options.onDone(identifier);
        }
      }
    });
  }
  pipeChunk(1);
}