Is Node `fs.appendFile` atomic?


Question

If multiple, independent node processes use fs.appendFile() to append text chunks (each of which is > 4KB) to a single file, can I lose/corrupt data w/o a file locking scheme or is fs.appendFile an atomic op?

I'm mostly concerned about Linux & a local fs.
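To make the scenario concrete, here is a minimal sketch of what each of those independent processes would be doing (the file path and chunk size below are illustrative, not taken from the question):

// Each of several independent Node processes runs something like this,
// appending a chunk larger than 4KB to one shared file, with no locking.
const fs = require('fs');

const chunk = 'x'.repeat(8192) + '\n'; // > 4KB
fs.appendFile('./shared.log', chunk, (err) => {
    if (err) throw err;
    // The question: can concurrent appends like this interleave mid-chunk
    // and corrupt the file, or is each append written out atomically?
});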

Answer

The answer might be OS and/or filesystem dependent, but you can use this to test:

// This script aims to test/prove that you can append to a single file from
// multiple processes with buffers up to a certain size, without causing one
// process' output to corrupt the other's.
//
// The script takes one parameter, the length of the buffer. It then creates
// 20 worker processes which each write 50 lines of the specified buffer
// size to the same file. When all processes are done outputting, it tests
// the output file to ensure it is in the correct format.

const child_process = require('child_process');
const fs = require('fs');
const NUM_WORKERS=20;
const LINES_PER_WORKER=50;
const OUTPUT_FILE='./foo';

// each worker will output $LINES_PER_WORKER lines to the output file
function run_worker(worker_num, buf_len) {
    // Each line will be a specific character, multiplied by the line length.
    // The character changes based on the worker number.
    const filler_len=buf_len-1; // -1 -> leave room for \n
    const filler_char=String.fromCharCode(+worker_num+64);
    const line = filler_char.repeat(filler_len) + '\n';
    for (let i=0; i<LINES_PER_WORKER; i++) {
        fs.appendFile(OUTPUT_FILE, line, (e) => { 
            if (e) {
                console.log('Oh noes!', e);
                throw e;
            }
        });
    }
}

if (process.argv[2] === "worker") {
    run_worker(process.argv[3], process.argv[4]);
} else {

    const buf_len=+process.argv[2] || 4096;

    try {
        fs.unlinkSync(OUTPUT_FILE);
    } catch (e) {
        // swallow the error if the file doesn't exist (the script was never run before)
        if (e.code !== 'ENOENT') {
            throw e;
        }
    }

    console.log(`Launching ${NUM_WORKERS} worker processes`);
    let finished = 0;
    for (let i=1; i <= NUM_WORKERS; i++) {
        let proc = child_process.spawn(process.execPath, [process.argv[1], 'worker', i, buf_len], {stdio: 'inherit'});
        proc.on('close', (code) => {
            if (code) {
                console.log(`Worker exited with code ${code}`);
                process.exit(code);
            }

            finished++;
            if (finished === NUM_WORKERS) {
                allFinished();
            }
        });
        proc.on('error', (e) => {
            console.log('Worker errored!');
            throw e;
        });
    }

    console.log(`Each line will be ${buf_len} characters long`);
    console.log(`Waiting for processes to exit`);

    // Now we want to test the output file. Each line should be the same letter
    // repeated buf_len-1 times (remember the \n takes up one byte). If we had
    // workers writing over each other's lines, then there will be mixed characters
    // and/or longer/shorter lines.

    function allFinished() {
        console.log(`Testing output file`);

        // Make sure the file is the right size (ensures processes didn't write over
        // each other's lines)
        const expected_file_size=NUM_WORKERS * LINES_PER_WORKER * buf_len;
        const actual_file_size=fs.statSync(OUTPUT_FILE).size;
        if ( expected_file_size !== actual_file_size) {
            console.log(`Expected file size of ${expected_file_size}, but got ${actual_file_size}`);
            process.exit(1);
        }

        // File size is OK, test the actual content

        // Scan line by line
        // Note: Doesn't work on cygwin for lines < 255
        const line_length=buf_len-1;
        const lineReader = require('readline').createInterface({
            input: require('fs').createReadStream(OUTPUT_FILE)
        });

        let num_corrupted = 0;
        lineReader.on('line', function (line) {
            const first_char = line[0];
            if (line !== first_char.repeat(line_length)) {
                num_corrupted++;
            }
        });

        // The 'line' events fire asynchronously, so only report once the
        // reader has consumed the whole file.
        lineReader.on('close', function () {
            if (num_corrupted > 0) {
                console.log(`Found ${num_corrupted} instances of corrupted lines`);
            } else {
                console.log(`All's good! The output file had no corrupted lines.`);
            }

            fs.unlinkSync(OUTPUT_FILE);
        });
    }
}
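To try the test yourself, save the script to a file (the name append-test.js here is just an example) and pass the desired line length as the first argument, e.g. node append-test.js 4096, then rerun with larger values (8192, 65536, ...) to see whether corrupted lines start appearing on your particular OS and filesystem.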

Two things:
