The project that I am working on (Node.js) implies lots of operations with the file system (copying, reading, writing, etc.).
Which methods are the fastest?
The project that I am working on (Node.js) implies lots of operations with the file system (copying, reading, writing, etc.).
Which methods are the fastest?
Use the standard built-in way fs.copyFile
:
const fs = require('fs');

// File destination.txt will be created or overwritten by default.
const onCopied = (err) => {
  if (err) throw err;
  console.log('source.txt was copied to destination.txt');
};
fs.copyFile('source.txt', 'destination.txt', onCopied);
If you have to support old end-of-life versions of Node.js - here is how you do it in versions that do not support fs.copyFile
:
const fs = require('fs');

// Stream the source into the destination; destination is created/overwritten.
const input = fs.createReadStream('test.log');
const output = fs.createWriteStream('newLog.log');
input.pipe(output);
createReadStream
and createWriteStream
for errors, so you wouldn't get a one-liner (though it would still be just as fast). –
Bazan cp test.log newLog.log
via require('child_process').exec
? –
Ticktack copy
is not portable on Windows, contrary to a full Node.js solution. –
Xe { end: false}
to pipe, otherwise will be closed by default. See here nodejs.org/api/… –
Hurricane child_process.execFile('/bin/cp', ['--no-target-directory', source, target])
. –
Spermaceti fs.createReadStream('./init/xxx.json').pipe(fs.createWriteStream('xxx.json'));
–
Grimona process.exit
because the latter terminates all IO without waiting for the streams to finish their data exchange –
Carlie fs-extra.copySync
(see other answer in this thread), it works a lot better. –
Noblesse fs.copyFile
function and is the preferred solution: stackoverflow.com/a/46253698 –
Baggage fs.copyFile
answer. Looking in the node source fs.copyFile
uses the OS-level copy on macOS and Windows and so should actually copy the files, whereas the code above merely creates a new file and copies the bytes into the new file. –
Tangible copyFile()
is better when you care about privileges, in my case I had to copy an executable to my linux /tmp/
directory; the stream solution copied the file with non-executable permissions, so the copyFile()
is the one to go with, It would be great if you mention it in your solution. –
Breannebrear xxx.json
reminds of a teacher who put the example URL in a demo to xxx.com –
Anachronistic Same mechanism, but this adds error handling:
function copyFile(source, target, cb) {
  // Guard so cb fires exactly once, whichever event (error/close) wins.
  let finished = false;
  const done = (err) => {
    if (!finished) {
      cb(err);
      finished = true;
    }
  };

  const rd = fs.createReadStream(source);
  const wr = fs.createWriteStream(target);
  rd.on("error", done);
  wr.on("error", done);
  wr.on("close", () => done());
  rd.pipe(wr);
}
WriteStream
will only unpipe it. You would have to call rd.destroy()
yourself. At least that's what happened to me. Sadly there's not much documentation except from the source code. –
Spermaceti cb
stand for? what should we pass in as the third argument? –
Priest rd.on('open', function() {})
, and create the write stream there. –
Sudarium open
event. I think in my case I actually wanted to move the file and there was an accidental write error at some point, so copyFile
dutifully returned the error. My app then tried it again successfully and unlink
ed the source file. But it still showed up in readdir
because of the open handle. –
Spermaceti ERROR: There are some read requests waiting on finished stream
. I guess a stream is unique and instead of waiting for one to finish before starting it just breaks. Any ideas on how to handle this? Thanks –
Dorian .destroy()
is now the official way to close a readable stream as per the documentation - nodejs.org/api/stream.html#stream_readable_destroy_error –
Bullyboy Since Node.js 8.5.0 we have the new fs.copyFile and fs.copyFileSync methods.
Usage example:
const fs = require('fs');

// File "destination.txt" will be created or overwritten by default.
fs.copyFile('source.txt', 'destination.txt', function (err) {
  if (err) {
    throw err;
  }
  console.log('source.txt was copied to destination.txt');
});
copyFile()
is bugged while overwriting longer files. Courtesy of uv_fs_copyfile()
till Node v8.7.0 (libuv 1.15.0). see github.com/libuv/libuv/pull/1552 –
Tetanic UnhandledPromiseRejectionWarning: TypeError [ERR_INVALID_ARG_TYPE]: The "mode" argument must be integer. Received type function
–
Stokehold I was not able to get the createReadStream/createWriteStream
method working for some reason, but using the fs-extra npm module it worked right away. I am not sure of the performance difference though.
npm install --save fs-extra
var fs = require('fs-extra');
fs.copySync(path.resolve(__dirname, './init/xxx.json'), 'xxx.json');
fs.copy(src, dst, callback);
, and these should resolve @mvillar's concern. –
Clypeus require('child').fork(...)
since it wouldn't block the main event loop. It's all about context and what you're trying to achieve. –
Convoke Fast to write and convenient to use, with promise and error management:
function copyFile(source, target) {
  const rd = fs.createReadStream(source);
  const wr = fs.createWriteStream(target);
  // Settle on the first error from either stream, or on write completion.
  const copied = new Promise(function (resolve, reject) {
    rd.on('error', reject);
    wr.on('error', reject);
    wr.on('finish', resolve);
    rd.pipe(wr);
  });
  // On failure, tear both streams down before propagating the error.
  return copied.catch(function (error) {
    rd.destroy();
    wr.end();
    throw error;
  });
}
The same with async/await syntax:
async function copyFile(source, target) {
  const rd = fs.createReadStream(source);
  const wr = fs.createWriteStream(target);
  // Resolves when the write side finishes; rejects on either stream's error.
  const pump = new Promise(function (resolve, reject) {
    rd.on('error', reject);
    wr.on('error', reject);
    wr.on('finish', resolve);
    rd.pipe(wr);
  });
  try {
    return await pump;
  } catch (error) {
    // Clean up both streams, then let the caller see the failure.
    rd.destroy();
    wr.end();
    throw error;
  }
}
new Promise(function(resolve, reject) { resolve(1); resolve(2); reject(3); reject(4); console.log("DONE"); }).then(console.log.bind(console), function(e){console.log("E", e);});
and looked up the spec on this and you are right: Attempting to resolve or reject a resolved promise has no effect. Perhaps you could extend your answer and explain why you have written the function in this way? Thanks :-) –
Anikaanil close
should be finish
for Writable streams. –
Anikaanil /dev/stdin
, that is a bug github.com/joyent/node/issues/25375 –
Anikaanil Well, usually it is good to avoid asynchronous file operations. Here is the short (i.e. no error handling) sync example:
var fs = require('fs');

// Read the entire source file into memory, then write it out in one shot.
// NOTE(review): sourceFile/targetFile are presumably defined by the caller.
var contents = fs.readFileSync(sourceFile);
fs.writeFileSync(targetFile, contents);
*Sync
methods are totally against nodejs' philosophy! I also think they are slowly being deprecated. The whole idea of nodejs is that it's single-threaded and event-driven. –
Chromyl *Sync
being deprecated ?... hum... that's why fs.exists()
is deprecated and fs.existsSync()
is not. lol. –
Libbey fs.copyFileSync
? –
Magnifico If you don't care about it being async, and aren't copying gigabyte-sized files, and would rather not add another dependency just for a single function:
function copySync(src, dest) {
  // Whole-file buffer copy: fine for small files, no extra dependencies.
  // Throws (rather than returning a flag) if src is unreadable or dest unwritable.
  fs.writeFileSync(dest, fs.readFileSync(src));
}
fs.existsSync
call should be omitted. The file could disappear in the time between the fs.existsSync
call and the fs.readFileSync
call, which means the fs.existsSync
call doesn't protect us from anything. –
Handstand false
if fs.existsSync
fails is likely poor ergonomics because few consumers of copySync
will think to manually inspect the return value every time it's called, any more than we do for fs.writeFileSync
et al.. Throwing an exception is actually preferable. –
Handstand 'utf-8'
encoding from the snippet too, which means this will now work on any file. data
is now a Buffer
, not a String
. –
Handstand Mike Schilling's solution with error handling with a shortcut for the error event handler.
function copyFile(source, target, cb) {
  let callbackFired = false;

  // Shared terminal handler: passed directly as the error listener for both
  // streams, and invoked without an argument on successful close.
  function done(err) {
    if (callbackFired) return;
    cb(err);
    callbackFired = true;
  }

  const rd = fs.createReadStream(source);
  const wr = fs.createWriteStream(target);
  rd.on("error", done);
  wr.on("error", done);
  wr.on("close", () => done());
  rd.pipe(wr);
}
You may want to use async/await, since node v10.0.0
it's possible with the built-in fs Promises API
.
Example:
const fs = require('fs')
// Thin async wrapper over the promise-based fs API
// (fs.promises is stable since Node 10.17 / 11.14).
async function copyFile(src, dest) {
  await fs.promises.copyFile(src, dest);
}
Note:
As of
node v11.14.0, v10.17.0
the API is no longer experimental.
More information:
const fs = require("fs");

// Copy (and overwrite) the first path onto the second in one synchronous call,
// e.g. copyFileSync("file1.txt", "file2.txt").
const source = "filepath1";
const destination = "filepath2";
fs.copyFileSync(source, destination);
This is what I personally use to copy a file and replace another file using Node.js :)
For fast copies you should use the fs.constants.COPYFILE_FICLONE
flag. It allows (for filesystems that support this) to not actually copy the content of the file. Just a new file entry is created, but it points to a Copy-on-Write "clone" of the source file.
To do nothing/less is the fastest way of doing something ;)
https://nodejs.org/api/fs.html#fs_fs_copyfile_src_dest_flags_callback
let fs = require("fs");

// COPYFILE_FICLONE requests a copy-on-write clone when the filesystem
// supports it (APFS, Btrfs, XFS, ...); otherwise Node falls back to a
// normal copy.
fs.copyFile(
  "source.txt",
  "destination.txt",
  fs.constants.COPYFILE_FICLONE,
  (err) => {
    if (err) {
      // TODO: handle error
      console.log("error");
      // BUG FIX: return so we don't also report "success" after a failure.
      return;
    }
    console.log("success");
  }
);
Using promises instead:
let fs = require("fs");
let util = require("util");

let copyFile = util.promisify(fs.copyFile);

copyFile(
  "source.txt",
  "destination.txt",
  fs.constants.COPYFILE_FICLONE
)
  // BUG FIX: handle success before failure. The original chained
  // .catch(...).then(...), and because .catch() returns a *fulfilled*
  // promise, "success" was logged even after an error.
  .then(() => console.log("success"))
  .catch(() => console.log("error"));
fs.promises.copyFile
–
Tangible Use Node.js's built-in copy function
It provides both async and sync version:
const fs = require('fs');

// File "destination.txt" will be created or overwritten by default.
const report = function (err) {
  if (err) throw err;
  console.log('source.txt was copied to destination.txt');
};
fs.copyFile('source.txt', 'destination.txt', report);
You can do it using the fs-extra
module very easily:
const fse = require('fs-extra');

let srcDir = 'path/to/file';
let destDir = 'pat/to/destination/directory';

// BUG FIX: fse.moveSync() is synchronous and takes no callback — the
// original passed one, which fs-extra silently ignores, and any failure
// is thrown, not delivered to the callback. Use try/catch instead
// (or the async fse.move(src, dest, cb) if a callback is wanted).
try {
  // To move a file permanently from a directory
  fse.moveSync(srcDir, destDir);
  console.log("success!");
} catch (err) {
  console.error(err);
}
Or
// BUG FIX: fse.copySync() is synchronous and takes no callback — errors
// are thrown, so the original callback was never invoked. Use try/catch
// (or the async fse.copy(src, dest, cb) if a callback is wanted).
try {
  // To copy a file from a directory
  fse.copySync(srcDir, destDir);
  console.log("success!");
} catch (err) {
  console.error(err);
}
I wrote a little utility to test the different methods:
https://www.npmjs.com/package/copy-speed-test
run it with
npx copy-speed-test --source someFile.zip --destination someNonExistentFolder
It does a native copy using child_process.exec(), a copy file using fs.copyFile and it uses createReadStream with a variety of different buffer sizes (you can change buffer sizes by passing them on the command line. run npx copy-speed-test -h for more info).
Mike's solution, but with promises:
const FileSystem = require('fs');
exports.copyFile = function copyFile(source, target) {
return new Promise((resolve,reject) => {
const rd = FileSystem.createReadStream(source);
rd.on('error', err => reject(err));
const wr = FileSystem.createWriteStream(target);
wr.on('error', err => reject(err));
wr.on('close', () => resolve());
rd.pipe(wr);
});
};
Improvement of one other answer.
Features:
promise
, which makes it easier to use in a larger project.Usage:
var onePromise = copyFilePromise("src.txt", "dst.txt");
var anotherPromise = copyMultiFilePromise(new Array(new Array("src1.txt", "dst1.txt"), new Array("src2.txt", "dst2.txt")));
Code:
function copyFile(source, target, cb) {
    console.log("CopyFile", source, target);

    // Ensure the target's parent directory exists.
    // IMPROVED: fs.mkdirSync with { recursive: true } (Node >= 10.12)
    // replaces the hand-rolled recursive mkdir, and avoids the
    // exists-then-create race the old fs.existsSync() check had.
    fs.mkdirSync(path.dirname(target), { recursive: true });

    // Invoke cb exactly once, with the first error (if any).
    var cbCalled = false;
    function done(err) {
        if (!cbCalled) {
            cb(err);
            cbCalled = true;
        }
    }

    var rd = fs.createReadStream(source);
    rd.on("error", done);
    var wr = fs.createWriteStream(target);
    wr.on("error", done);
    wr.on("close", function () {
        done();
    });
    rd.pipe(wr);
}
function copyFilePromise(source, target) {
    return new Promise(function (resolve, reject) {
        copyFile(source, target, function (err) {
            // copyFile() calls back with undefined on success,
            // or with the first error encountered.
            if (err !== undefined) {
                reject(err);
            } else {
                resolve();
            }
        });
    });
}
function copyMultiFilePromise(srcTgtPairArr) {
    // Start every [source, target] copy in parallel; settle when all finish
    // (or reject on the first failure, per Promise.all semantics).
    var promises = srcTgtPairArr.map(function (pair) {
        return copyFilePromise(pair[0], pair[1]);
    });
    return Promise.all(promises);
}
© 2022 - 2024 — McMap. All rights reserved.