File system
The fs module provides a lot of very useful functionality to access and interact with the file system.
const fs = require('fs')
fs.access(): check if the file exists and Node.js can access it with its permissions
fs.appendFile(): append data to a file. If the file does not exist, it's created
fs.chmod(): change the permissions of a file specified by the filename passed. Related: fs.lchmod(), fs.fchmod()
fs.chown(): change the owner and group of a file specified by the filename passed. Related: fs.fchown(), fs.lchown()
fs.close(): close a file descriptor
fs.copyFile(): copies a file
fs.createReadStream(): create a readable file stream
fs.createWriteStream(): create a writable file stream
fs.link(): create a new hard link to a file
fs.mkdir(): create a new folder
fs.mkdtemp(): create a temporary directory
fs.open(): open a file, returning a file descriptor; you can optionally set the file mode
fs.readdir(): read the contents of a directory
fs.readFile(): read the content of a file. Related: fs.read()
fs.readlink(): read the value of a symbolic link
fs.realpath(): resolve relative file path pointers (., ..) to the full path
fs.rename(): rename a file or folder
fs.rmdir(): remove a folder
fs.stat(): returns the status of the file identified by the filename passed. Related: fs.fstat(), fs.lstat()
fs.symlink(): create a new symbolic link to a file
fs.truncate(): truncate to the specified length the file identified by the filename passed. Related: fs.ftruncate()
fs.unlink(): remove a file or a symbolic link
fs.unwatchFile(): stop watching for changes on a file
fs.utimes(): change the timestamp of the file identified by the filename passed. Related: fs.futimes()
fs.watchFile(): start watching for changes on a file. Related: fs.watch()
fs.writeFile(): write data to a file. Related: fs.write()
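To get a feel for the callback style of this API, here is a minimal sketch using two of the methods above; the file name notes.txt is only an assumption for illustration:
const fs = require('fs')

// check whether the file is visible to the calling process
fs.access('notes.txt', fs.constants.F_OK, (err) => {
  if (err) {
    console.error('notes.txt does not exist or is not accessible')
    return
  }
  // append a line; the callback receives only a possible error
  fs.appendFile('notes.txt', 'one more line\n', (err) => {
    if (err) console.error(err)
  })
})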
const express = require("express");
const fs = require("fs");
const app = express();

app.get("/file", (req, res) => {
  // synchronous read: blocks the event loop until the whole file is in memory
  const file = fs.readFileSync("package.json", "utf8");
  res.setHeader('Content-Length', Buffer.byteLength(file)); // byte length, not string length
  res.setHeader('Content-disposition', 'attachment; filename=test.txt');
  res.setHeader('Content-type', 'text/html');
  res.end(file);
});

app.listen(3000);
readFileSync also blocks the event loop, so no other code can run until the read completes.
This makes a huge difference in your application flow.
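A tiny sketch of the ordering difference (assuming some file file.txt exists next to the script):
const fs = require('fs')

fs.readFile('file.txt', () => {
  console.log('async read finished')
})
console.log('this line runs before the async read finishes')

const data = fs.readFileSync('file.txt')
console.log('this line waits until the sync read is done')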
Every fs method is asynchronous by default, but each also has a synchronous counterpart with the Sync suffix:
const fs = require('fs')
fs.rename()
fs.renameSync()
fs.write()
fs.writeSync()
try {
fs.renameSync('before.json', 'after.json')
//done
} catch (err) {
console.error(err)
}
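For comparison, the asynchronous counterpart takes a callback; a minimal sketch with the same (assumed) file names:
const fs = require('fs')

fs.rename('before.json', 'after.json', (err) => {
  if (err) {
    console.error(err)
    return
  }
  // done: the rename completed without blocking the event loop
})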
const express = require("express");
const fs = require("fs");
const app = express();

app.get("/file", (req, res) => {
  // asynchronous read: the event loop stays free while the file is loaded
  fs.readFile("package.json", "utf8", (error, data) => {
    if (error) {
      res.sendStatus(500);
      return;
    }
    res.setHeader('Content-Length', Buffer.byteLength(data));
    res.setHeader('Content-disposition', 'attachment; filename=test.txt');
    res.setHeader('Content-type', 'text/html');
    res.end(data);
  });
});

app.listen(3000);
This only partially solves the problem: fs.readFile still loads the whole file into memory before anything is sent, whereas fs.createReadStream reads the file in chunks of a size you specify, as shown in the sketch below.
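A minimal sketch of the same route using a read stream; the 16 KB highWaterMark is just an illustrative chunk size:
const express = require("express");
const fs = require("fs");
const app = express();

app.get("/file", (req, res) => {
  // read package.json in 16 KB chunks and pipe each chunk to the response
  const stream = fs.createReadStream("package.json", { highWaterMark: 16 * 1024 });
  stream.on('error', () => res.sendStatus(500));
  stream.pipe(res);
});

app.listen(3000);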
const fs = require('fs')
const path = require('path')

fs.readdir(path.resolve(), (err, list) => {
  if (err) throw err
  console.log('Files:', list)
})
/*
Files: [
'asd.js',
'evens.js',
'fp.js',
'index.js',
'monitor.js',
'node_modules',
'package-lock.json',
'package.json',
'test.ts',
'Untitled-1.mongodb'
]
*/
The path module provides a lot of very useful functionality for working with file and directory paths.
const path = require('path');
console.log(path.basename('/test/something/file.txt')); // file.txt
console.log(path.dirname('/test/something/file.txt')); // /test/something
console.log(path.extname('/test/something/file.txt')); // .txt
console.log(path.parse('/test/something/file.txt'));
/*
{
root: '/',
dir: '/test/something',
base: 'file.txt',
ext: '.txt',
name: 'file'
}
*/
console.log(path.normalize('/users/joe/..//test.txt')); // /users/test.txt
console.log(path.join('/', 'users', 'joe', 'notes.txt')) //'/users/joe/notes.txt'
console.log(path.resolve('asd.js')) // e.g. /Users/joe/tmp/asd.js when run from /Users/joe/tmp
The callback-based fs methods can be wrapped with util.promisify so they work with async/await:
const util = require('util');
const fs = require('fs');
const stat = util.promisify(fs.stat);

async function callStat() {
  const stats = await stat('.');
  console.log(`This directory is owned by ${stats.uid}`);
}

callStat();
An alternative set of asynchronous file system methods that return Promise objects rather than using callbacks. The API is accessible via require('fs').promises.
const fs = require('fs/promises');
const path = require('path');
fs.readdir(path.resolve(), { withFileTypes: true })
  .then(list => {
    console.log('files', list)
  })
  .catch(err => console.error(err));
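The same call with async/await, as a quick sketch:
const fs = require('fs/promises');
const path = require('path');

async function listFiles() {
  // Dirent objects expose the entry name plus helpers such as isDirectory()
  const list = await fs.readdir(path.resolve(), { withFileTypes: true });
  console.log('files', list);
}

listFiles();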
Fixed-size chunk of memory (can't be resized) allocated outside of the V8 JavaScript engine.
Buffers were introduced to help developers deal with binary data, in an ecosystem that traditionally only dealt with strings rather than binaries.
Buffers in Node.js are not related to the general concept of buffering data, which is what happens when a stream processor receives data faster than it can digest it.
const buf = Buffer.from('Hey!');
console.log(buf[0]) //72
console.log(buf[1]) //101
console.log(buf[2]) //121
buf[1] = 111 //o in UTF-8
console.log(buf.toString()) // 'Hoy!' (toString() decodes as UTF-8 by default)
const emptyBuf = Buffer.alloc(1024) // a zero-filled 1 KB buffer

const buf = Buffer.from('Hey?')
let bufcopy = Buffer.from('Moo!')
bufcopy.set(buf.subarray(1, 3), 1) // copy bytes 1-2 of buf ('e', 'y') into bufcopy at offset 1
bufcopy.toString() // 'Mey!'
A buffer, being an array of bytes, can be accessed like an array
const fs = require('fs/promises')
const path = require('path')

fs.readFile(path.resolve('evens.js'))
  .then((data) => {
    // without an encoding, readFile resolves with a Buffer; toString() decodes it as UTF-8
    console.log(data.toString())
  })
const http = require('http');
const url = require('url');

http.get(url.parse('http://myserver.com:9999/package'), function(res) {
  const data = [];
  res.on('data', function(chunk) {
    data.push(chunk);
  }).on('end', function() {
    // at this point data is an array of Buffers,
    // so Buffer.concat() can make us a new Buffer of all of them together
    const buffer = Buffer.concat(data);
    console.log(buffer.toString('base64'));
  });
});
Streams are a way to handle reading/writing files, network communications, or any kind of end-to-end information exchange in an efficient way.
Streams basically provide two major advantages over other data handling methods:
Memory efficiency: you don't need to load large amounts of data in memory before you are able to process it
Time efficiency: you can start processing data as soon as you have it, rather than waiting until the whole payload is available
const http = require('http')
const fs = require('fs')

const server = http.createServer(function(req, res) {
  fs.readFile(__dirname + '/data.txt', (err, data) => {
    res.end(data)
  })
})

server.listen(3000)
If the file is big, the operation will take quite a bit of time, and the whole file sits in memory before a single byte is sent. Here is the same thing written using streams:
const http = require('http')
const fs = require('fs')

const server = http.createServer((req, res) => {
  const stream = fs.createReadStream(__dirname + '/data.txt')
  stream.pipe(res)
})

server.listen(3000)
Instead of waiting until the file is fully read, we start streaming it to the HTTP client as soon as we have a chunk of data ready to be sent.
src.pipe(dest1).pipe(dest2);
// same as
src.pipe(dest1)
dest1.pipe(dest2)
pipe() takes the source and pipes it into a destination; it returns the destination stream, which is why calls can be chained.
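As a quick sketch of such a chain, compressing a file on disk (the file names are only assumptions):
const fs = require('fs')
const zlib = require('zlib')

// readable -> transform (gzip) -> writable
fs.createReadStream('data.txt')
  .pipe(zlib.createGzip())
  .pipe(fs.createWriteStream('data.txt.gz'))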
Due to their advantages, many Node.js core APIs provide native stream handling capabilities, most notably:
process.stdin, process.stdout and process.stderr
fs read and write streams (fs.createReadStream(), fs.createWriteStream())
zlib and crypto streams
TCP sockets (the net module)
HTTP requests and responses
child process stdin, stdout and stderr
const Stream = require('stream')

const readableStream = new Stream.Readable({
  read() {} // no-op: data is pushed manually below
})

const writableStream = new Stream.Writable()
writableStream._write = (chunk, encoding, next) => {
  console.log(chunk.toString())
  next() // signal that this chunk has been processed
}

readableStream.pipe(writableStream)

readableStream.push('hi!')
readableStream.push('ho!')
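A Transform stream sits in the middle of a pipe and rewrites chunks as they pass through; a minimal sketch that uppercases whatever it receives from stdin:
const { Transform } = require('stream')

const upperCase = new Transform({
  transform(chunk, encoding, callback) {
    // pass the transformed chunk downstream
    callback(null, chunk.toString().toUpperCase())
  }
})

process.stdin.pipe(upperCase).pipe(process.stdout)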
'use strict';
const fs = require('fs');
const zlib = require('zlib');
const http = require('http');

const rs = fs.createReadStream('index.html');
const gs = zlib.createGzip();
const buffers = [];
let buffer = null;

gs.on('data', chunk => {
  buffers.push(chunk);
});
gs.on('end', () => {
  // the whole gzipped file is now in memory and ready to serve
  buffer = Buffer.concat(buffers);
});

rs.pipe(gs);

const server = http.createServer((request, response) => {
  console.log(request.url);
  if (!buffer) {
    // the gzipped payload is not ready yet
    response.writeHead(503);
    return response.end();
  }
  response.writeHead(200, { 'Content-Encoding': 'gzip' });
  response.end(buffer);
});

server.listen(8000);
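Since zlib.createGzip() is itself a stream, the buffering above can be avoided entirely by piping the file through gzip into the response on every request; a sketch, assuming index.html sits next to the script:
const fs = require('fs');
const zlib = require('zlib');
const http = require('http');

const server = http.createServer((request, response) => {
  response.writeHead(200, { 'Content-Encoding': 'gzip' });
  // read -> gzip -> response, one chunk at a time
  fs.createReadStream('index.html')
    .pipe(zlib.createGzip())
    .pipe(response);
});

server.listen(8000);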