Skip to content

Basic Framework

Overview

Understanding Node.js fundamentals is crucial for building robust applications. This chapter covers the core concepts that make Node.js unique: the event loop, modules system, streams, and the non-blocking I/O model.

The Node.js Runtime Architecture

V8 JavaScript Engine

Node.js is built on Google's V8 engine, which compiles JavaScript to native machine code:

javascript
// performance-test.js
// Times a CPU-bound recursive workload to exercise V8's JIT compiler.
console.time('V8 Performance Test');

// Deliberately naive recursive Fibonacci — exponential time, which gives
// the engine a meaningful amount of work to measure.
function fibonacci(n) {
  return n < 2 ? n : fibonacci(n - 1) + fibonacci(n - 2);
}

const result = fibonacci(35);
console.log('Fibonacci(35):', result);
console.timeEnd('V8 Performance Test');

Event Loop Deep Dive

The event loop is the heart of Node.js's non-blocking architecture:

javascript
// event-loop-demo.js
// Demonstrates the relative scheduling of timers, setImmediate,
// process.nextTick, promise microtasks, and synchronous code.
console.log('=== Event Loop Phases ===');

// Timers phase: a 0 ms setTimeout still waits for a later loop iteration.
setTimeout(() => console.log('Timer: setTimeout'), 0);

// Check phase: setImmediate callbacks run after the poll phase.
setImmediate(() => console.log('Check: setImmediate'));

// nextTick queue: not an event-loop phase — it is drained before the loop
// continues, ahead of promise microtasks.
process.nextTick(() => console.log('Next Tick: process.nextTick'));

// Microtask queue: promise callbacks run after the nextTick queue,
// still before any timer or check callbacks.
Promise.resolve().then(() => console.log('Microtask: Promise.resolve'));

console.log('Synchronous: Main thread');

// Expected order: Synchronous, nextTick, Promise microtask, then the
// timer/setImmediate pair (their relative order in the main module can vary).

Understanding Phases

javascript
// event-loop-phases.js
// Shows how callbacks from different APIs interleave, including
// callbacks scheduled from inside an I/O completion callback.
const fs = require('fs');

console.log('Start');

// Timers phase: both 0 ms timers fire on a later loop iteration, in order.
setTimeout(() => console.log('1: Timer'), 0);
setTimeout(() => console.log('2: Timer'), 0);

// Poll phase: fs.readFile's completion callback runs here.
fs.readFile(__filename, () => {
  console.log('3: I/O callback');
  
  // Check phase: setImmediate scheduled inside an I/O callback runs
  // before any additional timers.
  setImmediate(() => console.log('4: setImmediate inside I/O'));
  
  // nextTick queue drains before the loop moves on, so this logs
  // before the setImmediate scheduled just above.
  process.nextTick(() => console.log('5: nextTick inside I/O'));
});

// Check phase (order relative to the 0 ms timers can vary in the main module).
setImmediate(() => console.log('6: setImmediate'));

// nextTick queue: runs right after the synchronous code below, before timers.
process.nextTick(() => console.log('7: nextTick'));

console.log('End');

Module System

CommonJS Modules

Node.js uses the CommonJS module system by default:

javascript
// math-operations.js
// math-operations.js
// Circle helpers built on a module-local PI constant.
const PI = 3.14159;

// Area of a circle: PI * r^2.
const calculateArea = (radius) => PI * radius * radius;

// Circumference of a circle: 2 * PI * r.
const calculateCircumference = (radius) => 2 * PI * radius;

// Stateful calculator that keeps a human-readable log of every operation.
class Calculator {
  constructor() {
    // Each entry is a string like "2 + 3 = 5".
    this.history = [];
  }

  // Adds two numbers and appends the operation to the history log.
  add(a, b) {
    const sum = a + b;
    this.history.push(`${a} + ${b} = ${sum}`);
    return sum;
  }

  // Returns the accumulated operation log (live array reference).
  getHistory() {
    return this.history;
  }
}

// Different export patterns
module.exports = {
  PI,
  calculateArea,
  calculateCircumference,
  Calculator
};

// Alternative export syntax
// exports.PI = PI;
// exports.calculateArea = calculateArea;

Using the module:

javascript
// app.js
const { PI, calculateArea, Calculator } = require('./math-operations');
const mathOps = require('./math-operations');

console.log('PI value:', PI);
console.log('Circle area (radius 5):', calculateArea(5));

const calc = new Calculator();
console.log('Addition result:', calc.add(10, 15));
console.log('Calculator history:', calc.getHistory());

ES6 Modules (ESM)

Enable ES6 modules by adding "type": "module" to package.json:

javascript
// math-utils.mjs (or .js with "type": "module")
// Same circle helpers as the CommonJS version, exported as ES modules.
export const PI = 3.14159;

// Area of a circle: PI * r^2.
export function calculateArea(radius) {
  return PI * radius * radius;
}

// Default export: a calculator that records structured operation entries.
class Calculator {
  constructor() {
    // Each entry has the shape { operation, a, b, result }.
    this.operations = [];
  }

  // Multiplies two numbers and records the operation.
  multiply(a, b) {
    const product = a * b;
    this.operations.push({ operation: 'multiply', a, b, result: product });
    return product;
  }
}

export default Calculator;
javascript
// main.mjs
// Consumes math-utils.mjs via ESM: default + named imports, plus a
// namespace import.
import Calculator, { PI, calculateArea } from './math-utils.mjs';
import * as mathUtils from './math-utils.mjs'; // NOTE(review): unused below — kept only to illustrate namespace imports; remove if unneeded

console.log('Using named imports:', PI);
console.log('Area calculation:', calculateArea(3));

const calc = new Calculator();
console.log('Multiplication:', calc.multiply(4, 7));

Core Modules Deep Dive

File System (fs) Module

javascript
// file-operations.js
const fs = require('fs');
const path = require('path');

// Thin wrapper around fs that confines all reads and writes to a single
// base directory, created on construction if missing.
class FileManager {
  constructor(baseDir = './data') {
    this.baseDir = baseDir;
    this.ensureDirectory();
  }

  // Creates the base directory (and any missing parents) if needed.
  ensureDirectory() {
    if (!fs.existsSync(this.baseDir)) {
      fs.mkdirSync(this.baseDir, { recursive: true });
    }
  }

  // Blocking write — fine in scripts, avoid on hot server paths.
  writeFileSync(filename, data) {
    const target = path.join(this.baseDir, filename);
    fs.writeFileSync(target, data, 'utf8');
    return target;
  }

  // Non-blocking write; resolves with the full path that was written.
  async writeFile(filename, data) {
    const target = path.join(this.baseDir, filename);
    await fs.promises.writeFile(target, data, 'utf8');
    return target;
  }

  // Reads a file from the base directory as UTF-8 text.
  readFile(filename) {
    return fs.promises.readFile(path.join(this.baseDir, filename), 'utf8');
  }

  // Lists directory entries with size/type/mtime metadata, stat-ing
  // them concurrently.
  async listFiles() {
    const names = await fs.promises.readdir(this.baseDir);
    return Promise.all(
      names.map(async (name) => {
        const stats = await fs.promises.stat(path.join(this.baseDir, name));
        return {
          name,
          size: stats.size,
          isDirectory: stats.isDirectory(),
          modified: stats.mtime,
        };
      })
    );
  }

  // Removes a file from the base directory.
  deleteFile(filename) {
    return fs.promises.unlink(path.join(this.baseDir, filename));
  }
}

// Usage example: exercise FileManager end to end against ./temp.
async function demonstrateFileOperations() {
  const fileManager = new FileManager('./temp');

  try {
    // Create a text file and a pretty-printed JSON file.
    await fileManager.writeFile('test.txt', 'Hello, Node.js!');
    await fileManager.writeFile(
      'data.json',
      JSON.stringify({ message: 'Hello World' }, null, 2)
    );

    // Read one back and show it.
    console.log('File content:', await fileManager.readFile('test.txt'));

    // Enumerate the directory contents.
    console.log('Files in directory:');
    for (const file of await fileManager.listFiles()) {
      console.log(`- ${file.name} (${file.size} bytes, ${file.isDirectory ? 'directory' : 'file'})`);
    }

  } catch (error) {
    console.error('File operation error:', error.message);
  }
}

demonstrateFileOperations();

Path Module

javascript
// path-operations.js
// Tour of the path module's building, parsing, and normalizing helpers.
const path = require('path');
const os = require('os'); // NOTE(review): unused in this example; remove if unneeded

console.log('=== Path Operations ===');

// Building paths: join concatenates segments; resolve produces an
// absolute path relative to the current working directory.
console.log('Join paths:', path.join('/users', 'john', 'documents', 'file.txt'));
console.log('Resolve path:', path.resolve('..', 'project', 'src'));

// Decomposing a path into its parts.
const samplePath = '/users/john/documents/report.pdf';
console.log('Parse path:', path.parse(samplePath));
console.log('Directory name:', path.dirname(samplePath));
console.log('Base name:', path.basename(samplePath));
console.log('Extension:', path.extname(samplePath));

// Platform-specific separators ('/' vs '\' for segments, ':' vs ';' for PATH lists).
console.log('Platform separator:', path.sep);
console.log('Path delimiter:', path.delimiter);

// Collapsing '..', '.', and duplicate separators.
console.log('Normalize:', path.normalize('/users//john/../jane/./documents'));

// Path needed to get from one location to another.
console.log('Relative path:', path.relative('/users/john', '/users/jane/documents'));

// Absolute vs relative detection.
console.log('Is absolute:', path.isAbsolute('/users/john'));
console.log('Is absolute:', path.isAbsolute('./relative/path'));

OS Module

javascript
// system-info.js
const os = require('os');

// Collects a human-readable snapshot of host hardware and OS details.
// Memory sizes are reported in GB and uptime in hours, both rounded to
// two decimal places.
function getSystemInfo() {
  // bytes -> GB, rounded to two decimals.
  const toGB = (bytes) => Math.round(bytes / 1024 / 1024 / 1024 * 100) / 100;

  return {
    platform: os.platform(),
    architecture: os.arch(),
    cpus: os.cpus().length,
    totalMemory: `${toGB(os.totalmem())} GB`,
    freeMemory: `${toGB(os.freemem())} GB`,
    uptime: `${Math.round(os.uptime() / 3600 * 100) / 100} hours`,
    hostname: os.hostname(),
    userInfo: os.userInfo(),
    networkInterfaces: Object.keys(os.networkInterfaces()),
    homeDirectory: os.homedir(),
    tempDirectory: os.tmpdir()
  };
}

console.log('System Information:');
console.log(JSON.stringify(getSystemInfo(), null, 2));

Streams

Streams are a powerful feature for handling data efficiently:

Readable Streams

javascript
// readable-stream.js
const { Readable } = require('stream');
const fs = require('fs');

// Custom readable stream
// Readable stream that emits the integers 0..9, one "Number: n\n" line
// per chunk, then ends.
class NumberStream extends Readable {
  constructor(options) {
    super(options);
    this.current = 0;
    this.max = 10; // exclusive upper bound
  }

  // Invoked by the stream machinery whenever it wants more data.
  _read() {
    if (this.current >= this.max) {
      this.push(null); // signal end-of-stream
      return;
    }
    this.push(`Number: ${this.current}\n`);
    this.current += 1;
  }
}

// Using the custom stream
const numberStream = new NumberStream();
numberStream.on('data', (chunk) => {
  console.log('Received:', chunk.toString().trim());
});

numberStream.on('end', () => {
  console.log('Stream ended');
});

// File stream example
const fileStream = fs.createReadStream('package.json');
fileStream.on('data', (chunk) => {
  console.log(`Received ${chunk.length} bytes`);
});

// Fix: without an 'error' listener, a missing or unreadable package.json
// emits an unhandled 'error' event and crashes the process.
fileStream.on('error', (error) => {
  console.error('Failed to read package.json:', error.message);
});

Writable Streams

javascript
// writable-stream.js
const { Writable } = require('stream');
const fs = require('fs');

// Custom writable stream
class LogStream extends Writable {
  constructor(options) {
    super(options);
    this.logFile = fs.createWriteStream('app.log', { flags: 'a' });
  }

  _write(chunk, encoding, callback) {
    const timestamp = new Date().toISOString();
    const logEntry = `[${timestamp}] ${chunk.toString()}`;
    
    console.log('Writing to log:', logEntry.trim());
    this.logFile.write(logEntry, callback);
  }
}

// Drive the logger with a few sample entries, then close it.
const logger = new LogStream();
for (const entry of ['Application started\n', 'User logged in\n', 'Processing request\n']) {
  logger.write(entry);
}
logger.end();

Transform Streams

javascript
// transform-stream.js
const { Transform } = require('stream');

// Custom transform stream
// Transform stream that upper-cases every chunk passing through it.
class UpperCaseTransform extends Transform {
  _transform(chunk, encoding, callback) {
    // First callback argument is the error slot; null signals success.
    callback(null, String(chunk).toUpperCase());
  }
}

// Pipeline example
const fs = require('fs');
const { pipeline } = require('stream');

// read file -> transform to uppercase -> write to new file.
// Fix: stream.pipeline (unlike a bare .pipe chain) forwards errors from
// every stage to one callback and destroys all streams on failure, so a
// missing input.txt no longer crashes with an unhandled 'error' event.
pipeline(
  fs.createReadStream('input.txt'),
  new UpperCaseTransform(),
  fs.createWriteStream('output.txt'),
  (error) => {
    if (error) {
      console.error('File transformation failed:', error.message);
    } else {
      console.log('File transformation complete');
    }
  }
);

Buffer and Binary Data

javascript
// buffer-operations.js
// Buffer basics: creation, writing, concatenation, and binary decoding.
console.log('=== Buffer Operations ===');

// Three ways to create a buffer.
const buf1 = Buffer.from('Hello World', 'utf8'); // from a string
const buf2 = Buffer.alloc(10);                   // 10 zero-filled bytes
const buf3 = Buffer.allocUnsafe(10);             // uninitialized — may hold stale memory

console.log('Buffer from string:', buf1);
console.log('Buffer length:', buf1.length);
console.log('Buffer as string:', buf1.toString());

// Write a string into the zero-filled buffer (truncates past 10 bytes).
buf2.write('Node.js');
console.log('Written buffer:', buf2.toString());

// Concatenate several buffers into one.
const combined = Buffer.concat([buf1, Buffer.from(' - '), buf2]);
console.log('Combined buffer:', combined.toString());

// Decode raw bytes: 0x48 0x65 0x6c 0x6c 0x6f spells "Hello" in ASCII.
const binaryData = Buffer.from([0x48, 0x65, 0x6c, 0x6c, 0x6f]);
console.log('Binary to string:', binaryData.toString());

// Round-trip JSON through a buffer.
const jsonBuffer = Buffer.from(JSON.stringify({ message: 'Hello' }));
console.log('JSON buffer:', jsonBuffer);
console.log('Parsed JSON:', JSON.parse(jsonBuffer.toString()));

Error Handling Patterns

Error-First Callbacks

javascript
// error-handling.js
const fs = require('fs');

// Traditional callback pattern
// Wraps fs.readFile in the classic Node error-first callback contract:
// callback(error, null) on failure, callback(null, data) on success.
function readFileCallback(filename, callback) {
  fs.readFile(filename, 'utf8', (error, data) => {
    if (error) {
      callback(error, null);
      return;
    }
    callback(null, data);
  });
}

// Usage: the missing file exercises the error branch.
readFileCallback('nonexistent.txt', (error, data) => {
  if (!error) {
    console.log('File data:', data);
  } else {
    console.error('Callback error:', error.message);
  }
});

Promise-based Error Handling

javascript
// promise-errors.js
const fs = require('fs').promises;

// Reads a file and reports the outcome as a result object instead of
// throwing: { success: true, data } or { success: false, error }.
async function safeFileOperation(filename) {
  try {
    return { success: true, data: await fs.readFile(filename, 'utf8') };
  } catch (error) {
    return { success: false, error: error.message };
  }
}

// Usage: branch on the result object rather than using try/catch.
safeFileOperation('package.json').then((result) => {
  if (!result.success) {
    console.error('Error:', result.error);
  } else {
    console.log('File read successfully');
  }
});

Global Error Handling

javascript
// global-error-handling.js
// Process-wide last-resort error handlers plus graceful-shutdown hooks.

// Synchronous errors nobody caught: log and exit non-zero. Process state
// may be corrupted at this point, so restarting is the safe move.
process.on('uncaughtException', (error) => {
  console.error('Uncaught Exception:', error);
  process.exit(1);
});

// Rejected promises that never got a .catch()/await handler.
process.on('unhandledRejection', (reason, promise) => {
  console.error('Unhandled Rejection at:', promise, 'reason:', reason);
  process.exit(1);
});

// Graceful shutdown on Ctrl+C (SIGINT) and termination requests (SIGTERM).
const shutdown = (signal) => {
  console.log(`Received ${signal}. Graceful shutdown...`);
  process.exit(0);
};

process.on('SIGINT', () => shutdown('SIGINT'));

process.on('SIGTERM', () => shutdown('SIGTERM'));

Performance Considerations

Memory Management

javascript
// memory-management.js

// Prints every process.memoryUsage() field (rss, heapTotal, heapUsed,
// external, ...) converted to MB and rounded to two decimal places.
function demonstrateMemoryUsage() {
  const usage = process.memoryUsage();

  console.log('Memory Usage:');
  for (const [key, bytes] of Object.entries(usage)) {
    console.log(`${key}: ${Math.round(bytes / 1024 / 1024 * 100) / 100} MB`);
  }
}

// Sample the heap before and after allocating (then releasing) a large array.
console.log('Initial memory usage:');
demonstrateMemoryUsage();

// Allocate roughly a million string references.
const largeArray = new Array(1000000).fill('data');
console.log('\nAfter creating large array:');
demonstrateMemoryUsage();

// Drop the references so GC can reclaim them.
largeArray.length = 0;
// Only has an effect when node runs with --expose-gc.
if (global.gc) {
  global.gc();
}

console.log('\nAfter cleanup:');
demonstrateMemoryUsage();

Next Steps

In the next chapter, we'll explore project structure and organization patterns that help build maintainable Node.js applications.

Practice Exercises

  1. Create a custom readable stream that generates random numbers
  2. Build a file watcher using fs.watch() that logs file changes
  3. Implement a simple event emitter for a chat application
  4. Create a buffer-based data parser for a custom file format

Key Takeaways

  • The event loop enables non-blocking I/O operations
  • CommonJS and ES6 modules provide different ways to organize code
  • Core modules (fs, path, os) provide essential system functionality
  • Streams enable efficient processing of large data sets
  • Buffers handle binary data effectively
  • Proper error handling is crucial for robust applications
  • Understanding memory management helps optimize performance

Content is for learning and research only.