Bun Performance Optimization
Bun is already very fast, but you can further improve performance with the right optimization strategies. This chapter introduces performance tuning tips for Bun.
Why Bun is Fast
Architecture Advantages
┌─────────────────────────────────────────┐
│ Bun Architecture │
├─────────────────────────────────────────┤
│ Core written in Zig language │
│ JavaScriptCore engine │
│ Optimized system calls │
│ Built-in features reduce dependencies │
└─────────────────────────────────────────┘

Performance Comparison
| Operation | Node.js | Bun | Improvement |
|---|---|---|---|
| Startup time | 40ms | 4ms | 10x |
| Package install | 30s | 1s | 30x |
| HTTP requests/sec | 50k | 150k | 3x |
| File read | baseline | 3x faster | 3x |
HTTP Server Optimization
Basic Optimization
typescript
// Minimal Bun HTTP server: returning a Response synchronously keeps the
// hot path free of an unnecessary Promise allocation per request.
const server = Bun.serve({
port: 3000,
// Use concise responses
fetch(request) {
// ✅ Return Response directly
return new Response("Hello!");
// ❌ Avoid unnecessary async
// return Promise.resolve(new Response("Hello!"));
},
});

Static Response Caching
typescript
// Pre-create common responses
// NOTE(review): per the fetch spec a Response body is one-shot; returning the
// same instance on every request relies on Bun buffering small string bodies —
// confirm against the Bun.serve documentation before copying this pattern.
const notFoundResponse = new Response("Not Found", { status: 404 });
const okResponse = new Response("OK");
const jsonHeaders = { "Content-Type": "application/json" };
const server = Bun.serve({
port: 3000,
fetch(request) {
const url = new URL(request.url);
if (url.pathname === "/health") {
return okResponse; // Reuse response object
}
if (url.pathname === "/api/data") {
// Pre-computed JSON
// NOTE(review): cachedJsonString is assumed to be defined elsewhere.
return new Response(cachedJsonString, { headers: jsonHeaders });
}
return notFoundResponse;
},
});

Streaming Responses
typescript
// Use streaming responses for large data
// Chunks are enqueued incrementally, so the full payload never sits in memory.
const server = Bun.serve({
port: 3000,
fetch(request) {
const stream = new ReadableStream({
async start(controller) {
for (let i = 0; i < 1000; i++) {
controller.enqueue(`data: ${i}\n`);
// Don't block other requests
// Bun.sleep(0) yields to the event loop every 100 iterations.
if (i % 100 === 0) {
await Bun.sleep(0);
}
}
controller.close();
},
});
return new Response(stream, {
headers: { "Content-Type": "text/event-stream" },
});
},
});

File I/O Optimization
Using Bun.file()
typescript
// ✅ Use Bun.file (fastest)
// Bun.file returns a lazy file handle; .text() reads and decodes in one call.
const content = await Bun.file("./data.txt").text();
// ❌ Avoid Node.js fs (slower)
// import { readFile } from "fs/promises";
// const content = await readFile("./data.txt", "utf-8");

Batch File Operations
typescript
// ✅ Read multiple files in parallel
const files = ["a.txt", "b.txt", "c.txt"];
// Promise.all starts all reads before awaiting any of them.
const contents = await Promise.all(
files.map(f => Bun.file(f).text())
);
// ❌ Avoid serial reads
// for (const f of files) {
// contents.push(await Bun.file(f).text());
// }

Streaming Writes for Large Files
typescript
// Use writer for large files
// The incremental writer buffers internally instead of building one huge string.
const writer = Bun.file("./large-output.txt").writer();
for (let i = 0; i < 1000000; i++) {
writer.write(`Line ${i}\n`);
// Periodically flush buffer
// Awaiting flush bounds memory use and applies backpressure to the loop.
if (i % 10000 === 0) {
await writer.flush();
}
}
await writer.end();

Database Optimization
SQLite Optimization
typescript
import { Database } from "bun:sqlite";
const db = new Database("app.db");
// Enable WAL mode
// WAL lets readers proceed while a write is in progress.
db.run("PRAGMA journal_mode = WAL");
// NORMAL syncs less often than the default FULL — faster, still safe with WAL.
db.run("PRAGMA synchronous = NORMAL");
db.run("PRAGMA cache_size = -64000"); // 64MB cache (negative value = size in KiB)
db.run("PRAGMA temp_store = MEMORY");
// Use prepared statements
// Compiled once, reused on every run() — avoids re-parsing the SQL.
const insert = db.prepare("INSERT INTO items (name) VALUES (?)");
const select = db.prepare("SELECT * FROM items WHERE id = ?");
// Batch inserts using transactions
// One transaction turns N per-insert commits into a single commit.
const insertMany = db.transaction((items: string[]) => {
for (const item of items) {
insert.run(item);
}
});
});

Connection Pool Pattern
typescript
// Create multiple connections for high concurrency scenarios
// Round-robins work across several SQLite handles; WAL mode (set per
// connection below) allows concurrent readers.
class DatabasePool {
private connections: Database[] = [];
private index = 0;
constructor(path: string, size: number = 4) {
for (let i = 0; i < size; i++) {
const db = new Database(path);
db.run("PRAGMA journal_mode = WAL");
this.connections.push(db);
}
}
// Returns the next connection in round-robin order.
get(): Database {
this.index = (this.index + 1) % this.connections.length;
return this.connections[this.index];
}
// Closes every pooled connection.
close() {
for (const conn of this.connections) {
conn.close();
}
}
}

Memory Optimization
Reducing Memory Allocation
typescript
// ✅ Reuse Buffer
const buffer = new Uint8Array(1024);

// Copies up to 1024 bytes of `data` into the pre-allocated buffer and hands
// it to processBuffer, avoiding a fresh allocation per call.
// NOTE(review): not safe if processData can run re-entrantly/concurrently,
// since every call shares the one module-level buffer.
function processData(data: Uint8Array) {
  // subarray() creates a *view* (no copy); the original slice() allocated a
  // new array on every call, defeating the point of the shared buffer.
  buffer.set(data.subarray(0, 1024));
  return processBuffer(buffer);
}

// ❌ Avoid creating new objects frequently
// function processData(data) {
// const buffer = new Uint8Array(1024); // Allocated each time
// ...
// }

Using --smol Mode
bash
# Reduce memory usage (suitable for constrained environments)
bun --smol app.ts

Monitoring Memory Usage
typescript
// Periodically check memory
// Logs heap/external usage every 10 s via the Node-compatible process.memoryUsage().
setInterval(() => {
const usage = process.memoryUsage();
console.log({
heapUsed: `${(usage.heapUsed / 1024 / 1024).toFixed(2)} MB`,
heapTotal: `${(usage.heapTotal / 1024 / 1024).toFixed(2)} MB`,
external: `${(usage.external / 1024 / 1024).toFixed(2)} MB`,
});
}, 10000);

Concurrency Optimization
Using Workers
typescript
// main.ts
// Spawns a worker and posts it a job; the result comes back via onmessage.
const worker = new Worker("./worker.ts");
worker.postMessage({ type: "process", data: [1, 2, 3, 4, 5] });
worker.onmessage = (event) => {
console.log("Result:", event.data);
};
// worker.ts
// Runs on a separate thread, so heavy work here does not block the main loop.
self.onmessage = (event) => {
const { type, data } = event.data;
if (type === "process") {
const result = data.map((n: number) => n * 2);
self.postMessage(result);
}
};

Parallel Processing
typescript
// Use Promise.all for parallel processing
// Splits items into batches of 100 and processes the batches in parallel.
// NOTE(review): Promise.all still launches *every* batch at once — if the
// intent is to cap concurrency, await batches sequentially or use a semaphore.
async function processItems(items: string[]) {
const batchSize = 100;
const batches = [];
for (let i = 0; i < items.length; i += batchSize) {
batches.push(items.slice(i, i + batchSize));
}
const results = await Promise.all(
batches.map(batch => processBatch(batch))
);
return results.flat();
}

Build Optimization
Production Build
typescript
await Bun.build({
entrypoints: ["./src/index.ts"],
outdir: "./dist",
// Production optimization
minify: true,
// Tree Shaking
// NOTE(review): Bun tree-shakes automatically; verify against the Bun.build
// docs that a `treeshaking` option is actually recognized.
treeshaking: true,
// Define environment
// Inlining NODE_ENV lets the minifier strip development-only branches.
define: {
"process.env.NODE_ENV": JSON.stringify("production"),
},
});

Code Splitting
typescript
// Emits shared chunks so entry points can load common code on demand.
await Bun.build({
entrypoints: ["./src/index.ts"],
outdir: "./dist",
splitting: true, // Automatic code splitting
});

Network Optimization
Connection Reuse
typescript
// Reuse HTTP connections
// NOTE(review): this object is never passed to anything — Bun's fetch does not
// take a Node-style agent; it is shown only to illustrate keep-alive settings.
const agent = {
keepAlive: true,
keepAliveMsecs: 30000,
};
// Bun reuses connections by default, but can be configured

Compressing Responses
typescript
const server = Bun.serve({
port: 3000,
async fetch(request) {
const data = JSON.stringify(largeData);
// Check if client supports compression
const acceptEncoding = request.headers.get("accept-encoding") || "";
if (acceptEncoding.includes("gzip")) {
const compressed = Bun.gzipSync(data);
return new Response(compressed, {
headers: {
"Content-Type": "application/json",
"Content-Encoding": "gzip",
},
});
}
return Response.json(largeData);
},
});

Performance Analysis
Built-in Profiling
bash
# View detailed startup time
bun --timing app.ts

Custom Timing
typescript
// Use console.time
// console.time/console.timeEnd print the elapsed time under the given label.
console.time("operation");
await heavyOperation();
console.timeEnd("operation");
// Or use performance
// performance.now() returns a monotonic, high-resolution timestamp in ms.
const start = performance.now();
await heavyOperation();
const duration = performance.now() - start;
console.log(`Duration: ${duration.toFixed(2)}ms`);

Benchmark Testing
typescript
// bench.ts
import { bench, run } from "bun:test";
bench("string concatenation", () => {
let s = "";
for (let i = 0; i < 1000; i++) {
s += "x";
}
});
bench("array join", () => {
const arr = [];
for (let i = 0; i < 1000; i++) {
arr.push("x");
}
arr.join("");
});
await run();

Run:
bash
bun run bench.ts

Best Practices Checklist
✅ Recommended Practices
Use Bun Native APIs
- Bun.file() instead of fs
- Bun.serve() instead of http
- bun:sqlite instead of third-party libraries
Reduce Allocations
- Reuse objects and buffers
- Use object pools
- Avoid unnecessary copies
Batch Processing
- Use transactions for database operations
- Batch file operations
- Reduce I/O counts
Parallel Processing
- Promise.all for parallel requests
- Workers for CPU-intensive tasks
- Streaming for large data
Proper Configuration
- Enable compression
- Use WAL mode
- Configure cache
❌ Practices to Avoid
- Frequently creating new objects
- Synchronous blocking operations
- Ignoring connection reuse
- Unoptimized database queries
- Unnecessary data copying
Summary
This chapter introduced:
- ✅ Bun's performance advantages
- ✅ HTTP server optimization
- ✅ File and database optimization
- ✅ Memory and concurrency optimization
- ✅ Build and network optimization
- ✅ Performance analysis tools
- ✅ Best practices checklist
Next Steps
Continue reading Node.js Compatibility to learn how to migrate from Node.js to Bun.