Advanced TypeScript Optimization Techniques
7 min read
Table of Contents
- Data Structure Optimization
- Algorithmic Optimization
- Advanced Caching Strategies
- Worker Thread Optimization
- Network Optimization
Data Structure Optimization
Optimized Set Operations
```typescript
class BitwiseSet {
  // A single 32-bit number holds the membership bits, so values must be in the range 0–31.
  private data: number = 0;

  add(value: number): void {
    this.data |= (1 << value);
  }

  remove(value: number): void {
    this.data &= ~(1 << value);
  }

  has(value: number): boolean {
    return (this.data & (1 << value)) !== 0;
  }

  intersection(other: BitwiseSet): BitwiseSet {
    const result = new BitwiseSet();
    result.data = this.data & other.data;
    return result;
  }

  union(other: BitwiseSet): BitwiseSet {
    const result = new BitwiseSet();
    result.data = this.data | other.data;
    return result;
  }
}
```
```typescript
// Usage
const set1 = new BitwiseSet();
set1.add(1);
set1.add(3);

const set2 = new BitwiseSet();
set2.add(2);
set2.add(3);

const intersection = set1.intersection(set2);
```
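Because the whole set lives in a single 32-bit number, this version only covers values 0–31. When the value range is larger, the same trick carries over to bigint; the following is an illustrative sketch (assuming an ES2020 target for bigint literals), trading some raw speed for an unbounded range:

```typescript
// Sketch: the same bitwise-set idea backed by bigint, so indices are not capped at 31.
class BigBitwiseSet {
  private data = 0n;

  add(value: number): void {
    this.data |= 1n << BigInt(value);
  }

  remove(value: number): void {
    this.data &= ~(1n << BigInt(value));
  }

  has(value: number): boolean {
    return (this.data & (1n << BigInt(value))) !== 0n;
  }
}
```

The intersection and union methods carry over unchanged, using `&` and `|` on the bigint fields.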
Trie Data Structure
```typescript
class TrieNode {
  children: Map<string, TrieNode> = new Map();
  isEndOfWord = false;
}

class Trie {
  private root = new TrieNode();

  insert(word: string): void {
    let current = this.root;
    for (const char of word) {
      let node = current.children.get(char);
      if (!node) {
        node = new TrieNode();
        current.children.set(char, node);
      }
      current = node;
    }
    current.isEndOfWord = true;
  }

  search(word: string): boolean {
    const node = this.traverse(word);
    return node !== null && node.isEndOfWord;
  }

  startsWith(prefix: string): boolean {
    return this.traverse(prefix) !== null;
  }

  private traverse(str: string): TrieNode | null {
    let current = this.root;
    for (const char of str) {
      const node = current.children.get(char);
      if (!node) return null;
      current = node;
    }
    return current;
  }
}
```
```typescript
// Usage for autocomplete
class Autocomplete {
  private trie = new Trie();
  private cache = new Map<string, string[]>();

  constructor(words: string[]) {
    words.forEach(word => this.trie.insert(word));
  }

  suggest(prefix: string): string[] {
    if (this.cache.has(prefix)) {
      return this.cache.get(prefix)!;
    }

    const suggestions: string[] = [];
    this.findAllWords(prefix, suggestions);

    this.cache.set(prefix, suggestions);
    return suggestions;
  }

  private findAllWords(prefix: string, result: string[]): void {
    // Implementation (see the wordsWithPrefix sketch below)
  }
}
```
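The findAllWords helper is only a stub. One illustrative way to complete it, assuming you are free to extend the Trie class above, is a method that walks every word under the prefix node and lets findAllWords delegate to it:

```typescript
// Sketch: add this method to the Trie class so it can reuse the private
// traverse() helper and the TrieNode children map.
wordsWithPrefix(prefix: string): string[] {
  const start = this.traverse(prefix);
  if (!start) return [];

  const words: string[] = [];
  const walk = (node: TrieNode, path: string): void => {
    if (node.isEndOfWord) words.push(path);
    for (const [char, child] of node.children) {
      walk(child, path + char);
    }
  };
  walk(start, prefix);
  return words;
}
```

With that method on Trie, findAllWords(prefix, result) reduces to result.push(...this.trie.wordsWithPrefix(prefix)).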
Algorithmic Optimization
Parallel Processing with Web Workers
```typescript
// worker.ts — runs in a dedicated worker (compile with the "webworker" lib so self has worker typings)
interface WorkerMessage {
  type: 'process';
  data: number[];
}

self.onmessage = (e: MessageEvent<WorkerMessage>) => {
  if (e.data.type === 'process') {
    const result = processData(e.data.data);
    self.postMessage(result);
  }
};

function processData(data: number[]): number[] {
  return data.map(x => x * x);
}
```
```typescript
// main.ts
class ParallelProcessor {
  private workers: Worker[] = [];

  constructor(private workerCount: number) {
    for (let i = 0; i < workerCount; i++) {
      // In practice this must be a URL the browser can load, e.g. the compiled worker.js
      const worker = new Worker('worker.ts');
      this.workers.push(worker);
    }
  }

  async process(data: number[]): Promise<number[]> {
    const chunks = this.splitIntoChunks(data, this.workerCount);
    const results = await Promise.all(
      chunks.map((chunk, index) =>
        this.processChunk(chunk, this.workers[index % this.workers.length])
      )
    );
    // Promise.all preserves chunk order, so flattening restores the original order
    return results.flat();
  }

  private processChunk(chunk: number[], worker: Worker): Promise<number[]> {
    return new Promise((resolve) => {
      worker.onmessage = (e) => resolve(e.data);
      worker.postMessage({ type: 'process', data: chunk });
    });
  }

  private splitIntoChunks<T>(array: T[], count: number): T[][] {
    const chunks: T[][] = [];
    const size = Math.ceil(array.length / count);
    for (let i = 0; i < array.length; i += size) {
      chunks.push(array.slice(i, i + size));
    }
    return chunks;
  }
}
```
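An illustrative usage sketch, assuming the worker script has been compiled to a URL the browser can load:

```typescript
// Usage sketch: square 1–8 across the available cores.
const processor = new ParallelProcessor(navigator.hardwareConcurrency || 4);

processor.process([1, 2, 3, 4, 5, 6, 7, 8]).then(squares => {
  console.log(squares); // [1, 4, 9, 16, 25, 36, 49, 64]
});
```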
Incremental Processing
```typescript
class IncrementalProcessor<T, R> {
  private queue: T[] = [];
  private processing = false;
  private results: R[] = [];
  private currentIndex = 0;

  constructor(
    private processor: (item: T) => R,
    private options: {
      batchSize: number;
      maxTimePerBatch: number;
    }
  ) {}

  add(items: T[]): void {
    this.queue.push(...items);
    if (!this.processing) {
      this.processQueue();
    }
  }

  private async processQueue(): Promise<void> {
    this.processing = true;

    while (this.currentIndex < this.queue.length) {
      const startTime = performance.now();
      const batch = this.queue.slice(
        this.currentIndex,
        this.currentIndex + this.options.batchSize
      );

      for (const item of batch) {
        this.results.push(this.processor(item));
        this.currentIndex++;

        if (performance.now() - startTime > this.options.maxTimePerBatch) {
          // Yield to main thread
          await new Promise(resolve => setTimeout(resolve, 0));
          break;
        }
      }
    }

    this.processing = false;
  }

  getResults(): R[] {
    return this.results;
  }
}
```
```typescript
// Usage
const processor = new IncrementalProcessor<number, number>(
  (x) => x * x,
  { batchSize: 1000, maxTimePerBatch: 16 }
);
```
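The usage above stops at construction. The following continuation (illustrative, with an arbitrary 10,000-item workload) shows how items go in and results come back out:

```typescript
// Feed work in; small workloads usually drain within the same tick,
// larger ones yield to the main thread roughly every 16 ms.
processor.add(Array.from({ length: 10_000 }, (_, i) => i));

setTimeout(() => {
  console.log(processor.getResults().length); // 10000 once the queue has drained
}, 100);
```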
Advanced Caching Strategies
LRU Cache with Time-Based Expiration
```typescript
class CacheEntry<T> {
  constructor(
    public value: T,
    public expiresAt: number
  ) {}

  isExpired(): boolean {
    return Date.now() > this.expiresAt;
  }
}

class LRUCache<K, V> {
  private cache = new Map<K, CacheEntry<V>>();
  private readonly maxSize: number;
  private readonly ttl: number;

  constructor(maxSize: number, ttlMs: number) {
    this.maxSize = maxSize;
    this.ttl = ttlMs;
  }

  get(key: K): V | undefined {
    const entry = this.cache.get(key);
    if (!entry) return undefined;

    if (entry.isExpired()) {
      this.cache.delete(key);
      return undefined;
    }

    // Refresh entry by removing and adding it again (Map preserves insertion order)
    this.cache.delete(key);
    this.cache.set(key, entry);
    return entry.value;
  }

  set(key: K, value: V): void {
    if (this.cache.has(key)) {
      // Updating an existing key should not evict anything else
      this.cache.delete(key);
    } else if (this.cache.size >= this.maxSize) {
      // Remove the least recently used entry (the first key in insertion order)
      const oldestKey = this.cache.keys().next().value;
      if (oldestKey !== undefined) {
        this.cache.delete(oldestKey);
      }
    }

    this.cache.set(
      key,
      new CacheEntry(value, Date.now() + this.ttl)
    );
  }

  clear(): void {
    this.cache.clear();
  }

  cleanup(): void {
    for (const [key, entry] of this.cache.entries()) {
      if (entry.isExpired()) {
        this.cache.delete(key);
      }
    }
  }
}
```
Two-Level Cache
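The two-level cache below delegates to a DiskCache class that is not defined in this post. A minimal Node.js sketch, assuming JSON-serializable keys and values and one file per entry, could look like this:

```typescript
import { promises as fs } from 'fs';
import * as path from 'path';
import { createHash } from 'crypto';

// Minimal sketch of the DiskCache used below: one JSON file per key under a cache directory.
class DiskCache<K, V> {
  constructor(private dir: string) {}

  private fileFor(key: K): string {
    const hash = createHash('sha1').update(JSON.stringify(key)).digest('hex');
    return path.join(this.dir, `${hash}.json`);
  }

  async get(key: K): Promise<V | undefined> {
    try {
      const raw = await fs.readFile(this.fileFor(key), 'utf8');
      return JSON.parse(raw) as V;
    } catch {
      return undefined; // a missing file or parse error counts as a cache miss
    }
  }

  async set(key: K, value: V): Promise<void> {
    await fs.mkdir(this.dir, { recursive: true });
    await fs.writeFile(this.fileFor(key), JSON.stringify(value), 'utf8');
  }
}
```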
```typescript
class TwoLevelCache<K, V> {
  private memoryCache: LRUCache<K, V>;
  private diskCache: DiskCache<K, V>;
  private loading = new Set<K>();
  private callbacks = new Map<K, ((value: V | undefined) => void)[]>();

  constructor(
    memoryCacheSize: number,
    memoryTtl: number,
    diskCachePath: string
  ) {
    this.memoryCache = new LRUCache(memoryCacheSize, memoryTtl);
    this.diskCache = new DiskCache(diskCachePath);
  }

  async get(key: K): Promise<V | undefined> {
    // Check memory cache first
    const memValue = this.memoryCache.get(key);
    if (memValue !== undefined) {
      return memValue;
    }

    // If already loading, wait for result
    if (this.loading.has(key)) {
      return new Promise((resolve) => {
        const callbacks = this.callbacks.get(key) || [];
        callbacks.push(resolve);
        this.callbacks.set(key, callbacks);
      });
    }

    this.loading.add(key);

    try {
      // Check disk cache
      const diskValue = await this.diskCache.get(key);
      if (diskValue !== undefined) {
        this.memoryCache.set(key, diskValue);
      }

      // Notify waiting callbacks
      const callbacks = this.callbacks.get(key) || [];
      callbacks.forEach(cb => cb(diskValue));
      this.callbacks.delete(key);

      return diskValue;
    } finally {
      this.loading.delete(key);
    }
  }

  async set(key: K, value: V): Promise<void> {
    this.memoryCache.set(key, value);
    await this.diskCache.set(key, value);
  }
}
```
Worker Thread Optimization
Thread Pool
```typescript
class WorkerPool {
  private workers: Worker[] = [];
  private queue: Array<{
    task: any;
    resolve: (value: any) => void;
    reject: (error: any) => void;
  }> = [];
  private availableWorkers: Worker[] = [];

  constructor(
    private workerScript: string,
    private poolSize: number
  ) {
    for (let i = 0; i < poolSize; i++) {
      // Message and error handlers are attached per task in runTask()
      const worker = new Worker(workerScript);
      this.workers.push(worker);
      this.availableWorkers.push(worker);
    }
  }

  execute<T>(task: any): Promise<T> {
    return new Promise((resolve, reject) => {
      if (this.availableWorkers.length > 0) {
        const worker = this.availableWorkers.pop()!;
        this.runTask(worker, task, resolve, reject);
      } else {
        this.queue.push({ task, resolve, reject });
      }
    });
  }

  private runTask(
    worker: Worker,
    task: any,
    resolve: (value: any) => void,
    reject: (error: any) => void
  ): void {
    worker.onmessage = (e) => {
      resolve(e.data);
      this.makeWorkerAvailable(worker);
    };
    worker.onerror = (e) => {
      reject(e);
      this.makeWorkerAvailable(worker);
    };
    worker.postMessage(task);
  }

  private makeWorkerAvailable(worker: Worker): void {
    if (this.queue.length > 0) {
      const { task, resolve, reject } = this.queue.shift()!;
      this.runTask(worker, task, resolve, reject);
    } else {
      this.availableWorkers.push(worker);
    }
  }

  terminate(): void {
    this.workers.forEach(worker => worker.terminate());
    this.workers = [];
    this.availableWorkers = [];
    this.queue = [];
  }
}
```
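An illustrative usage sketch, reusing the message shape from the worker.ts example earlier (the worker.js path is a placeholder):

```typescript
// Four workers, three tasks; queued tasks run as workers free up.
const pool = new WorkerPool('worker.js', 4);

Promise.all(
  [[1, 2], [3, 4], [5, 6]].map(chunk =>
    pool.execute<number[]>({ type: 'process', data: chunk })
  )
).then(results => {
  console.log(results); // [[1, 4], [9, 16], [25, 36]]
  pool.terminate();
});
```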
Network Optimization
Request Batching and Deduplication
```typescript
class RequestBatcher {
  private batch: Map<string, {
    params: any;
    callbacks: Array<(result: any) => void>;
  }> = new Map();
  private timer: ReturnType<typeof setTimeout> | null = null;

  constructor(
    private batchHandler: (requests: any[]) => Promise<any[]>,
    private options: {
      maxBatchSize: number;
      maxWaitTime: number;
    }
  ) {}

  request<T>(key: string, params: any): Promise<T> {
    return new Promise((resolve) => {
      const existing = this.batch.get(key);
      if (existing) {
        // Deduplicate: identical keys share one entry and one network round trip
        existing.callbacks.push(resolve);
      } else {
        this.batch.set(key, { params, callbacks: [resolve] });
      }

      this.scheduleBatch();
    });
  }

  private scheduleBatch(): void {
    if (this.timer) return;

    if (this.batch.size >= this.options.maxBatchSize) {
      this.processBatch();
    } else {
      this.timer = setTimeout(
        () => this.processBatch(),
        this.options.maxWaitTime
      );
    }
  }

  private async processBatch(): Promise<void> {
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }

    const entries = Array.from(this.batch.entries());
    this.batch.clear();

    if (entries.length === 0) return;

    try {
      const results = await this.batchHandler(
        entries.map(([_, { params }]) => params)
      );

      entries.forEach(([_, { callbacks }], index) => {
        callbacks.forEach(cb => cb(results[index]));
      });
    } catch (error) {
      // On failure, every pending caller resolves with undefined
      entries.forEach(([_, { callbacks }]) => {
        callbacks.forEach(cb => cb(undefined));
      });
    }
  }
}
```
```typescript
// Usage
const batcher = new RequestBatcher(
  async (requests) => {
    // Batch API call
    const results = await fetch('/api/batch', {
      method: 'POST',
      body: JSON.stringify(requests)
    });
    return results.json();
  },
  { maxBatchSize: 100, maxWaitTime: 50 }
);
```
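To complete the picture, here is an illustrative set of call sites; User, Order, and the request keys are made up for the example:

```typescript
interface User { id: number; name: string }
interface Order { id: number; total: number }

Promise.all([
  batcher.request<User>('user:42', { type: 'user', id: 42 }),
  batcher.request<User>('user:42', { type: 'user', id: 42 }), // same key: deduplicated into one entry
  batcher.request<Order>('order:7', { type: 'order', id: 7 }),
]).then(([user, duplicate, order]) => {
  // All three resolve from a single POST to /api/batch
  console.log(user.name, duplicate.name, order.total);
});
```

One assumption worth calling out: processBatch maps results back by index, so the batch endpoint must return its results in the same order as the requests it received.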
These advanced optimization techniques help you:
- Improve data structure efficiency
- Optimize algorithmic performance
- Implement sophisticated caching strategies
- Utilize worker threads effectively
- Optimize network requests