Monitoring with Hooks
Use cache hooks to integrate with metrics, logging, and monitoring systems.
Basic Metrics Integration
import { MemoryCache } from '@humanspeak/memory-cache'
// Cache wired to a generic metrics client: every hook forwards one or two
// counters so hit/miss/churn rates can be graphed, broken down by key prefix.
const cache = new MemoryCache<string>({
    maxSize: 1000,
    ttl: 5 * 60 * 1000, // 5 minutes
    hooks: {
        // Overall hit counter plus a per-prefix breakdown (e.g. "user:*").
        onHit({ key }) {
            metrics.increment('cache.hit')
            metrics.increment(`cache.hit.${extractPrefix(key)}`)
        },
        // The cache-reported reason is folded into the metric name.
        onMiss({ key, reason }) {
            metrics.increment('cache.miss')
            metrics.increment(`cache.miss.${reason}`)
        },
        // Distinguish fresh inserts from overwrites of an existing key.
        onSet({ key, isUpdate }) {
            metrics.increment(isUpdate ? 'cache.update' : 'cache.insert')
        },
        onEvict({ key }) {
            metrics.increment('cache.eviction')
        },
        onExpire({ key }) {
            metrics.increment('cache.expiration')
        },
        // Deletions are additionally tagged with their source.
        onDelete({ key, source }) {
            metrics.increment('cache.delete')
            metrics.increment(`cache.delete.${source}`)
        }
    }
})
// Map a namespaced cache key ("user:123") to its prefix ("user") for
// per-prefix metrics; an empty key falls back to 'unknown'.
function extractPrefix(key: string): string {
return key.split(':')[0] || 'unknown'
}
import { MemoryCache } from '@humanspeak/memory-cache'
// Cache wired to a generic metrics client: every hook forwards one or two
// counters so hit/miss/churn rates can be graphed, broken down by key prefix.
const cache = new MemoryCache<string>({
    maxSize: 1000,
    ttl: 5 * 60 * 1000, // 5 minutes
    hooks: {
        // Overall hit counter plus a per-prefix breakdown (e.g. "user:*").
        onHit({ key }) {
            metrics.increment('cache.hit')
            metrics.increment(`cache.hit.${extractPrefix(key)}`)
        },
        // The cache-reported reason is folded into the metric name.
        onMiss({ key, reason }) {
            metrics.increment('cache.miss')
            metrics.increment(`cache.miss.${reason}`)
        },
        // Distinguish fresh inserts from overwrites of an existing key.
        onSet({ key, isUpdate }) {
            metrics.increment(isUpdate ? 'cache.update' : 'cache.insert')
        },
        onEvict({ key }) {
            metrics.increment('cache.eviction')
        },
        onExpire({ key }) {
            metrics.increment('cache.expiration')
        },
        // Deletions are additionally tagged with their source.
        onDelete({ key, source }) {
            metrics.increment('cache.delete')
            metrics.increment(`cache.delete.${source}`)
        }
    }
})
// Map a namespaced cache key ("user:123") to its prefix ("user") for
// per-prefix metrics; an empty key falls back to 'unknown'.
function extractPrefix(key: string): string {
return key.split(':')[0] || 'unknown'
}
DataDog Integration
import { MemoryCache } from '@humanspeak/memory-cache'
import StatsD from 'hot-shots'
const dogstatsd = new StatsD()

// DataDog-style tagging: one metric name with an `operation` tag per event
// type, so a single dashboard widget can break down all cache activity.
const cache = new MemoryCache<unknown>({
    maxSize: 5000,
    ttl: 5 * 60 * 1000, // 5 minutes
    hooks: {
        onHit({ key }) {
            dogstatsd.increment('cache.operations', { operation: 'hit' })
        },
        // Misses additionally carry the reason reported by the cache.
        onMiss({ key, reason }) {
            dogstatsd.increment('cache.operations', { operation: 'miss', reason })
        },
        onEvict() {
            dogstatsd.increment('cache.operations', { operation: 'eviction' })
        },
        onExpire() {
            dogstatsd.increment('cache.operations', { operation: 'expiration' })
        }
    }
})
// Periodically report cache size
// Every 10s, push point-in-time gauges alongside the event counters above.
setInterval(() => {
dogstatsd.gauge('cache.size', cache.size())
const stats = cache.getStats()
// hits / (hits + misses) is NaN when both are 0; NaN is falsy, so the
// `|| 0` fallback reports a 0 hit rate until the first lookup occurs.
dogstatsd.gauge('cache.hit_rate', stats.hits / (stats.hits + stats.misses) || 0)
}, 10000)
import { MemoryCache } from '@humanspeak/memory-cache'
import StatsD from 'hot-shots'
const dogstatsd = new StatsD()

// DataDog-style tagging: one metric name with an `operation` tag per event
// type, so a single dashboard widget can break down all cache activity.
const cache = new MemoryCache<unknown>({
    maxSize: 5000,
    ttl: 5 * 60 * 1000, // 5 minutes
    hooks: {
        onHit({ key }) {
            dogstatsd.increment('cache.operations', { operation: 'hit' })
        },
        // Misses additionally carry the reason reported by the cache.
        onMiss({ key, reason }) {
            dogstatsd.increment('cache.operations', { operation: 'miss', reason })
        },
        onEvict() {
            dogstatsd.increment('cache.operations', { operation: 'eviction' })
        },
        onExpire() {
            dogstatsd.increment('cache.operations', { operation: 'expiration' })
        }
    }
})
// Periodically report cache size
// Every 10s, push point-in-time gauges alongside the event counters above.
setInterval(() => {
dogstatsd.gauge('cache.size', cache.size())
const stats = cache.getStats()
// hits / (hits + misses) is NaN when both are 0; NaN is falsy, so the
// `|| 0` fallback reports a 0 hit rate until the first lookup occurs.
dogstatsd.gauge('cache.hit_rate', stats.hits / (stats.hits + stats.misses) || 0)
}, 10000)
Prometheus Integration
import { MemoryCache } from '@humanspeak/memory-cache'
import { Counter, Gauge, register } from 'prom-client'
// Shared Prometheus metric families; the `cache_name` label tells the
// individual caches apart on a single dashboard.
const cacheHits = new Counter({
    name: 'cache_hits_total',
    help: 'Total cache hits',
    labelNames: ['cache_name']
})

// Misses additionally record the cache-reported reason.
const cacheMisses = new Counter({
    name: 'cache_misses_total',
    help: 'Total cache misses',
    labelNames: ['cache_name', 'reason']
})

const cacheEvictions = new Counter({
    name: 'cache_evictions_total',
    help: 'Total cache evictions',
    labelNames: ['cache_name']
})

// Gauge intended to hold the current entry count per cache.
const cacheSize = new Gauge({
    name: 'cache_size',
    help: 'Current cache size',
    labelNames: ['cache_name']
})
function createMonitoredCache<T>(name: string, options: { maxSize: number; ttl: number }) {
return new MemoryCache<T>({
...options,
hooks: {
onHit: () => cacheHits.inc({ cache_name: name }),
onMiss: ({ reason }) => cacheMisses.inc({ cache_name: name, reason }),
onEvict: () => cacheEvictions.inc({ cache_name: name }),
onSet: () => {
// Update size gauge after each set
// Note: This is approximate since we're inside the hook
}
}
})
}
// Usage
// Each cache reports under its own `cache_name` label.
const userCache = createMonitoredCache<User>('users', { maxSize: 1000, ttl: 60000 })
const productCache = createMonitoredCache<Product>('products', { maxSize: 500, ttl: 300000 })
import { MemoryCache } from '@humanspeak/memory-cache'
import { Counter, Gauge, register } from 'prom-client'
// Shared Prometheus metric families; the `cache_name` label tells the
// individual caches apart on a single dashboard.
const cacheHits = new Counter({
    name: 'cache_hits_total',
    help: 'Total cache hits',
    labelNames: ['cache_name']
})

// Misses additionally record the cache-reported reason.
const cacheMisses = new Counter({
    name: 'cache_misses_total',
    help: 'Total cache misses',
    labelNames: ['cache_name', 'reason']
})

const cacheEvictions = new Counter({
    name: 'cache_evictions_total',
    help: 'Total cache evictions',
    labelNames: ['cache_name']
})

// Gauge intended to hold the current entry count per cache.
const cacheSize = new Gauge({
    name: 'cache_size',
    help: 'Current cache size',
    labelNames: ['cache_name']
})
function createMonitoredCache<T>(name: string, options: { maxSize: number; ttl: number }) {
return new MemoryCache<T>({
...options,
hooks: {
onHit: () => cacheHits.inc({ cache_name: name }),
onMiss: ({ reason }) => cacheMisses.inc({ cache_name: name, reason }),
onEvict: () => cacheEvictions.inc({ cache_name: name }),
onSet: () => {
// Update size gauge after each set
// Note: This is approximate since we're inside the hook
}
}
})
}
// Usage
// Each cache reports under its own `cache_name` label.
const userCache = createMonitoredCache<User>('users', { maxSize: 1000, ttl: 60000 })
const productCache = createMonitoredCache<Product>('products', { maxSize: 500, ttl: 300000 })
Debug Logging
import { MemoryCache } from '@humanspeak/memory-cache'
// Development-time cache that logs every lifecycle event to the console.
// Evictions log at `warn` level because they hint that maxSize is too small.
const cache = new MemoryCache<unknown>({
maxSize: 100,
ttl: 60000,
hooks: {
onHit: ({ key, value }) => {
console.debug(`[CACHE HIT] ${key}`, { valueType: typeof value })
},
onMiss: ({ key, reason }) => {
console.debug(`[CACHE MISS] ${key} (${reason})`)
},
onSet: ({ key, isUpdate }) => {
console.debug(`[CACHE ${isUpdate ? 'UPDATE' : 'SET'}] ${key}`)
},
onEvict: ({ key }) => {
console.warn(`[CACHE EVICT] ${key} - consider increasing maxSize`)
},
onExpire: ({ key, source }) => {
console.debug(`[CACHE EXPIRE] ${key} via ${source}`)
},
onDelete: ({ key, source }) => {
console.debug(`[CACHE DELETE] ${key} via ${source}`)
}
}
})
import { MemoryCache } from '@humanspeak/memory-cache'
// Development-time cache that logs every lifecycle event to the console.
// Evictions log at `warn` level because they hint that maxSize is too small.
const cache = new MemoryCache<unknown>({
maxSize: 100,
ttl: 60000,
hooks: {
onHit: ({ key, value }) => {
console.debug(`[CACHE HIT] ${key}`, { valueType: typeof value })
},
onMiss: ({ key, reason }) => {
console.debug(`[CACHE MISS] ${key} (${reason})`)
},
onSet: ({ key, isUpdate }) => {
console.debug(`[CACHE ${isUpdate ? 'UPDATE' : 'SET'}] ${key}`)
},
onEvict: ({ key }) => {
console.warn(`[CACHE EVICT] ${key} - consider increasing maxSize`)
},
onExpire: ({ key, source }) => {
console.debug(`[CACHE EXPIRE] ${key} via ${source}`)
},
onDelete: ({ key, source }) => {
console.debug(`[CACHE DELETE] ${key} via ${source}`)
}
}
})
Hit Rate Dashboard
import { MemoryCache } from '@humanspeak/memory-cache'
/**
 * Aggregates hit/miss/eviction/expiration counts across every cache it
 * creates, for lightweight dashboard-style reporting.
 */
class CacheMonitor {
    // Single counters object instead of four loose fields.
    private counters = { hits: 0, misses: 0, evictions: 0, expirations: 0 }

    /** Create a cache whose hooks feed this monitor's counters. */
    createCache<T>(options: { maxSize: number; ttl: number }): MemoryCache<T> {
        return new MemoryCache<T>({
            ...options,
            hooks: {
                onHit: () => { this.counters.hits += 1 },
                onMiss: () => { this.counters.misses += 1 },
                onEvict: () => { this.counters.evictions += 1 },
                onExpire: () => { this.counters.expirations += 1 }
            }
        })
    }

    /** Snapshot of the counters plus a formatted hit-rate percentage. */
    getStats() {
        const { hits, misses, evictions, expirations } = this.counters
        const total = hits + misses
        return {
            hits,
            misses,
            evictions,
            expirations,
            hitRate: total > 0 ? (hits / total * 100).toFixed(2) + '%' : 'N/A'
        }
    }

    /** Zero every counter (e.g. at the start of a measurement window). */
    reset() {
        this.counters = { hits: 0, misses: 0, evictions: 0, expirations: 0 }
    }
}
// Usage
const monitor = new CacheMonitor()
const cache = monitor.createCache<User>({ maxSize: 1000, ttl: 60000 })
// Check stats periodically
// Logs the aggregated counters once per minute.
setInterval(() => {
console.log('Cache Performance:', monitor.getStats())
}, 60000)
import { MemoryCache } from '@humanspeak/memory-cache'
/**
 * Aggregates hit/miss/eviction/expiration counts across every cache it
 * creates, for lightweight dashboard-style reporting.
 */
class CacheMonitor {
    // Single counters object instead of four loose fields.
    private counters = { hits: 0, misses: 0, evictions: 0, expirations: 0 }

    /** Create a cache whose hooks feed this monitor's counters. */
    createCache<T>(options: { maxSize: number; ttl: number }): MemoryCache<T> {
        return new MemoryCache<T>({
            ...options,
            hooks: {
                onHit: () => { this.counters.hits += 1 },
                onMiss: () => { this.counters.misses += 1 },
                onEvict: () => { this.counters.evictions += 1 },
                onExpire: () => { this.counters.expirations += 1 }
            }
        })
    }

    /** Snapshot of the counters plus a formatted hit-rate percentage. */
    getStats() {
        const { hits, misses, evictions, expirations } = this.counters
        const total = hits + misses
        return {
            hits,
            misses,
            evictions,
            expirations,
            hitRate: total > 0 ? (hits / total * 100).toFixed(2) + '%' : 'N/A'
        }
    }

    /** Zero every counter (e.g. at the start of a measurement window). */
    reset() {
        this.counters = { hits: 0, misses: 0, evictions: 0, expirations: 0 }
    }
}
// Usage
const monitor = new CacheMonitor()
const cache = monitor.createCache<User>({ maxSize: 1000, ttl: 60000 })
// Check stats periodically
// Logs the aggregated counters once per minute.
setInterval(() => {
console.log('Cache Performance:', monitor.getStats())
}, 60000)
Key Considerations
- Performance: Keep hooks lightweight to avoid impacting cache performance
- Error Handling: Hook errors are silently caught, but avoid throwing inside hooks anyway
- Sampling: For high-traffic caches, consider sampling instead of logging every event
- Async Operations: Hooks are synchronous; queue async work if needed