Performance and Optimization
Binary Serialization Benefits
One of Hypermodern's key performance advantages is its use of binary serialization by default. This provides significant improvements over JSON in terms of both speed and bandwidth usage.
Performance Comparison
/// Prints a side-by-side comparison of JSON vs binary (de)serialization
/// speed and payload size for one representative [User] object.
///
/// NOTE(review): depends on project types (User, UserProfile, Role) and their
/// toJson/toBinary/fromJson/fromBinary members declared elsewhere.
class SerializationBenchmark {
  /// Runs all four benchmarks (encode/decode x JSON/binary) and prints a
  /// report to stdout.
  static Future<void> runBenchmarks() async {
    const rounds = 10000;
    final user = User(
      id: 12345,
      username: 'john_doe',
      email: 'john@example.com',
      profile: UserProfile(
        firstName: 'John',
        lastName: 'Doe',
        bio: 'Software developer with 10 years of experience...',
        avatarUrl: 'https://example.com/avatars/john_doe.jpg',
        preferences: {
          'theme': 'dark',
          'notifications': true,
          'language': 'en',
        },
      ),
      roles: [
        Role(id: 1, name: 'user'),
        Role(id: 2, name: 'developer'),
      ],
      createdAt: DateTime.now(),
      updatedAt: DateTime.now(),
    );
    // Time JSON encoding.
    final jsonEncodeTimer = Stopwatch()..start();
    var jsonData = '';
    for (var round = 0; round < rounds; round++) {
      jsonData = jsonEncode(user.toJson());
    }
    jsonEncodeTimer.stop();
    // Time binary encoding.
    final binaryEncodeTimer = Stopwatch()..start();
    var binaryData = Uint8List(0);
    for (var round = 0; round < rounds; round++) {
      binaryData = user.toBinary();
    }
    binaryEncodeTimer.stop();
    print('Serialization Performance:');
    print('JSON: ${jsonEncodeTimer.elapsedMilliseconds}ms');
    print('Binary: ${binaryEncodeTimer.elapsedMilliseconds}ms');
    print('Binary is ${(jsonEncodeTimer.elapsedMilliseconds / binaryEncodeTimer.elapsedMilliseconds).toStringAsFixed(1)}x faster');
    print('\nSize Comparison:');
    print('JSON: ${jsonData.length} bytes');
    print('Binary: ${binaryData.length} bytes');
    print('Binary is ${((1 - binaryData.length / jsonData.length) * 100).toStringAsFixed(1)}% smaller');
    // Time JSON decoding (re-parses the last encoded payload).
    final jsonDecodeTimer = Stopwatch()..start();
    for (var round = 0; round < rounds; round++) {
      User.fromJson(jsonDecode(jsonData));
    }
    jsonDecodeTimer.stop();
    // Time binary decoding.
    final binaryDecodeTimer = Stopwatch()..start();
    for (var round = 0; round < rounds; round++) {
      User.fromBinary(binaryData);
    }
    binaryDecodeTimer.stop();
    print('\nDeserialization Performance:');
    print('JSON: ${jsonDecodeTimer.elapsedMilliseconds}ms');
    print('Binary: ${binaryDecodeTimer.elapsedMilliseconds}ms');
    print('Binary is ${(jsonDecodeTimer.elapsedMilliseconds / binaryDecodeTimer.elapsedMilliseconds).toStringAsFixed(1)}x faster');
  }
}
Optimized Binary Serialization
/// Append-only binary encoder using LEB128-style varints for all lengths
/// and integer values.
class OptimizedBinaryWriter {
  final BytesBuilder _buffer = BytesBuilder();

  /// Writes [value] as a varint: 7 payload bits per byte, high bit set on
  /// every byte except the last.
  void writeVarInt(int value) {
    var remaining = value;
    while (remaining >= 0x80) {
      // Low 7 bits plus the continuation flag (bit 7 is always overwritten).
      _buffer.addByte((remaining & 0x7F) | 0x80);
      remaining >>>= 7;
    }
    _buffer.addByte(remaining & 0xFF);
  }

  /// Writes a UTF-8 string as a varint byte-length followed by the bytes.
  void writeString(String value) {
    final encoded = utf8.encode(value);
    writeVarInt(encoded.length);
    _buffer.add(encoded);
  }

  /// Writes a presence byte (0 = null, 1 = present) then the string if any.
  void writeOptionalString(String? value) {
    if (value != null) {
      _buffer.addByte(1);
      writeString(value);
    } else {
      _buffer.addByte(0);
    }
  }

  /// Writes a timestamp as a varint of microseconds since the Unix epoch.
  void writeDateTime(DateTime value) {
    writeVarInt(value.microsecondsSinceEpoch);
  }

  /// Writes a varint element count, then each element via [writeItem].
  void writeList<T>(List<T> list, void Function(T) writeItem) {
    writeVarInt(list.length);
    list.forEach(writeItem);
  }

  /// Writes a varint entry count, then each key/value pair in order.
  void writeMap<K, V>(Map<K, V> map, void Function(K) writeKey, void Function(V) writeValue) {
    writeVarInt(map.length);
    map.forEach((key, value) {
      writeKey(key);
      writeValue(value);
    });
  }

  /// Returns everything written so far as a single byte buffer.
  Uint8List toBytes() => _buffer.toBytes();
}
/// Sequential decoder for the format produced by [OptimizedBinaryWriter].
///
/// Reads advance an internal offset; out-of-range reads throw [RangeError]
/// via the underlying list's bounds checks.
class OptimizedBinaryReader {
  final Uint8List _data;
  int _offset = 0;

  OptimizedBinaryReader(this._data);

  /// Reads a LEB128-style varint (7 payload bits per byte; high bit =
  /// continuation).
  int readVarInt() {
    int result = 0;
    int shift = 0;
    while (true) {
      final byte = _data[_offset++];
      result |= (byte & 0x7F) << shift;
      if ((byte & 0x80) == 0) {
        break;
      }
      shift += 7;
    }
    return result;
  }

  /// Reads a varint length followed by that many UTF-8 bytes.
  String readString() {
    final length = readVarInt();
    // Fix: sublistView avoids copying the string bytes (the original used
    // sublist, which allocates a new list per string).
    final bytes = Uint8List.sublistView(_data, _offset, _offset + length);
    _offset += length;
    return utf8.decode(bytes);
  }

  /// Reads a presence byte then the string; any presence byte other than 1
  /// is treated as null (the writer only emits 0 or 1).
  String? readOptionalString() {
    final hasValue = _data[_offset++];
    return hasValue == 1 ? readString() : null;
  }

  /// Reads a varint of microseconds since the Unix epoch.
  DateTime readDateTime() {
    final microseconds = readVarInt();
    return DateTime.fromMicrosecondsSinceEpoch(microseconds);
  }

  /// Reads a varint count then that many elements via [readItem].
  List<T> readList<T>(T Function() readItem) {
    final length = readVarInt();
    return List.generate(length, (_) => readItem());
  }

  /// Reads a varint count then that many key/value pairs, in write order.
  Map<K, V> readMap<K, V>(K Function() readKey, V Function() readValue) {
    final length = readVarInt();
    final map = <K, V>{};
    for (int i = 0; i < length; i++) {
      final key = readKey();
      final value = readValue();
      map[key] = value;
    }
    return map;
  }
}
Connection Pooling
Advanced Connection Pool Implementation
/// Database connection pool with min/max sizing, pre-use validation,
/// idle/lifetime expiry, a FIFO waiter queue, and periodic maintenance.
///
/// NOTE(review): depends on project types (ConnectionFactory,
/// PooledConnection, ConnectionPoolStats) and a global `logger`.
class AdvancedConnectionPool {
  final ConnectionFactory _factory;
  final int _minConnections;
  final int _maxConnections;
  final Duration _connectionTimeout;
  final Duration _idleTimeout;
  final Duration _maxLifetime;
  final Duration _validationInterval;
  final Queue<PooledConnection> _availableConnections = Queue();
  final Set<PooledConnection> _activeConnections = {};
  final Queue<Completer<PooledConnection>> _waitingQueue = Queue();
  Timer? _maintenanceTimer;
  bool _isShuttingDown = false;

  AdvancedConnectionPool({
    required ConnectionFactory factory,
    int minConnections = 5,
    int maxConnections = 20,
    Duration connectionTimeout = const Duration(seconds: 10),
    Duration idleTimeout = const Duration(minutes: 10),
    Duration maxLifetime = const Duration(hours: 1),
    Duration validationInterval = const Duration(minutes: 5),
  }) : _factory = factory,
       _minConnections = minConnections,
       _maxConnections = maxConnections,
       _connectionTimeout = connectionTimeout,
       _idleTimeout = idleTimeout,
       _maxLifetime = maxLifetime,
       _validationInterval = validationInterval;

  /// Pre-creates the minimum number of connections and starts the
  /// maintenance timer.
  Future<void> initialize() async {
    // Create minimum connections
    for (int i = 0; i < _minConnections; i++) {
      final connection = await _createConnection();
      _availableConnections.add(connection);
    }
    // Start maintenance timer
    _maintenanceTimer = Timer.periodic(_validationInterval, (_) => _performMaintenance());
    print('Connection pool initialized with $_minConnections connections');
  }

  /// Hands out a validated connection: reuses an idle one, creates a new one
  /// if under the cap, or parks the caller (bounded by the connection
  /// timeout) otherwise.
  Future<PooledConnection> acquire() async {
    if (_isShuttingDown) {
      throw StateError('Connection pool is shutting down');
    }
    // Return available connection if exists
    if (_availableConnections.isNotEmpty) {
      final connection = _availableConnections.removeFirst();
      // Validate connection before returning
      if (await _validateConnection(connection)) {
        _activeConnections.add(connection);
        connection._markAsActive();
        return connection;
      } else {
        // Connection is invalid, close it and try again
        await connection._close();
        return await acquire();
      }
    }
    // Create new connection if under limit
    if (_getTotalConnections() < _maxConnections) {
      final connection = await _createConnection();
      _activeConnections.add(connection);
      connection._markAsActive();
      return connection;
    }
    // Wait for available connection
    final completer = Completer<PooledConnection>();
    _waitingQueue.add(completer);
    return completer.future.timeout(_connectionTimeout, onTimeout: () {
      _waitingQueue.remove(completer);
      throw TimeoutException('Timeout waiting for database connection');
    });
  }

  /// Returns [connection] to the pool, preferring a queued waiter.
  ///
  /// Fix: the original validated the connection twice (one extra 'SELECT 1'
  /// round-trip); it is now validated exactly once, then either handed to a
  /// waiter, returned to the idle pool, or closed if invalid.
  Future<void> release(PooledConnection connection) async {
    _activeConnections.remove(connection);
    connection._markAsIdle();
    if (!await _validateConnection(connection)) {
      // Connection is invalid, close it
      await connection._close();
      return;
    }
    // Fulfill waiting request or return to pool
    if (_waitingQueue.isNotEmpty) {
      final completer = _waitingQueue.removeFirst();
      _activeConnections.add(connection);
      connection._markAsActive();
      completer.complete(connection);
    } else {
      _availableConnections.add(connection);
    }
  }

  /// Creates and wraps a fresh raw connection.
  Future<PooledConnection> _createConnection() async {
    final rawConnection = await _factory.create();
    return PooledConnection(rawConnection, this);
  }

  /// Returns true if [connection] is within its lifetime/idle limits and
  /// answers a 'SELECT 1' probe; any thrown error counts as invalid.
  Future<bool> _validateConnection(PooledConnection connection) async {
    try {
      // Check if connection is too old
      if (DateTime.now().difference(connection._createdAt) > _maxLifetime) {
        return false;
      }
      // Check if connection has been idle too long
      if (connection._isIdle &&
          DateTime.now().difference(connection._lastUsed) > _idleTimeout) {
        return false;
      }
      // Perform actual validation query
      await connection._rawConnection.query('SELECT 1');
      return true;
    } catch (e) {
      return false;
    }
  }

  /// Periodic upkeep: drops expired/broken idle connections and tops the
  /// pool back up to the configured minimum.
  Future<void> _performMaintenance() async {
    if (_isShuttingDown) return;
    // Remove expired connections from available pool.
    // Fix: validate a snapshot — _validateConnection awaits a query, during
    // which acquire()/release() may mutate _availableConnections, so
    // iterating the live queue risks ConcurrentModificationError.
    final expiredConnections = <PooledConnection>[];
    for (final connection in _availableConnections.toList()) {
      if (!await _validateConnection(connection)) {
        expiredConnections.add(connection);
      }
    }
    for (final connection in expiredConnections) {
      _availableConnections.remove(connection);
      await connection._close();
    }
    // Ensure minimum connections
    while (_availableConnections.length < _minConnections &&
        _getTotalConnections() < _maxConnections) {
      try {
        final connection = await _createConnection();
        _availableConnections.add(connection);
      } catch (e) {
        logger.error('Failed to create connection during maintenance', error: e);
        break;
      }
    }
    // Log pool statistics
    _logPoolStatistics();
  }

  void _logPoolStatistics() {
    logger.debug('Connection pool statistics', extra: {
      'available_connections': _availableConnections.length,
      'active_connections': _activeConnections.length,
      'waiting_requests': _waitingQueue.length,
      'total_connections': _getTotalConnections(),
    });
  }

  int _getTotalConnections() {
    return _availableConnections.length + _activeConnections.length;
  }

  /// Fails all waiters, closes every connection, and stops maintenance.
  Future<void> shutdown() async {
    _isShuttingDown = true;
    _maintenanceTimer?.cancel();
    // Fail all waiting requests
    while (_waitingQueue.isNotEmpty) {
      final completer = _waitingQueue.removeFirst();
      completer.completeError(StateError('Connection pool is shutting down'));
    }
    // Close all connections
    final allConnections = [..._availableConnections, ..._activeConnections];
    for (final connection in allConnections) {
      await connection._close();
    }
    _availableConnections.clear();
    _activeConnections.clear();
    print('Connection pool shut down');
  }

  /// Point-in-time snapshot of pool occupancy.
  ConnectionPoolStats getStats() {
    return ConnectionPoolStats(
      availableConnections: _availableConnections.length,
      activeConnections: _activeConnections.length,
      waitingRequests: _waitingQueue.length,
      totalConnections: _getTotalConnections(),
      maxConnections: _maxConnections,
      minConnections: _minConnections,
    );
  }
}
/// A pooled wrapper around a raw database [Connection] that tracks creation
/// time, last-use time, and idle state for the owning pool.
class PooledConnection {
  final Connection _rawConnection;
  final AdvancedConnectionPool _pool;
  final DateTime _createdAt;
  DateTime _lastUsed;
  bool _isIdle = true;

  PooledConnection(this._rawConnection, this._pool)
      : _createdAt = DateTime.now(),
        _lastUsed = DateTime.now();

  // Stamps the connection as just-used.
  void _touch() => _lastUsed = DateTime.now();

  /// Runs a SELECT-style query on the underlying connection.
  Future<List<Map<String, dynamic>>> query(String sql, [List<dynamic>? parameters]) async {
    _touch();
    return await _rawConnection.query(sql, parameters);
  }

  /// Runs a statement that returns no rows.
  Future<void> execute(String sql, [List<dynamic>? parameters]) async {
    _touch();
    return await _rawConnection.execute(sql, parameters);
  }

  /// Runs [callback] inside a transaction on the underlying connection.
  Future<T> transaction<T>(Future<T> Function(Connection) callback) async {
    _touch();
    return await _rawConnection.transaction(callback);
  }

  void _markAsActive() {
    _isIdle = false;
    _touch();
  }

  void _markAsIdle() {
    _isIdle = true;
    _touch();
  }

  Future<void> _close() async {
    await _rawConnection.close();
  }

  /// Hands this connection back to its pool.
  Future<void> release() async {
    await _pool.release(this);
  }
}
Caching Strategies
Multi-Level Caching System
/// Two-tier cache: an in-process L1 ([MemoryCache]) in front of a shared L2
/// ([RedisCache]). Reads promote L2 hits into L1; writes go to both tiers.
class MultiLevelCache {
  final MemoryCache _l1Cache;
  final RedisCache _l2Cache;
  final Duration _defaultTtl;

  MultiLevelCache({
    required MemoryCache l1Cache,
    required RedisCache l2Cache,
    Duration defaultTtl = const Duration(minutes: 15),
  }) : _l1Cache = l1Cache,
       _l2Cache = l2Cache,
       _defaultTtl = defaultTtl;

  /// Looks up [key], checking L1 then L2; an L2 hit is copied into L1 with
  /// the default TTL. Returns null on a miss in both tiers.
  Future<T?> get<T>(String key, T Function(Map<String, dynamic>) deserializer) async {
    final fromMemory = await _l1Cache.get<T>(key, deserializer);
    if (fromMemory != null) return fromMemory;
    final fromRedis = await _l2Cache.get<T>(key, deserializer);
    if (fromRedis == null) return null;
    await _l1Cache.set(key, fromRedis, ttl: _defaultTtl);
    return fromRedis;
  }

  /// Stores [value] in both tiers with [ttl] (or the default TTL).
  Future<void> set<T>(String key, T value, {Duration? ttl}) async {
    final effectiveTtl = ttl ?? _defaultTtl;
    await Future.wait([
      _l1Cache.set(key, value, ttl: effectiveTtl),
      _l2Cache.set(key, value, ttl: effectiveTtl),
    ]);
  }

  /// Removes [key] from both tiers.
  Future<void> delete(String key) async {
    await Future.wait([
      _l1Cache.delete(key),
      _l2Cache.delete(key),
    ]);
  }

  /// Empties both tiers.
  Future<void> clear() async {
    await Future.wait([
      _l1Cache.clear(),
      _l2Cache.clear(),
    ]);
  }
}
/// In-process cache with a size cap, LRU eviction, optional per-entry TTL,
/// and a periodic sweep of expired entries.
///
/// NOTE(review): set() stores `value.toJson()` via a dynamic call, so every
/// cached value must expose a `toJson()` returning Map&lt;String, dynamic&gt;.
class MemoryCache {
  final Map<String, CacheEntry> _cache = {};
  final int _maxSize;
  Timer? _cleanupTimer;

  MemoryCache({
    int maxSize = 1000,
    Duration cleanupInterval = const Duration(minutes: 5),
  }) : _maxSize = maxSize {
    _cleanupTimer = Timer.periodic(cleanupInterval, (_) => _cleanup());
  }

  /// Returns the cached value for [key] (refreshing its LRU stamp), or null
  /// if absent or expired. Expired entries are removed on access.
  Future<T?> get<T>(String key, T Function(Map<String, dynamic>) deserializer) async {
    final entry = _cache[key];
    if (entry == null) return null;
    if (entry.isExpired) {
      _cache.remove(key);
      return null;
    }
    entry.lastAccessed = DateTime.now();
    return deserializer(entry.data);
  }

  /// Stores [value] under [key], evicting the least-recently-used entry only
  /// when inserting a NEW key at capacity.
  ///
  /// Fix: the original evicted even when overwriting an existing key, which
  /// needlessly dropped an unrelated entry.
  Future<void> set<T>(String key, T value, {Duration? ttl}) async {
    if (!_cache.containsKey(key) && _cache.length >= _maxSize) {
      _evictLeastRecentlyUsed();
    }
    final expiresAt = ttl != null ? DateTime.now().add(ttl) : null;
    _cache[key] = CacheEntry(
      data: (value as dynamic).toJson(),
      expiresAt: expiresAt,
      lastAccessed: DateTime.now(),
    );
  }

  Future<void> delete(String key) async {
    _cache.remove(key);
  }

  Future<void> clear() async {
    _cache.clear();
  }

  // Removes the entry with the oldest lastAccessed stamp (linear scan).
  void _evictLeastRecentlyUsed() {
    if (_cache.isEmpty) return;
    String? lruKey;
    DateTime? oldestAccess;
    for (final entry in _cache.entries) {
      if (oldestAccess == null || entry.value.lastAccessed.isBefore(oldestAccess)) {
        oldestAccess = entry.value.lastAccessed;
        lruKey = entry.key;
      }
    }
    if (lruKey != null) {
      _cache.remove(lruKey);
    }
  }

  // Periodic sweep of expired entries. (Fix: removed the unused `now` local.)
  void _cleanup() {
    _cache.removeWhere((key, entry) => entry.isExpired);
  }

  /// Cancels the periodic cleanup timer; call when discarding the cache.
  void dispose() {
    _cleanupTimer?.cancel();
  }
}
/// Redis-backed cache tier. Values are stored as JSON strings under a
/// configurable key prefix; undecodable entries are deleted on read.
class RedisCache {
  final RedisConnection _redis;
  final String _keyPrefix;

  RedisCache(this._redis, {String keyPrefix = 'cache:'}) : _keyPrefix = keyPrefix;

  /// Fetches and decodes the value for [key], or null if absent.
  /// Corrupt JSON is removed from Redis and reported as a miss.
  Future<T?> get<T>(String key, T Function(Map<String, dynamic>) deserializer) async {
    final redisKey = '$_keyPrefix$key';
    final data = await _redis.get(redisKey);
    if (data == null) return null;
    try {
      return deserializer(jsonDecode(data) as Map<String, dynamic>);
    } catch (e) {
      // Invalid data, remove from cache
      await _redis.del([redisKey]);
      return null;
    }
  }

  /// Stores [value] (via its toJson()) under [key], with an expiry when
  /// [ttl] is given.
  Future<void> set<T>(String key, T value, {Duration? ttl}) async {
    final redisKey = '$_keyPrefix$key';
    final data = jsonEncode((value as dynamic).toJson());
    if (ttl == null) {
      await _redis.set(redisKey, data);
    } else {
      await _redis.setex(redisKey, ttl.inSeconds, data);
    }
  }

  /// Removes [key].
  Future<void> delete(String key) async {
    await _redis.del(['$_keyPrefix$key']);
  }

  /// Removes every key under this cache's prefix.
  Future<void> clear() async {
    final matched = await _redis.keys('$_keyPrefix*');
    if (matched.isNotEmpty) {
      await _redis.del(matched);
    }
  }
}
/// One stored cache value with its optional expiry and LRU access stamp.
class CacheEntry {
  /// The serialized (toJson) payload.
  final Map<String, dynamic> data;

  /// Absolute expiry time; null means the entry never expires.
  final DateTime? expiresAt;

  /// Updated on every read; used for LRU eviction.
  DateTime lastAccessed;

  CacheEntry({
    required this.data,
    this.expiresAt,
    required this.lastAccessed,
  });

  /// Whether the entry has an expiry that lies in the past.
  bool get isExpired =>
      expiresAt != null && DateTime.now().isAfter(expiresAt!);
}
Query Result Caching
/// Caches SELECT query results in a [MultiLevelCache], keyed by a stable
/// hash of the normalized SQL plus its parameters.
class QueryCache {
  final MultiLevelCache _cache;
  final Duration _defaultTtl;
  final Set<String> _cacheableQueries;

  QueryCache({
    required MultiLevelCache cache,
    Duration defaultTtl = const Duration(minutes: 10),
    Set<String> cacheableQueries = const {},
  }) : _cache = cache,
       _defaultTtl = defaultTtl,
       _cacheableQueries = cacheableQueries;

  /// Runs [sql] against [db], serving from cache when possible.
  /// Non-cacheable queries and [forceRefresh] bypass the cache read.
  Future<List<Map<String, dynamic>>> query(
    Database db,
    String sql,
    List<dynamic> parameters, {
    Duration? ttl,
    bool forceRefresh = false,
  }) async {
    final cacheKey = _generateCacheKey(sql, parameters);
    // Check if query is cacheable
    if (!_isQueryCacheable(sql)) {
      return await db.query(sql, parameters);
    }
    // Try to get from cache
    if (!forceRefresh) {
      final cached = await _cache.get<List<Map<String, dynamic>>>(
        cacheKey,
        (json) => (json['data'] as List).cast<Map<String, dynamic>>(),
      );
      if (cached != null) {
        return cached;
      }
    }
    // Execute query and cache result
    final result = await db.query(sql, parameters);
    await _cache.set(
      cacheKey,
      CachedQueryResult(data: result),
      ttl: ttl ?? _defaultTtl,
    );
    return result;
  }

  /// Invalidates keys matching [pattern].
  /// NOTE(review): pattern invalidation is not implemented; this clears the
  /// whole cache as a conservative fallback.
  Future<void> invalidatePattern(String pattern) async {
    await _cache.clear(); // Simplified for now
  }

  /// Invalidates all cached queries that might involve [tableName].
  Future<void> invalidateTable(String tableName) async {
    await invalidatePattern('*$tableName*');
  }

  /// Builds a cache key from the whitespace-normalized, lowercased SQL and
  /// the stringified parameters.
  ///
  /// Fix: the original used String.hashCode, which is not guaranteed stable
  /// across isolates/processes/runs — fatal for keys persisted in the shared
  /// L2 (Redis) tier. A 64-bit FNV-1a hash is stable everywhere.
  String _generateCacheKey(String sql, List<dynamic> parameters) {
    final normalizedSql = sql.replaceAll(RegExp(r'\s+'), ' ').trim().toLowerCase();
    final paramString = parameters.map((p) => p.toString()).join(':');
    final combined = '$normalizedSql:$paramString';
    return 'query:${_stableHash(combined)}';
  }

  // 64-bit FNV-1a over UTF-16 code units; deterministic across processes.
  int _stableHash(String input) {
    var hash = 0xcbf29ce484222325; // FNV-1a 64-bit offset basis
    for (final unit in input.codeUnits) {
      hash ^= unit;
      hash *= 0x100000001b3; // FNV-1a 64-bit prime (wraps in 64-bit ints)
    }
    return hash;
  }

  /// True for SELECT statements that use no volatile functions and, when an
  /// allow-list is configured, match one of its patterns.
  bool _isQueryCacheable(String sql) {
    final normalizedSql = sql.toLowerCase().trim();
    // Only cache SELECT queries
    if (!normalizedSql.startsWith('select')) {
      return false;
    }
    // Don't cache queries with functions that return different results
    final nonCacheableFunctions = [
      'now()', 'current_timestamp', 'random()', 'uuid_generate_v4()',
    ];
    for (final func in nonCacheableFunctions) {
      if (normalizedSql.contains(func)) {
        return false;
      }
    }
    // Check if query is in cacheable list (if specified)
    if (_cacheableQueries.isNotEmpty) {
      return _cacheableQueries.any((pattern) =>
          normalizedSql.contains(pattern.toLowerCase()));
    }
    return true;
  }
}
/// Serializable wrapper so query rows can pass through the cache layers,
/// which require values with a toJson() method.
class CachedQueryResult {
  /// The rows returned by the query.
  final List<Map<String, dynamic>> data;

  CachedQueryResult({required this.data});

  /// Wraps the rows under a 'data' key for cache storage.
  Map<String, dynamic> toJson() {
    return {'data': data};
  }
}
Monitoring and Metrics
Performance Monitoring System
/// Periodically gathers system/application metrics, records them through a
/// [MetricsCollector], and raises threshold alerts via an [AlertManager].
///
/// NOTE(review): depends on project types (MetricsCollector, AlertManager,
/// SystemMetrics and its sub-metrics, Alert) and a global `logger`; shells
/// out to `ps`/`uptime`, so numbers are best-effort on non-Unix hosts.
class PerformanceMonitor {
  final MetricsCollector _metricsCollector;
  final AlertManager _alertManager;
  final Duration _collectionInterval;
  Timer? _collectionTimer;

  PerformanceMonitor({
    required MetricsCollector metricsCollector,
    required AlertManager alertManager,
    Duration collectionInterval = const Duration(seconds: 30),
  }) : _metricsCollector = metricsCollector,
       _alertManager = alertManager,
       _collectionInterval = collectionInterval;

  /// Starts periodic collection.
  void start() {
    _collectionTimer = Timer.periodic(_collectionInterval, (_) async {
      await _collectMetrics();
    });
    print('Performance monitoring started');
  }

  /// Stops periodic collection.
  void stop() {
    _collectionTimer?.cancel();
    _collectionTimer = null;
    print('Performance monitoring stopped');
  }

  // One collection cycle: gather, record, then evaluate alert thresholds.
  Future<void> _collectMetrics() async {
    try {
      final metrics = await _gatherSystemMetrics();
      await _metricsCollector.record(metrics);
      await _checkAlerts(metrics);
    } catch (e) {
      logger.error('Failed to collect metrics', error: e);
    }
  }

  // Gathers all metric groups concurrently.
  Future<SystemMetrics> _gatherSystemMetrics() async {
    final results = await Future.wait([
      _getMemoryMetrics(),
      _getCpuMetrics(),
      _getNetworkMetrics(),
      _getDatabaseMetrics(),
      _getApplicationMetrics(),
    ]);
    return SystemMetrics(
      memory: results[0] as MemoryMetrics,
      cpu: results[1] as CpuMetrics,
      network: results[2] as NetworkMetrics,
      database: results[3] as DatabaseMetrics,
      application: results[4] as ApplicationMetrics,
      timestamp: DateTime.now(),
    );
  }

  /// Reads this process's RSS/VSZ via `ps`.
  ///
  /// Fix: parsing is wrapped so missing `ps`, malformed output, or short
  /// rows return zeroed metrics instead of throwing
  /// FormatException/RangeError out of the collection cycle.
  Future<MemoryMetrics> _getMemoryMetrics() async {
    try {
      final processInfo = await Process.run('ps', ['-o', 'pid,rss,vsz', '-p', pid.toString()]);
      final lines = processInfo.stdout.toString().split('\n');
      if (lines.length >= 2) {
        final parts = lines[1].trim().split(RegExp(r'\s+'));
        if (parts.length >= 3) {
          final rss = int.parse(parts[1]) * 1024; // ps reports KB; convert to bytes
          final vsz = int.parse(parts[2]) * 1024; // Convert KB to bytes
          return MemoryMetrics(
            used: rss,
            virtual: vsz,
            available: _getAvailableMemory(),
          );
        }
      }
    } catch (_) {
      // Fall through to zeroed metrics (ps unavailable or unparseable).
    }
    return MemoryMetrics(used: 0, virtual: 0, available: 0);
  }

  // Simplified CPU metrics - in production, use proper system monitoring.
  Future<CpuMetrics> _getCpuMetrics() async {
    final loadAvg = await _getLoadAverage();
    return CpuMetrics(
      usage: loadAvg,
      loadAverage1m: loadAvg,
      loadAverage5m: loadAvg,
      loadAverage15m: loadAvg,
    );
  }

  // Placeholder — would typically read from /proc/net/dev on Linux.
  Future<NetworkMetrics> _getNetworkMetrics() async {
    return NetworkMetrics(
      bytesReceived: 0,
      bytesSent: 0,
      packetsReceived: 0,
      packetsSent: 0,
    );
  }

  // Placeholder — collect database-specific metrics here.
  Future<DatabaseMetrics> _getDatabaseMetrics() async {
    return DatabaseMetrics(
      activeConnections: 0,
      totalConnections: 0,
      queryCount: 0,
      averageQueryTime: 0.0,
    );
  }

  // Pulls counters from the in-process tracker singletons.
  Future<ApplicationMetrics> _getApplicationMetrics() async {
    return ApplicationMetrics(
      requestCount: RequestCounter.instance.getCount(),
      averageResponseTime: ResponseTimeTracker.instance.getAverageTime(),
      errorCount: ErrorCounter.instance.getCount(),
      activeUsers: ActiveUserTracker.instance.getCount(),
    );
  }

  // Evaluates fixed thresholds and emits alerts.
  // NOTE(review): `available` is a whole-machine estimate here, so the
  // memory ratio mixes process RSS with total memory — confirm intent.
  Future<void> _checkAlerts(SystemMetrics metrics) async {
    // Memory usage alert
    if (metrics.memory.used > metrics.memory.available * 0.9) {
      await _alertManager.sendAlert(Alert(
        type: AlertType.highMemoryUsage,
        severity: AlertSeverity.critical,
        message: 'Memory usage is above 90%',
        data: {'usage': metrics.memory.used, 'available': metrics.memory.available},
      ));
    }
    // CPU usage alert
    if (metrics.cpu.usage > 0.8) {
      await _alertManager.sendAlert(Alert(
        type: AlertType.highCpuUsage,
        severity: AlertSeverity.warning,
        message: 'CPU usage is above 80%',
        data: {'usage': metrics.cpu.usage},
      ));
    }
    // Response time alert
    if (metrics.application.averageResponseTime > 2000) { // 2 seconds
      await _alertManager.sendAlert(Alert(
        type: AlertType.slowResponseTime,
        severity: AlertSeverity.warning,
        message: 'Average response time is above 2 seconds',
        data: {'response_time': metrics.application.averageResponseTime},
      ));
    }
  }

  // Hard-coded 8GB — would read from /proc/meminfo on Linux.
  int _getAvailableMemory() {
    return 8 * 1024 * 1024 * 1024; // 8GB
  }

  // Parses the 1-minute load average out of `uptime`; 0.0 on any failure.
  Future<double> _getLoadAverage() async {
    try {
      final result = await Process.run('uptime', []);
      final output = result.stdout.toString();
      final match = RegExp(r'load average: ([\d.]+)').firstMatch(output);
      return match != null ? double.parse(match.group(1)!) : 0.0;
    } catch (e) {
      return 0.0;
    }
  }
}
/// Process-wide running count of handled requests (singleton).
class RequestCounter {
  RequestCounter._();

  /// The shared instance.
  static final RequestCounter instance = RequestCounter._();

  int _total = 0;

  /// Adds one to the running total.
  void increment() {
    _total += 1;
  }

  /// Returns the total recorded since the last reset.
  int getCount() => _total;

  /// Sets the total back to zero.
  void reset() {
    _total = 0;
  }
}
/// Keeps a sliding window of the most recent 1000 response times (ms) and
/// reports their mean (singleton).
class ResponseTimeTracker {
  ResponseTimeTracker._();

  /// The shared instance.
  static final ResponseTimeTracker instance = ResponseTimeTracker._();

  final List<int> _samples = [];
  final int _capacity = 1000;

  /// Records one response time, dropping the oldest sample past capacity.
  void recordTime(int milliseconds) {
    _samples.add(milliseconds);
    while (_samples.length > _capacity) {
      _samples.removeAt(0);
    }
  }

  /// Mean of the retained samples, or 0.0 when none are recorded.
  double getAverageTime() {
    if (_samples.isEmpty) return 0.0;
    var total = 0;
    for (final sample in _samples) {
      total += sample;
    }
    return total / _samples.length;
  }

  /// Discards all samples.
  void reset() => _samples.clear();
}
/// Process-wide running count of errors (singleton).
class ErrorCounter {
  ErrorCounter._();

  /// The shared instance.
  static final ErrorCounter instance = ErrorCounter._();

  int _total = 0;

  /// Adds one to the running total.
  void increment() {
    _total += 1;
  }

  /// Returns the total recorded since the last reset.
  int getCount() => _total;

  /// Sets the total back to zero.
  void reset() {
    _total = 0;
  }
}
/// Tracks the set of currently-active user ids (singleton).
class ActiveUserTracker {
  ActiveUserTracker._();

  /// The shared instance.
  static final ActiveUserTracker instance = ActiveUserTracker._();

  final Set<int> _online = {};

  /// Marks [userId] as active (idempotent — it is a set).
  void addUser(int userId) {
    _online.add(userId);
  }

  /// Marks [userId] as no longer active.
  void removeUser(int userId) {
    _online.remove(userId);
  }

  /// Number of distinct active users.
  int getCount() => _online.length;

  /// Clears all active users.
  void reset() {
    _online.clear();
  }
}
Request Tracing and Profiling
/// In-process request tracer: traces are kept in a static map keyed by a
/// generated id until [finishTrace] removes them.
///
/// NOTE(review): traces whose finishTrace is never called are leaked in
/// _activeTraces — consider a periodic sweep for long-lived processes.
class RequestTracer {
  static final Map<String, RequestTrace> _activeTraces = {};

  /// Opens a trace for [endpoint] and returns its id.
  static String startTrace(String endpoint, {Map<String, dynamic>? metadata}) {
    final traceId = _generateTraceId();
    final trace = RequestTrace(
      id: traceId,
      endpoint: endpoint,
      startTime: DateTime.now(),
      metadata: metadata ?? {},
    );
    _activeTraces[traceId] = trace;
    return traceId;
  }

  /// Appends an open span named [name] to the trace; no-op for unknown ids.
  static void addSpan(String traceId, String name, {Map<String, dynamic>? data}) {
    final trace = _activeTraces[traceId];
    if (trace != null) {
      trace.spans.add(TraceSpan(
        name: name,
        startTime: DateTime.now(),
        data: data ?? {},
      ));
    }
  }

  /// Closes the most recent OPEN span named [name], merging [data] into it.
  ///
  /// Fix: the original used `lastWhere`, which throws StateError when no
  /// matching open span exists (e.g. finishSpan without a prior addSpan);
  /// this is now a safe no-op.
  static void finishSpan(String traceId, String name, {Map<String, dynamic>? data}) {
    final trace = _activeTraces[traceId];
    if (trace == null) return;
    for (var i = trace.spans.length - 1; i >= 0; i--) {
      final span = trace.spans[i];
      if (span.name == name && span.endTime == null) {
        span.endTime = DateTime.now();
        if (data != null) {
          span.data.addAll(data);
        }
        return;
      }
    }
  }

  /// Closes and removes the trace, logging it if it took over one second.
  /// Returns the finished trace, or null for an unknown id.
  static RequestTrace? finishTrace(String traceId, {int? statusCode, String? error}) {
    final trace = _activeTraces.remove(traceId);
    if (trace != null) {
      trace.endTime = DateTime.now();
      trace.statusCode = statusCode;
      trace.error = error;
      // Log slow requests
      if (trace.duration.inMilliseconds > 1000) {
        logger.warning('Slow request detected', extra: {
          'trace_id': traceId,
          'endpoint': trace.endpoint,
          'duration_ms': trace.duration.inMilliseconds,
          'spans': trace.spans.length,
        });
      }
    }
    return trace;
  }

  // Millisecond timestamp plus a random suffix; unique enough in practice,
  // though collisions are theoretically possible within one millisecond.
  static String _generateTraceId() {
    return '${DateTime.now().millisecondsSinceEpoch}_${Random().nextInt(999999)}';
  }
}
/// One traced request: identity, timing, spans, and outcome.
class RequestTrace {
  final String id;
  final String endpoint;
  final DateTime startTime;
  final Map<String, dynamic> metadata;

  /// Child spans appended by [RequestTracer.addSpan].
  final List<TraceSpan> spans = [];

  /// Set when the trace finishes; null while still in flight.
  DateTime? endTime;
  int? statusCode;
  String? error;

  RequestTrace({
    required this.id,
    required this.endpoint,
    required this.startTime,
    required this.metadata,
  });

  /// Elapsed time so far (uses now() while the trace is still open).
  Duration get duration {
    final stop = endTime ?? DateTime.now();
    return stop.difference(startTime);
  }

  /// Serializes the trace, including all spans, for logging/export.
  Map<String, dynamic> toJson() => {
        'id': id,
        'endpoint': endpoint,
        'start_time': startTime.toIso8601String(),
        'end_time': endTime?.toIso8601String(),
        'duration_ms': duration.inMilliseconds,
        'status_code': statusCode,
        'error': error,
        'metadata': metadata,
        'spans': spans.map((s) => s.toJson()).toList(),
      };
}
/// One timed sub-operation inside a [RequestTrace].
class TraceSpan {
  final String name;
  final DateTime startTime;

  /// Arbitrary attributes attached at creation or when the span finishes.
  final Map<String, dynamic> data;

  /// Set when the span finishes; null while still open.
  DateTime? endTime;

  TraceSpan({
    required this.name,
    required this.startTime,
    required this.data,
  });

  /// Elapsed time so far (uses now() while the span is still open).
  Duration get duration {
    final stop = endTime ?? DateTime.now();
    return stop.difference(startTime);
  }

  /// Serializes the span for logging/export.
  Map<String, dynamic> toJson() => {
        'name': name,
        'start_time': startTime.toIso8601String(),
        'end_time': endTime?.toIso8601String(),
        'duration_ms': duration.inMilliseconds,
        'data': data,
      };
}
/// Middleware that wraps every request in a [RequestTracer] trace plus a
/// single 'request_processing' span, recording success (200) or failure
/// (500) before rethrowing errors.
class TracingMiddleware implements Middleware {
  @override
  Future<dynamic> handle(
    dynamic request,
    Future<dynamic> Function(dynamic) next,
  ) async {
    // Open the trace and expose its id to downstream handlers.
    final traceId = RequestTracer.startTrace(
      request.endpoint,
      metadata: {
        'protocol': request.protocol,
        'client_ip': request.clientIp,
        'user_id': request.context['user_id'],
      },
    );
    request.context['trace_id'] = traceId;
    try {
      RequestTracer.addSpan(traceId, 'request_processing');
      final response = await next(request);
      RequestTracer.finishSpan(traceId, 'request_processing');
      RequestTracer.finishTrace(traceId, statusCode: 200);
      return response;
    } catch (e) {
      // Record the failure on both the span and the trace, then propagate.
      RequestTracer.finishSpan(traceId, 'request_processing', data: {'error': e.toString()});
      RequestTracer.finishTrace(traceId, statusCode: 500, error: e.toString());
      rethrow;
    }
  }
}
Resource Usage Analysis
Memory Profiling
/// Static byte counters of allocations/deallocations per type tag, for
/// building coarse memory profiles.
class MemoryProfiler {
  static final Map<String, int> _allocations = {};
  static final Map<String, int> _deallocations = {};

  /// Adds [size] bytes to the allocation total for [type].
  static void recordAllocation(String type, int size) {
    _allocations.update(type, (total) => total + size, ifAbsent: () => size);
  }

  /// Adds [size] bytes to the deallocation total for [type].
  static void recordDeallocation(String type, int size) {
    _deallocations.update(type, (total) => total + size, ifAbsent: () => size);
  }

  /// Snapshot of per-type allocated/deallocated/net-live byte counts.
  static MemoryProfile getProfile() {
    final profile = <String, MemoryUsage>{};
    final allTypes = {..._allocations.keys, ..._deallocations.keys};
    for (final type in allTypes) {
      final allocated = _allocations[type] ?? 0;
      final deallocated = _deallocations[type] ?? 0;
      profile[type] = MemoryUsage(
        allocated: allocated,
        deallocated: deallocated,
        current: allocated - deallocated,
      );
    }
    return MemoryProfile(usage: profile, timestamp: DateTime.now());
  }

  /// Zeroes all counters.
  static void reset() {
    _allocations.clear();
    _deallocations.clear();
  }
}
/// Point-in-time memory snapshot per type tag, with a printable report.
class MemoryProfile {
  /// Per-type allocation figures.
  final Map<String, MemoryUsage> usage;
  final DateTime timestamp;

  MemoryProfile({required this.usage, required this.timestamp});

  /// Sum of bytes ever allocated across all types.
  int get totalAllocated => usage.values.fold(0, (sum, u) => sum + u.allocated);

  /// Sum of net-live bytes across all types.
  int get totalCurrent => usage.values.fold(0, (sum, u) => sum + u.current);

  /// Prints a fixed-width table sorted by live bytes, largest first.
  void printReport() {
    print('Memory Profile Report (${timestamp.toIso8601String()})');
    print('=' * 60);
    print('Type'.padRight(20) + 'Allocated'.padLeft(12) + 'Current'.padLeft(12));
    print('-' * 60);
    // Note: the loop variable is named `row` to avoid shadowing the `usage`
    // field (the original shadowed it).
    final rows = usage.entries.toList()
      ..sort((a, b) => b.value.current.compareTo(a.value.current));
    for (final row in rows) {
      print(row.key.padRight(20) +
          _formatBytes(row.value.allocated).padLeft(12) +
          _formatBytes(row.value.current).padLeft(12));
    }
    print('-' * 60);
    print('Total'.padRight(20) +
        _formatBytes(totalAllocated).padLeft(12) +
        _formatBytes(totalCurrent).padLeft(12));
  }

  // Human-readable byte count (B/KB/MB/GB, one decimal above bytes).
  String _formatBytes(int bytes) {
    if (bytes < 1024) return '${bytes}B';
    if (bytes < 1024 * 1024) return '${(bytes / 1024).toStringAsFixed(1)}KB';
    if (bytes < 1024 * 1024 * 1024) return '${(bytes / (1024 * 1024)).toStringAsFixed(1)}MB';
    return '${(bytes / (1024 * 1024 * 1024)).toStringAsFixed(1)}GB';
  }
}
/// Byte counters for one type tag: total allocated, total freed, and the
/// net-live difference.
class MemoryUsage {
  final int allocated;
  final int deallocated;
  final int current;

  const MemoryUsage({
    required this.allocated,
    required this.deallocated,
    required this.current,
  });
}
Performance Benchmarking
/// Drives repeated calls against one endpoint with bounded concurrency and
/// aggregates the per-request outcomes into a [BenchmarkResult].
///
/// NOTE(review): depends on project types (HypermodernClient, Semaphore,
/// BenchmarkResult, RequestResult) declared elsewhere in this file.
class PerformanceBenchmark {
  /// Fires [iterations] requests at [endpoint], at most [concurrency] in
  /// flight at once, and returns the aggregated result.
  static Future<BenchmarkResult> benchmarkEndpoint({
    required String endpoint,
    required dynamic request,
    required HypermodernClient client,
    int iterations = 100,
    int concurrency = 10,
  }) async {
    final outcomes = <RequestResult>[];
    final gate = Semaphore(concurrency);
    final wallClock = Stopwatch()..start();
    final inFlight = List.generate(iterations, (i) async {
      await gate.acquire();
      try {
        final perRequest = Stopwatch()..start();
        try {
          final response = await client.request(endpoint, request);
          perRequest.stop();
          outcomes.add(RequestResult(
            success: true,
            duration: perRequest.elapsed,
            responseSize: _estimateResponseSize(response),
          ));
        } catch (e) {
          perRequest.stop();
          outcomes.add(RequestResult(
            success: false,
            duration: perRequest.elapsed,
            error: e.toString(),
          ));
        }
      } finally {
        gate.release();
      }
    });
    await Future.wait(inFlight);
    wallClock.stop();
    return BenchmarkResult(
      endpoint: endpoint,
      iterations: iterations,
      concurrency: concurrency,
      totalDuration: wallClock.elapsed,
      results: outcomes,
    );
  }

  // Rough response size: length of the JSON-encoded payload.
  static int _estimateResponseSize(dynamic response) {
    final json = jsonEncode((response as dynamic).toJson());
    return json.length;
  }
}
/// Aggregated outcome of one benchmark run, with derived statistics and a
/// printable report.
class BenchmarkResult {
  final String endpoint;
  final int iterations;
  final int concurrency;
  final Duration totalDuration;
  final List<RequestResult> results;

  BenchmarkResult({
    required this.endpoint,
    required this.iterations,
    required this.concurrency,
    required this.totalDuration,
    required this.results,
  });

  int get successfulRequests => results.where((r) => r.success).length;

  int get failedRequests => results.where((r) => !r.success).length;

  /// Fraction of successful requests in [0, 1].
  /// Fix: guards iterations == 0 (the original produced NaN).
  double get successRate =>
      iterations == 0 ? 0.0 : successfulRequests / iterations;

  /// Mean duration of successful requests (zero when none succeeded).
  Duration get averageResponseTime {
    final successful = results.where((r) => r.success);
    if (successful.isEmpty) return Duration.zero;
    final totalMs = successful.fold<int>(0, (sum, r) => sum + r.duration.inMilliseconds);
    return Duration(milliseconds: totalMs ~/ successful.length);
  }

  Duration get minResponseTime {
    final successful = results.where((r) => r.success);
    if (successful.isEmpty) return Duration.zero;
    return successful.map((r) => r.duration).reduce((a, b) => a < b ? a : b);
  }

  Duration get maxResponseTime {
    final successful = results.where((r) => r.success);
    if (successful.isEmpty) return Duration.zero;
    return successful.map((r) => r.duration).reduce((a, b) => a > b ? a : b);
  }

  /// Overall throughput.
  /// Fix: the original divided by inMilliseconds, returning Infinity for
  /// sub-millisecond runs and truncating precision; microseconds with a
  /// zero-guard is used instead.
  double get requestsPerSecond {
    final micros = totalDuration.inMicroseconds;
    if (micros == 0) return 0.0;
    return iterations / micros * 1000000;
  }

  /// Prints a human-readable summary, including an error histogram when
  /// any request failed.
  void printReport() {
    print('Benchmark Report: $endpoint');
    print('=' * 50);
    print('Iterations: $iterations');
    print('Concurrency: $concurrency');
    print('Total Duration: ${totalDuration.inMilliseconds}ms');
    print('');
    print('Results:');
    print(' Successful: $successfulRequests');
    print(' Failed: $failedRequests');
    print(' Success Rate: ${(successRate * 100).toStringAsFixed(1)}%');
    print('');
    print('Response Times:');
    print(' Average: ${averageResponseTime.inMilliseconds}ms');
    print(' Min: ${minResponseTime.inMilliseconds}ms');
    print(' Max: ${maxResponseTime.inMilliseconds}ms');
    print('');
    print('Throughput: ${requestsPerSecond.toStringAsFixed(1)} req/sec');
    if (failedRequests > 0) {
      print('');
      print('Errors:');
      final errorCounts = <String, int>{};
      for (final result in results.where((r) => !r.success)) {
        final error = result.error ?? 'Unknown error';
        errorCounts[error] = (errorCounts[error] ?? 0) + 1;
      }
      for (final entry in errorCounts.entries) {
        print(' ${entry.key}: ${entry.value}');
      }
    }
  }
}
/// Outcome of a single benchmarked request.
class RequestResult {
  /// Whether the request completed without throwing.
  final bool success;

  /// Wall-clock time the request took.
  final Duration duration;

  /// Approximate response payload size in bytes; null on failure.
  final int? responseSize;

  /// Error text when [success] is false; null otherwise.
  final String? error;

  const RequestResult({
    required this.success,
    required this.duration,
    this.responseSize,
    this.error,
  });
}
/// Counting semaphore for limiting concurrency within a single isolate.
///
/// Releases go to the oldest waiter first (FIFO); otherwise the permit
/// count is incremented.
class Semaphore {
  /// Maximum number of concurrent permit holders.
  final int maxCount;

  int _available;
  final Queue<Completer<void>> _waiters = Queue();

  Semaphore(this.maxCount) : _available = maxCount;

  /// Takes a permit, suspending until one is free.
  Future<void> acquire() async {
    if (_available > 0) {
      _available -= 1;
      return;
    }
    final waiter = Completer<void>();
    _waiters.add(waiter);
    await waiter.future;
  }

  /// Returns a permit, waking the oldest waiter if any is queued.
  void release() {
    if (_waiters.isEmpty) {
      _available += 1;
    } else {
      _waiters.removeFirst().complete();
    }
  }
}
What's Next
You now have comprehensive knowledge of performance optimization techniques in Hypermodern applications. The next chapter will focus on development workflow, covering the CLI tools, hot reload capabilities, code generation workflow, and testing strategies that make Hypermodern development efficient and productive.