File Upload and Storage
Modern web applications frequently need to handle file uploads, from simple profile pictures to complex document management systems. Hypermodern provides a comprehensive file upload and storage system that handles everything from basic uploads to enterprise-grade features like virus scanning, image processing, and chunked uploads for large files.
Understanding File Upload Architecture
The Hypermodern file upload system is built around several key components that work together to provide a secure, scalable, and flexible solution:
- Upload Middleware: Handles multipart form parsing and initial validation
- File Storage Service: Abstracts storage backends (local, cloud)
- Processing Pipeline: Handles file transformation and optimization
- Security Layer: Provides virus scanning and content validation
- Metadata Management: Tracks file information and relationships
Core Components
import 'package:hypermodern_server/hypermodern_server.dart';
/// Aggregates the collaborating services that make up the Hypermodern
/// file upload stack.
///
/// Each collaborator is injected so storage backends, processing steps,
/// and security checks can be swapped independently (see the component
/// list above).
class FileUploadSystem {
/// Parses multipart form data and performs initial request validation.
final FileUploadMiddleware uploadMiddleware;
/// Abstracts the storage backend (local disk or cloud).
final FileStorageService storageService;
/// Runs file transformation and optimization steps after upload.
final FileProcessingPipeline processingPipeline;
/// Provides virus scanning and content validation.
final FileSecurityService securityService;
/// Tracks stored-file information and relationships.
final FileMetadataService metadataService;
FileUploadSystem({
required this.uploadMiddleware,
required this.storageService,
required this.processingPipeline,
required this.securityService,
required this.metadataService,
});
}
Basic File Upload Setup
Configuration
Start by configuring the file upload system with appropriate limits and validation rules:
/// Ready-made upload configurations.
///
/// [standard] suits general-purpose uploads (images plus common document
/// formats); [restrictive] locks uploads down to PDFs and plain text for
/// security-sensitive areas.
class FileUploadConfig {
/// General-purpose preset: 50MB per file, up to 10 files per request.
static FileUploadConfig get standard => FileUploadConfig(
uploadDir: 'temp_uploads',
maxFileSize: 50 * 1024 * 1024, // 50MB
maxFiles: 10,
allowedExtensions: ['.jpg', '.jpeg', '.png', '.pdf', '.txt', '.doc', '.docx'],
allowedMimeTypes: [
'image/jpeg',
'image/png',
'application/pdf',
'text/plain',
'application/msword',
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
],
autoSave: false, // Handle saving manually for more control
virusScanEnabled: true,
generateThumbnails: true,
);
/// Locked-down preset: 10MB per file, max 5 files, PDF/plain text only,
/// with authentication required.
static FileUploadConfig get restrictive => FileUploadConfig(
uploadDir: 'secure_uploads',
maxFileSize: 10 * 1024 * 1024, // 10MB
maxFiles: 5,
allowedExtensions: ['.pdf', '.txt'],
allowedMimeTypes: ['application/pdf', 'text/plain'],
autoSave: false,
virusScanEnabled: true,
requireAuthentication: true,
);
}
Server Setup
Configure your Hypermodern server with file upload capabilities:
/// Wires file-upload support into [server]: upload middleware, a storage
/// backend, and the post-upload processing pipeline, then registers the
/// upload endpoints.
void configureFileUploads(HypermodernServer server) {
  final config = FileUploadConfig.standard;

  // Multipart parsing + initial validation happens in middleware.
  server.addMiddleware(FileUploadMiddleware(config: config));

  // The storage backend shares the middleware's size limit so the two
  // layers cannot disagree.
  final storage = FileStorageService(
    config: FileStorageConfig(
      baseDirectory: 'storage/uploads',
      organizeByDate: true,
      preserveOriginalNames: true,
      maxFileSize: config.maxFileSize,
    ),
  );

  // Processors run in the order listed.
  final pipeline = FileProcessingPipeline([
    VirusScanProcessor(),
    ImageOptimizationProcessor(),
    ThumbnailGenerationProcessor(),
    MetadataExtractionProcessor(),
  ]);

  _registerFileUploadEndpoints(server, storage, pipeline);
}
Single File Upload
Basic Implementation
class FileUploadController {
  /// Handles a single-file upload request.
  ///
  /// Validates, processes, and stores the file, then records its
  /// metadata. Returns a JSON-style map.
  ///
  /// BUG FIX: validation and security failures previously fell into the
  /// generic catch and were reported with code 500; they are client
  /// errors and now return 400, reserving 500 for unexpected failures.
  static Future<dynamic> uploadSingle(RequestData data) async {
    final uploadedFiles = data.files.values.toList();
    if (uploadedFiles.isEmpty) {
      return {'error': 'No files uploaded', 'code': 400};
    }
    if (uploadedFiles.length > 1) {
      return {'error': 'Only one file allowed', 'code': 400};
    }
    final file = uploadedFiles.first;
    try {
      // Validate file (size, extension, MIME type, virus scan).
      await _validateFile(file);
      // Process file
      final processedFile = await FileProcessingService.process(file);
      // Store file
      final storedFile = await FileStorageService.store(processedFile);
      // Create database record
      final fileRecord = await FileMetadataService.create(
        storedFile: storedFile,
        uploadedBy: data.user?.id,
        metadata: {
          'original_name': file.filename,
          'upload_ip': data.clientIP,
          'upload_time': DateTime.now().toIso8601String(),
        },
      );
      return {
        'success': true,
        'file': {
          'id': fileRecord.id,
          'filename': storedFile.originalName,
          'size': storedFile.size,
          'contentType': storedFile.contentType,
          'url': '/api/files/${fileRecord.id}',
          'uploadedAt': fileRecord.createdAt.toIso8601String(),
        },
      };
    } on ValidationException catch (e) {
      // Client sent a file that breaks the upload rules.
      return {'error': e.toString(), 'code': 400};
    } on SecurityException catch (e) {
      // Client sent a file that failed the security scan.
      return {'error': e.toString(), 'code': 400};
    } catch (e) {
      // Anything else is an unexpected server-side failure.
      return {'error': e.toString(), 'code': 500};
    }
  }

  /// Throws [ValidationException] for size/extension/MIME violations and
  /// [SecurityException] when the virus scan flags the bytes.
  static Future<void> _validateFile(UploadedFile file) async {
    // Size validation
    if (file.size > 50 * 1024 * 1024) {
      throw ValidationException('File too large (max 50MB)');
    }
    // Extension validation.
    // BUG FIX: `split('.').last` returned the whole name for dotless
    // filenames; lastIndexOf treats "no dot" explicitly as "no extension".
    final allowedExtensions = ['.jpg', '.jpeg', '.png', '.pdf', '.txt'];
    final dotIndex = file.filename.lastIndexOf('.');
    final extension =
        dotIndex < 0 ? '' : file.filename.substring(dotIndex).toLowerCase();
    if (!allowedExtensions.contains(extension)) {
      throw ValidationException('File type not allowed');
    }
    // Content type validation
    final allowedTypes = ['image/jpeg', 'image/png', 'application/pdf', 'text/plain'];
    if (!allowedTypes.contains(file.contentType)) {
      throw ValidationException('Invalid content type');
    }
    // Virus scan
    final scanResult = await VirusScanService.scan(file.bytes);
    if (!scanResult.isClean) {
      throw SecurityException('File failed security scan');
    }
  }
}
Advanced Single Upload with Processing
class AdvancedFileUploadController {
  /// Uploads a single file with caller-supplied processing options,
  /// producing one or more stored versions plus a database record.
  static Future<dynamic> uploadWithProcessing(RequestData data) async {
    final upload = _extractSingleFile(data);
    final options = _extractProcessingOptions(data);
    try {
      // Validate, then scan, before touching storage.
      await FileValidator.validate(upload, options.validationRules);
      await SecurityScanner.scan(upload);

      // Type-specific processing may yield several output versions.
      final versions = await _processFile(upload, options);
      final stored = await _storeFiles(versions);
      final record = await _createFileRecord(upload, stored, data.user);

      return _generateSuccessResponse(record, stored);
    } catch (e) {
      // Clean up partial state, then surface the original error.
      await _handleUploadError(e, upload);
      rethrow;
    }
  }

  /// Dispatches to a type-specific processor; unknown content types pass
  /// through unchanged.
  static Future<List<ProcessedFile>> _processFile(
    UploadedFile file,
    ProcessingOptions options,
  ) async {
    final type = file.contentType;
    if (type == 'image/jpeg' || type == 'image/png') {
      return ImageProcessor.process(file, options.imageOptions);
    }
    if (type == 'application/pdf') {
      return PDFProcessor.process(file, options.pdfOptions);
    }
    if (type == 'video/mp4') {
      return VideoProcessor.process(file, options.videoOptions);
    }
    return [ProcessedFile.fromOriginal(file)];
  }
}
Multiple File Upload
Batch Upload Implementation
class BatchUploadController {
  /// Handles a multi-file upload request.
  ///
  /// Enforces a 10-file / 100MB-total budget, processes every file
  /// concurrently, and reports per-file successes and failures rather
  /// than aborting the whole batch on the first error.
  static Future<dynamic> uploadMultiple(RequestData data) async {
    final files = data.files.values.toList();
    if (files.isEmpty) {
      return {'error': 'No files uploaded'};
    }
    if (files.length > 10) {
      return {'error': 'Too many files (max 10)'};
    }

    // Enforce the aggregate size budget before doing any work.
    var totalSize = 0;
    for (final file in files) {
      totalSize += file.size;
    }
    if (totalSize > 100 * 1024 * 1024) { // 100MB total
      return {'error': 'Total file size too large (max 100MB)'};
    }

    // Each file runs through the pipeline independently; failures become
    // failure outcomes instead of exceptions.
    final outcomes = await Future.wait(
      files.map((file) => _processFileAsync(file, data)),
      eagerError: false,
    );

    final succeeded = <Map<String, dynamic>>[];
    final failed = <Map<String, dynamic>>[];
    for (var i = 0; i < outcomes.length; i++) {
      final file = files[i];
      final outcome = outcomes[i];
      if (outcome.isSuccess) {
        succeeded.add({
          'fieldName': file.fieldName,
          'filename': file.filename,
          'fileId': outcome.fileId,
          'url': outcome.url,
        });
      } else {
        failed.add({
          'fieldName': file.fieldName,
          'filename': file.filename,
          'error': outcome.error,
        });
      }
    }

    return {
      'success': failed.isEmpty,
      'uploaded': succeeded.length,
      'failed': failed.length,
      'files': succeeded,
      'errors': failed,
    };
  }

  /// Runs the full validate → process → store → record pipeline for one
  /// file, converting any thrown error into a failure outcome.
  static Future<UploadOutcome> _processFileAsync(
    UploadedFile file,
    RequestData data,
  ) async {
    try {
      await FileValidator.validate(file);
      final processed = await FileProcessor.process(file);
      final stored = await FileStorage.store(processed);
      final record = await FileMetadata.create(stored, data.user?.id);
      return UploadOutcome.success(
        fileId: record.id,
        url: '/api/files/${record.id}',
      );
    } catch (e) {
      return UploadOutcome.failure(error: e.toString());
    }
  }
}
Image Upload and Processing
Comprehensive Image Handling
class ImageUploadController {
  /// Handles an image upload: validates, extracts metadata, generates the
  /// requested variants, stores them, and records everything in the DB.
  static Future<dynamic> uploadImage(RequestData data) async {
    final file = _extractImageFile(data);
    final options = ImageProcessingOptions.fromRequest(data);
    try {
      await ImageValidator.validate(file);
      final metadata = await ImageMetadataExtractor.extract(file);
      final processed = await ImageProcessor.process(file, options);
      final stored = await _storeImageVersions(processed);
      final record = await ImageRecord.create(
        originalFile: stored.original,
        thumbnails: stored.thumbnails,
        metadata: metadata,
        uploadedBy: data.user?.id,
      );

      // One entry per stored thumbnail variant.
      final thumbnailPayload = [
        for (final thumb in stored.thumbnails)
          {
            'size': thumb.size,
            'url': thumb.publicUrl,
            'width': thumb.width,
            'height': thumb.height,
          },
      ];

      return {
        'success': true,
        'image': {
          'id': record.id,
          'original': {
            'url': stored.original.publicUrl,
            'width': metadata.width,
            'height': metadata.height,
            'size': stored.original.size,
          },
          'thumbnails': thumbnailPayload,
          'metadata': {
            'format': metadata.format,
            'colorSpace': metadata.colorSpace,
            'hasAlpha': metadata.hasAlpha,
            'exif': metadata.exifData,
          },
        },
      };
    } catch (e) {
      return {'error': e.toString()};
    }
  }
}
class ImageProcessor {
  /// Produces the optimized original, thumbnails, and responsive variants
  /// requested by [options] from an uploaded image.
  ///
  /// Throws [ProcessingException] if the bytes cannot be decoded.
  static Future<ProcessedImageSet> process(
    UploadedFile file,
    ImageProcessingOptions options,
  ) async {
    final image = await decodeImage(file.bytes);
    if (image == null) {
      throw ProcessingException('Invalid image format');
    }
    final results = ProcessedImageSet();

    // Optimize original.
    if (options.optimizeOriginal) {
      // BUG FIX: resize first so the recorded width/height describe the
      // image that is actually stored. Previously the pre-resize
      // dimensions were reported even when _optimizeImage shrank the
      // image to fit maxWidth/maxHeight.
      final resized = _resizeIfNeeded(image, options);
      results.original = ProcessedImage(
        data: await _optimizeImage(resized, options),
        width: resized.width,
        height: resized.height,
        format: options.outputFormat,
      );
    }

    // Generate thumbnails (from the full-resolution decode, as before).
    for (final size in options.thumbnailSizes) {
      results.thumbnails.add(await _generateThumbnail(image, size, options));
    }

    // Generate responsive versions.
    if (options.generateResponsive) {
      results.responsive.addAll(
        await _generateResponsiveVersions(image, options),
      );
    }
    return results;
  }

  /// Returns [image] scaled to fit `maxWidth`/`maxHeight` (aspect ratio
  /// preserved), or the image unchanged when no limit is configured.
  static Image _resizeIfNeeded(Image image, ImageProcessingOptions options) {
    if (options.maxWidth == null && options.maxHeight == null) {
      return image;
    }
    return copyResize(
      image,
      width: options.maxWidth,
      height: options.maxHeight,
      maintainAspect: true,
    );
  }

  /// Applies configured filters and encodes [image] in the requested
  /// output format with the configured quality.
  static Future<Uint8List> _optimizeImage(
    Image image,
    ImageProcessingOptions options,
  ) async {
    // Apply filters.
    if (options.sharpen) {
      // 3x3 sharpen kernel.
      image = convolution(image, [0, -1, 0, -1, 5, -1, 0, -1, 0]);
    }
    // Encode with quality settings.
    switch (options.outputFormat) {
      case ImageFormat.jpeg:
        return Uint8List.fromList(encodeJpg(image, quality: options.quality));
      case ImageFormat.png:
        return Uint8List.fromList(encodePng(image));
      case ImageFormat.webp:
        return Uint8List.fromList(encodeWebP(image, quality: options.quality));
      default:
        // Unrecognized formats fall back to JPEG.
        return Uint8List.fromList(encodeJpg(image, quality: options.quality));
    }
  }
}
Large File Upload with Chunking
Chunked Upload Implementation
class ChunkedUploadController {
  /// Starts a chunked upload session and returns the chunking parameters
  /// the client must use for subsequent [uploadChunk] calls.
  static Future<dynamic> initiateUpload(RequestData data) async {
    final filename = data.body['filename'] as String;
    final fileSize = data.body['fileSize'] as int;
    final chunkSize = data.body['chunkSize'] as int? ?? (5 * 1024 * 1024); // 5MB default

    // BUG FIX: reject zero/negative sizes, not just oversized files.
    if (fileSize <= 0) {
      return {'error': 'Invalid file size'};
    }
    if (fileSize > 1024 * 1024 * 1024) { // 1GB limit
      return {'error': 'File too large (max 1GB)'};
    }

    // Create upload session
    final uploadSession = await ChunkedUploadSession.create(
      filename: filename,
      fileSize: fileSize,
      chunkSize: chunkSize,
      userId: data.user?.id,
    );
    return {
      'success': true,
      'uploadId': uploadSession.id,
      'chunkSize': chunkSize,
      'totalChunks': uploadSession.totalChunks,
      'uploadUrl': '/api/upload/chunk',
    };
  }

  /// Stores one chunk; when the last chunk arrives, assembles and
  /// finalizes the file and returns its ID and URL.
  static Future<dynamic> uploadChunk(RequestData data) async {
    final uploadId = data.body['uploadId'] as String;
    final chunkIndex = data.body['chunkIndex'] as int;

    // BUG FIX: `data.files.values.first` threw a StateError when the
    // request carried no file part; report it as a client error instead.
    if (data.files.isEmpty) {
      return {'error': 'No chunk data provided'};
    }
    final file = data.files.values.first;
    try {
      // Get upload session
      final session = await ChunkedUploadSession.get(uploadId);
      if (session == null) {
        return {'error': 'Upload session not found'};
      }
      // BUG FIX: negative indexes were accepted; validate both bounds.
      if (chunkIndex < 0 || chunkIndex >= session.totalChunks) {
        return {'error': 'Invalid chunk index'};
      }
      // Store chunk
      await ChunkStorage.store(uploadId, chunkIndex, file.bytes);
      // Update session
      await session.markChunkReceived(chunkIndex);
      // Check if upload is complete
      if (session.isComplete) {
        final assembledFile = await _assembleFile(session);
        await _finalizeUpload(session, assembledFile);
        return {
          'success': true,
          'completed': true,
          'fileId': session.finalFileId,
          'url': '/api/files/${session.finalFileId}',
        };
      }
      return {
        'success': true,
        'completed': false,
        'chunksReceived': session.chunksReceived.length,
        'totalChunks': session.totalChunks,
        'progress': session.progress,
      };
    } catch (e) {
      return {'error': e.toString()};
    }
  }

  /// Reassembles all stored chunks, in index order, into one contiguous
  /// buffer.
  ///
  /// NOTE(review): this holds the whole file (up to 1GB) in memory;
  /// consider streaming chunks straight to the storage backend for very
  /// large uploads.
  static Future<AssembledFile> _assembleFile(ChunkedUploadSession session) async {
    final chunks = <Uint8List>[];
    for (int i = 0; i < session.totalChunks; i++) {
      final chunkData = await ChunkStorage.get(session.id, i);
      chunks.add(chunkData);
    }
    // Combine chunks into one buffer.
    final totalSize = chunks.fold<int>(0, (sum, chunk) => sum + chunk.length);
    final assembledData = Uint8List(totalSize);
    int offset = 0;
    for (final chunk in chunks) {
      assembledData.setRange(offset, offset + chunk.length, chunk);
      offset += chunk.length;
    }
    return AssembledFile(
      filename: session.filename,
      data: assembledData,
      contentType: _detectContentType(session.filename),
    );
  }

  /// Processes and stores the assembled file, records it in the database,
  /// points the session at the final file ID, and deletes the chunks.
  static Future<void> _finalizeUpload(
    ChunkedUploadSession session,
    AssembledFile file,
  ) async {
    // Process the assembled file
    final processedFile = await FileProcessor.process(file);
    // Store the file
    final storedFile = await FileStorage.store(processedFile);
    // Create database record
    final fileRecord = await FileRecord.create(
      storedFile: storedFile,
      uploadedBy: session.userId,
      uploadMethod: 'chunked',
    );
    // Update session with final file ID
    await session.setFinalFileId(fileRecord.id);
    // Clean up chunks only after everything above succeeded.
    await ChunkStorage.cleanup(session.id);
  }
}
File Security and Validation
Comprehensive Security Layer
class FileSecurityService {
  /// Runs every configured security check against [file] and aggregates
  /// the results into a single [SecurityScanResult].
  ///
  /// The four checks are independent reads of the upload, so they run
  /// concurrently instead of serially; Future.wait preserves input order,
  /// so the result list matches the original sequential version (virus
  /// scan, content validation, malware detection, structure analysis).
  static Future<SecurityScanResult> scanFile(UploadedFile file) async {
    final results = await Future.wait([
      VirusScanner.scan(file.bytes),
      ContentValidator.validate(file),
      MalwareDetector.scan(file),
      FileStructureAnalyzer.analyze(file),
    ]);
    return SecurityScanResult(
      // Clean only if every individual check passed.
      isClean: results.every((check) => check.passed),
      checks: results,
      riskLevel: _calculateRiskLevel(results),
    );
  }

  /// Moves [fileId] into quarantine and alerts the admin team.
  static Future<void> quarantineFile(String fileId, String reason) async {
    await QuarantineService.quarantine(fileId, reason);
    await NotificationService.notifyAdmins('File quarantined: $fileId - $reason');
  }
}
class ContentValidator {
  /// Inspects the uploaded bytes for signature mismatches, embedded
  /// executables, and other suspicious patterns, and reports every issue
  /// found.
  static Future<SecurityCheck> validate(UploadedFile file) async {
    final issues = <String>[];

    // Does the byte signature agree with what the extension claims?
    final actualType = await FileTypeDetector.detect(file.bytes);
    if (actualType != _getExpectedType(file.filename)) {
      issues.add('File signature mismatch');
    }

    // Executable payloads are never acceptable in uploads.
    if (await _containsExecutableCode(file.bytes)) {
      issues.add('Contains executable code');
    }

    issues.addAll(await _scanForSuspiciousPatterns(file.bytes));

    return SecurityCheck(
      name: 'Content Validation',
      passed: issues.isEmpty,
      issues: issues,
    );
  }

  /// Whether [data] carries a known executable signature.
  static Future<bool> _containsExecutableCode(Uint8List data) async {
    final signatures = [
      [0x4D, 0x5A], // PE executable ("MZ")
      [0x7F, 0x45, 0x4C, 0x46], // ELF executable
      [0xCA, 0xFE, 0xBA, 0xBE], // Mach-O executable
    ];
    return signatures.any((signature) => _hasSignature(data, signature));
  }
}
File Type Validation
class FileTypeValidator {
  /// MIME types accepted for each logical file category.
  static const Map<String, List<String>> allowedTypes = {
    'image': ['image/jpeg', 'image/png', 'image/gif', 'image/webp'],
    'document': ['application/pdf', 'text/plain', 'application/msword'],
    'video': ['video/mp4', 'video/webm', 'video/quicktime'],
    'audio': ['audio/mpeg', 'audio/wav', 'audio/ogg'],
  };

  /// Validates [file] against the rules for [category]: declared MIME
  /// type, actual byte signature, and category-specific constraints.
  static Future<ValidationResult> validate(
    UploadedFile file,
    String category,
  ) async {
    final permitted = allowedTypes[category];
    if (permitted == null) {
      return ValidationResult.failure('Unknown file category');
    }

    // Declared MIME type must be on the category's allow-list.
    if (!permitted.contains(file.contentType)) {
      return ValidationResult.failure('File type not allowed for category $category');
    }

    // The declared type is trivial to spoof; confirm against the bytes.
    final signatureType = await FileSignatureDetector.detect(file.bytes);
    if (!permitted.contains(signatureType)) {
      return ValidationResult.failure('File signature does not match declared type');
    }

    // Category-specific validation; other categories pass at this point.
    if (category == 'image') {
      return _validateImage(file);
    }
    if (category == 'document') {
      return _validateDocument(file);
    }
    if (category == 'video') {
      return _validateVideo(file);
    }
    return ValidationResult.success();
  }

  /// Decodes the image and rejects files that fail to parse, exceed
  /// 10000px in either dimension, or carry suspicious metadata.
  static Future<ValidationResult> _validateImage(UploadedFile file) async {
    try {
      final image = await decodeImage(file.bytes);
      if (image == null) {
        return ValidationResult.failure('Invalid image format');
      }
      if (image.width > 10000 || image.height > 10000) {
        return ValidationResult.failure('Image dimensions too large');
      }
      final metadata = await ImageMetadataExtractor.extract(file);
      if (metadata.hasSuspiciousData) {
        return ValidationResult.failure('Image contains suspicious metadata');
      }
      return ValidationResult.success();
    } catch (e) {
      return ValidationResult.failure('Image validation failed: $e');
    }
  }
}
File Storage Backends
Local Storage Implementation
class LocalFileStorage implements FileStorageBackend {
  /// Root directory all files are stored under.
  final String baseDirectory;

  /// When true, files are placed in year/month/day subdirectories.
  final bool organizeByDate;

  LocalFileStorage({
    required this.baseDirectory,
    this.organizeByDate = true,
  });

  /// Writes [file] to disk and returns its storage record.
  @override
  Future<StoredFile> store(ProcessedFile file) async {
    // BUG FIX: generate the file ID once and reuse it for both the record
    // and the on-disk name. Previously store() and _generateStoragePath()
    // each called _generateFileId(), so StoredFile.id never matched the
    // ID embedded in the stored filename.
    final fileId = _generateFileId();
    final storagePath = _generateStoragePath(fileId, file.filename);
    final fullPath = path.join(baseDirectory, storagePath);

    // Ensure directory exists before writing.
    await Directory(path.dirname(fullPath)).create(recursive: true);
    await File(fullPath).writeAsBytes(file.data);

    return StoredFile(
      id: fileId,
      originalName: file.filename,
      storagePath: storagePath,
      size: file.data.length,
      contentType: file.contentType,
      uploadedAt: DateTime.now(),
      backend: StorageBackend.local,
    );
  }

  /// Reads the stored bytes, or null when the file does not exist.
  @override
  Future<Uint8List?> retrieve(String storagePath) async {
    final fullPath = path.join(baseDirectory, storagePath);
    final file = File(fullPath);
    if (!await file.exists()) {
      return null;
    }
    return await file.readAsBytes();
  }

  /// Deletes the stored file; returns whether anything was deleted.
  @override
  Future<bool> delete(String storagePath) async {
    final fullPath = path.join(baseDirectory, storagePath);
    final file = File(fullPath);
    if (await file.exists()) {
      await file.delete();
      return true;
    }
    return false;
  }

  /// Builds the relative path for [fileId], preserving the extension of
  /// [filename] and optionally nesting under a year/month/day hierarchy.
  String _generateStoragePath(String fileId, String filename) {
    final extension = path.extension(filename);
    if (!organizeByDate) {
      return '$fileId$extension';
    }
    final now = DateTime.now();
    final month = now.month.toString().padLeft(2, '0');
    final day = now.day.toString().padLeft(2, '0');
    return '${now.year}/$month/$day/$fileId$extension';
  }
}
Database Integration
class FileMetadataService {
  /// Persists a [FileRecord] for [storedFile] and indexes searchable
  /// documents for full-text search.
  static Future<FileRecord> createRecord({
    required StoredFile storedFile,
    String? uploadedBy,
    Map<String, dynamic>? metadata,
    String? description,
    String? category,
  }) async {
    final record = FileRecord(
      id: storedFile.id,
      originalName: storedFile.originalName,
      storagePath: storedFile.storagePath,
      size: storedFile.size,
      contentType: storedFile.contentType,
      uploadedBy: uploadedBy,
      uploadedAt: storedFile.uploadedAt,
      metadata: metadata ?? {},
      description: description,
      category: category,
      backend: storedFile.backend,
    );
    await Database.insert('files', record.toJson());

    // Documents go into the search index as well as the database.
    if (_isSearchableDocument(record)) {
      await SearchIndexService.indexFile(record);
    }
    return record;
  }

  /// Looks up a single file record by ID; returns null when absent.
  static Future<FileRecord?> getRecord(String fileId) async {
    final rows = await Database.query(
      'SELECT * FROM files WHERE id = ?',
      [fileId],
    );
    return rows.isEmpty ? null : FileRecord.fromJson(rows.first);
  }

  /// Lists [userId]'s files, newest first, optionally filtered by
  /// [category] and paginated via [limit]/[offset].
  static Future<List<FileRecord>> getUserFiles(
    String userId, {
    String? category,
    int limit = 50,
    int offset = 0,
  }) async {
    // Build the query incrementally; values always travel as parameters.
    final sql = StringBuffer('SELECT * FROM files WHERE uploaded_by = ?');
    final params = <dynamic>[userId];
    if (category != null) {
      sql.write(' AND category = ?');
      params.add(category);
    }
    sql.write(' ORDER BY uploaded_at DESC LIMIT ? OFFSET ?');
    params
      ..add(limit)
      ..add(offset);
    final rows = await Database.query(sql.toString(), params);
    return [for (final row in rows) FileRecord.fromJson(row)];
  }
}
Performance Optimization
Caching Strategy
class FileCache {
  // Map insertion order doubles as LRU order: the first key is the least
  // recently used entry.
  static final Map<String, CachedFile> _cache = {};
  static const maxCacheSize = 100 * 1024 * 1024; // 100MB
  static int _currentCacheSize = 0;

  /// Returns the cached bytes for [fileId], or null on a miss or expiry.
  ///
  /// A hit is moved to the back of the LRU order. BUG FIX: an expired
  /// entry is now evicted and its size reclaimed — previously it stayed
  /// in the map and kept counting against [_currentCacheSize] forever.
  static Future<Uint8List?> get(String fileId) async {
    final cached = _cache.remove(fileId);
    if (cached == null) {
      return null;
    }
    if (!cached.isValid) {
      _currentCacheSize -= cached.data.length;
      return null;
    }
    _cache[fileId] = cached; // re-insert to refresh LRU recency
    return cached.data;
  }

  /// Caches [data] under [fileId], evicting least-recently-used entries
  /// as needed to stay within [maxCacheSize].
  static void put(String fileId, Uint8List data) {
    // Don't cache large files.
    if (data.length > 10 * 1024 * 1024) return; // 10MB limit

    // BUG FIX: overwriting an existing key must release the old entry's
    // size first, otherwise the size accounting drifts upward.
    final previous = _cache.remove(fileId);
    if (previous != null) {
      _currentCacheSize -= previous.data.length;
    }

    // Evict LRU entries until the new entry fits.
    while (_currentCacheSize + data.length > maxCacheSize && _cache.isNotEmpty) {
      final evicted = _cache.remove(_cache.keys.first)!;
      _currentCacheSize -= evicted.data.length;
    }

    _cache[fileId] = CachedFile(
      data: data,
      cachedAt: DateTime.now(),
      ttl: Duration(minutes: 30),
    );
    _currentCacheSize += data.length;
  }
}
Async Processing
class AsyncFileProcessor {
  // In-memory job queue; a copy of each job is also written to JobQueue.
  static final Queue<ProcessingJob> _queue = Queue();
  static bool _isProcessing = false;

  /// Enqueues [fileId] for background processing and kicks the drain
  /// loop.
  static Future<void> queueForProcessing(
    String fileId,
    ProcessingOptions options,
  ) async {
    final job = ProcessingJob(
      fileId: fileId,
      options: options,
      queuedAt: DateTime.now(),
    );
    _queue.add(job);
    await JobQueue.add('file_processing', job.toJson());
    // Fire-and-forget: the drain loop runs without blocking the caller.
    _processQueue();
  }

  /// Drains the queue one job at a time; re-entrant calls are no-ops
  /// thanks to the [_isProcessing] guard.
  static Future<void> _processQueue() async {
    if (_isProcessing || _queue.isEmpty) return;
    _isProcessing = true;
    try {
      while (_queue.isNotEmpty) {
        await _processJob(_queue.removeFirst());
      }
    } finally {
      _isProcessing = false;
    }
  }

  /// Loads the file referenced by [job], runs the processing pipeline,
  /// and records success or failure in the metadata store.
  static Future<void> _processJob(ProcessingJob job) async {
    try {
      final record = await FileMetadataService.getRecord(job.fileId);
      if (record == null) return;
      final bytes = await FileStorage.retrieve(record.storagePath);
      if (bytes == null) return;

      // Run the processing pipeline with the job's options.
      final versions = await FileProcessor.process(
        UploadedFile.fromData(bytes, record.originalName),
        job.options,
      );

      // Persist every produced version.
      for (final version in versions) {
        await FileStorage.store(version);
      }

      await FileMetadataService.updateProcessingStatus(
        job.fileId,
        ProcessingStatus.completed,
      );
    } catch (e) {
      await FileMetadataService.updateProcessingStatus(
        job.fileId,
        ProcessingStatus.failed,
        error: e.toString(),
      );
    }
  }
}
Best Practices and Security
1. Input Validation
Always validate files thoroughly:
class ComprehensiveFileValidator {
  /// Runs the full battery of checks (size, type, content, security)
  /// against [file] and aggregates the outcome.
  static Future<ValidationResult> validate(UploadedFile file) async {
    // Checks run in sequence; the list preserves their order.
    final checks = <ValidationCheck>[
      await _validateSize(file),
      await _validateType(file),
      await _validateContent(file),
      await _validateSecurity(file),
    ];

    final failures = [
      for (final check in checks)
        if (!check.passed) check,
    ];

    return ValidationResult(
      passed: failures.isEmpty,
      checks: checks,
      errors: [for (final check in failures) check.error],
    );
  }
}
2. Error Handling
Implement comprehensive error handling:
class FileUploadErrorHandler {
  /// Maps a thrown [error] to the JSON error payload returned to clients.
  ///
  /// BUG FIX: the original switched on `error.runtimeType`, which only
  /// matches exact types — any subclass of e.g. ValidationException fell
  /// through to the generic handler. `is` checks handle subtypes
  /// correctly, and type promotion removes the unsafe `as` casts.
  static Map<String, dynamic> handleError(Exception error, UploadedFile? file) {
    if (error is ValidationException) {
      return _handleValidationError(error);
    }
    if (error is SecurityException) {
      return _handleSecurityError(error, file);
    }
    if (error is StorageException) {
      return _handleStorageError(error);
    }
    if (error is ProcessingException) {
      return _handleProcessingError(error);
    }
    return _handleGenericError(error);
  }

  /// Logs the incident for the security team and returns a response that
  /// exposes only the sanitized user-facing message.
  static Map<String, dynamic> _handleSecurityError(
    SecurityException error,
    UploadedFile? file,
  ) {
    // Record the incident before responding.
    SecurityLogger.logIncident(
      type: 'file_upload_security_violation',
      details: error.toString(),
      file: file?.filename,
      timestamp: DateTime.now(),
    );
    return {
      'error': 'File failed security validation',
      'code': 'SECURITY_VIOLATION',
      'details': error.userMessage,
    };
  }
}
3. Monitoring and Analytics
Track file upload metrics:
class FileUploadAnalytics {
  /// Records one upload event in the analytics service.
  static Future<void> trackUpload(FileRecord record) async {
    await AnalyticsService.track('file_upload', {
      'file_id': record.id,
      'file_size': record.size,
      'content_type': record.contentType,
      'category': record.category,
      'user_id': record.uploadedBy,
      'timestamp': record.uploadedAt.toIso8601String(),
    });
  }

  /// Aggregates a user's upload counts and sizes, grouped by content
  /// type, over the optional [startDate]/[endDate] window (defaults to
  /// the full epoch-to-now range).
  static Future<Map<String, dynamic>> getUploadStats(
    String userId, {
    DateTime? startDate,
    DateTime? endDate,
  }) async {
    final from = startDate?.toIso8601String() ?? '1970-01-01';
    final to = endDate?.toIso8601String() ?? DateTime.now().toIso8601String();
    final rows = await Database.query('''
SELECT
COUNT(*) as total_uploads,
SUM(size) as total_size,
AVG(size) as avg_size,
content_type,
COUNT(*) as type_count
FROM files
WHERE uploaded_by = ?
AND uploaded_at BETWEEN ? AND ?
GROUP BY content_type
''', [userId, from, to]);
    return {
      'totalUploads': rows.fold(0, (sum, row) => sum + row['type_count']),
      'totalSize': rows.fold(0, (sum, row) => sum + row['total_size']),
      'byType': rows,
    };
  }
}
The file upload and storage system in Hypermodern provides enterprise-grade capabilities while maintaining simplicity for basic use cases. Whether you're handling simple profile pictures or building a comprehensive document management system, the flexible architecture adapts to your needs while ensuring security, performance, and scalability.