import {
  S3Client,
  PutObjectCommand,
  GetObjectCommand,
  DeleteObjectCommand,
  HeadObjectCommand,
  ListObjectsV2Command,
} from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import fs from 'fs/promises';
import path from 'path';
import { StorageProvider, StorageConfig, UploadResult, FileInfo } from './storageProvider.js';

export interface S3Config extends StorageConfig {
  provider: 's3';
  endpoint: string;
  accessKeyId: string;
  secretAccessKey: string;
  bucketName: string;
  region: string;
  useSSL?: boolean;
}

export class S3Service implements StorageProvider {
  private client: S3Client;
  private bucketName: string;

  constructor(config: S3Config) {
    this.client = new S3Client({
      endpoint: config.endpoint,
      region: config.region,
      credentials: {
        accessKeyId: config.accessKeyId,
        secretAccessKey: config.secretAccessKey,
      },
      forcePathStyle: true, // Required for MinIO
    });
    this.bucketName = config.bucketName;
  }

  /**
   * Load S3 configuration from the s3-config.json file
   */
  static async loadConfig(): Promise<S3Config> {
    try {
      const configPath = path.join(process.cwd(), 's3-config.json');
      const configData = await fs.readFile(configPath, 'utf-8');
      return JSON.parse(configData);
    } catch (error) {
      console.warn('Failed to load s3-config.json, using environment variables as fallback');
      return {
        provider: 's3' as const,
        endpoint: process.env.S3_ENDPOINT || 'http://localhost:9000',
        accessKeyId: process.env.S3_ACCESS_KEY_ID || 'minioadmin',
        secretAccessKey: process.env.S3_SECRET_ACCESS_KEY || 'minioadmin',
        bucketName: process.env.S3_BUCKET_NAME || 'music-files',
        region: process.env.S3_REGION || 'us-east-1',
        useSSL: process.env.S3_USE_SSL !== 'false',
      };
    }
  }

  /**
   * Create an S3Service instance with configuration from file
   */
  static async createFromConfig(): Promise<S3Service> {
    const config = await this.loadConfig();
    return new S3Service(config);
  }

  /**
   * Upload a file to S3
   */
  async uploadFile(
    file: Buffer,
    originalName: string,
    contentType: string,
    targetFolder?: string
  ): Promise<UploadResult> {
    // Sanitize filename to be safe for S3
    const sanitizedFilename = this.sanitizeFilename(originalName);
    // Strip leading/trailing slashes from the target folder
    const safeFolder = typeof targetFolder === 'string' ? targetFolder.replace(/^\/+|\/+$/g, '') : '';
    // Use the original filename instead of a UUID
    const key = safeFolder ? `${safeFolder}/${sanitizedFilename}` : sanitizedFilename;

    // Check if the file already exists and handle conflicts
    const finalKey = await this.handleFilenameConflict(key);

    const command = new PutObjectCommand({
      Bucket: this.bucketName,
      Key: finalKey,
      Body: file,
      ContentType: contentType,
      Metadata: {
        originalName,
        uploadedAt: new Date().toISOString(),
      },
    });

    await this.client.send(command);

    return {
      key: finalKey,
      url: `${this.bucketName}/${finalKey}`,
      size: file.length,
      contentType,
    };
  }

  /**
   * Recursively list all files in the S3 bucket
   */
  async listAllFiles(prefix: string = ''): Promise<FileInfo[]> {
    const files: FileInfo[] = [];
    let continuationToken: string | undefined;

    do {
      const command = new ListObjectsV2Command({
        Bucket: this.bucketName,
        Prefix: prefix,
        ContinuationToken: continuationToken,
      });

      const response = await this.client.send(command);

      if (response.Contents) {
        for (const object of response.Contents) {
          if (object.Key && !object.Key.endsWith('/')) { // Skip directory placeholders
            files.push({
              key: object.Key,
              size: object.Size || 0,
              lastModified: object.LastModified || new Date(),
            });
          }
        }
      }

      continuationToken = response.NextContinuationToken;
    } while (continuationToken);

    return files;
  }

  /**
   * List all folders (prefixes) in the bucket. Recursively collects nested prefixes.
   */
  async listAllFolders(prefix: string = ''): Promise<string[]> {
    const folders = new Set<string>();
    const queue: string[] = [prefix];

    while (queue.length > 0) {
      const currentPrefix = queue.shift() || '';
      let continuationToken: string | undefined;

      do {
        const command = new ListObjectsV2Command({
          Bucket: this.bucketName,
          Prefix: currentPrefix,
          Delimiter: '/',
          ContinuationToken: continuationToken,
        });

        const response = await this.client.send(command);

        const common = (response.CommonPrefixes || []).map(cp => cp.Prefix).filter(Boolean) as string[];
        for (const p of common) {
          // Normalize: strip the trailing slash
          const normalized = p.replace(/\/+$/, '');
          if (!folders.has(normalized)) {
            folders.add(normalized);
            // Continue deeper into this prefix
            queue.push(p);
          }
        }

        continuationToken = response.NextContinuationToken;
      } while (continuationToken);
    }

    return Array.from(folders).sort();
  }

  /**
   * Generate a presigned URL for secure file access
   */
  async getPresignedUrl(key: string, expiresIn: number = 3600): Promise<string> {
    const command = new GetObjectCommand({
      Bucket: this.bucketName,
      Key: key,
    });

    return await getSignedUrl(this.client, command, { expiresIn });
  }

  /**
   * Delete a file from S3
   */
  async deleteFile(key: string): Promise<void> {
    throw new Error('File deletion is disabled to prevent accidental data loss');
  }

  /**
   * Check if a file exists
   */
  async fileExists(key: string): Promise<boolean> {
    try {
      const command = new HeadObjectCommand({
        Bucket: this.bucketName,
        Key: key,
      });
      await this.client.send(command);
      return true;
    } catch (error) {
      return false;
    }
  }

  /**
   * Get file metadata
   */
  async getFileMetadata(key: string) {
    const command = new HeadObjectCommand({
      Bucket: this.bucketName,
      Key: key,
    });

    return await this.client.send(command);
  }

  /**
   * Get file content as a buffer
   */
  async getFileContent(key: string): Promise<Buffer> {
    const command = new GetObjectCommand({
      Bucket: this.bucketName,
      Key: key,
    });

    const response = await this.client.send(command);

    if (!response.Body) {
      throw new Error('File has no content');
    }

    // Convert the response stream to a buffer
    const chunks: Uint8Array[] = [];
    const stream = response.Body as any;

    return new Promise((resolve, reject) => {
      stream.on('data', (chunk: Uint8Array) => chunks.push(chunk));
      stream.on('error', reject);
      stream.on('end', () => resolve(Buffer.concat(chunks)));
    });
  }

  /**
   * Get a streaming URL for a file
   */
  async getStreamingUrl(key: string): Promise<string> {
    return `${process.env.S3_ENDPOINT}/${this.bucketName}/${key}`;
  }

  /**
   * Test the connection to S3
   */
  async testConnection(): Promise<boolean> {
    try {
      // Try to list objects with a limit of 1 to test the connection
      const command = new ListObjectsV2Command({
        Bucket: this.bucketName,
        MaxKeys: 1,
      });
      await this.client.send(command);
      return true;
    } catch (error) {
      console.error('S3 connection test failed:', error);
      return false;
    }
  }

  /**
   * Sanitize a filename to be safe for S3
   */
  private sanitizeFilename(filename: string): string {
    // Remove or replace characters that might cause issues in S3
    return filename
      .replace(/[<>:"|?*]/g, '_') // Replace problematic characters
      .replace(/[\x00-\x1f\x80-\x9f]/g, '') // Remove control characters
      .replace(/\s+/g, ' ') // Normalize whitespace
      .trim();
  }

  /**
   * Handle filename conflicts by adding a number suffix
   */
  private async handleFilenameConflict(key: string): Promise<string> {
    try {
      // Check if the file exists
      const command = new HeadObjectCommand({
        Bucket: this.bucketName,
        Key: key,
      });
      await this.client.send(command);

      // File exists, generate a new name with a number suffix
      const pathParts = key.split('/');
      const filename = pathParts.pop() || '';
      const dir = pathParts.join('/');
      // Handle extensionless filenames: only split on a dot that is not the first character
      const dotIndex = filename.lastIndexOf('.');
      const nameWithoutExt = dotIndex > 0 ? filename.substring(0, dotIndex) : filename;
      const extension = dotIndex > 0 ? filename.substring(dotIndex) : '';

      let counter = 1;
      let newKey: string;
      do {
        const newFilename = `${nameWithoutExt} (${counter})${extension}`;
        newKey = dir ? `${dir}/${newFilename}` : newFilename;
        counter++;
        // Prevent an infinite loop
        if (counter > 1000) {
          throw new Error('Too many filename conflicts');
        }
      } while (await this.fileExists(newKey));

      return newKey;
    } catch (error) {
      // HeadObject threw, so the file doesn't exist (or the check failed); use the original key
      return key;
    }
  }
}
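// ---------------------------------------------------------------------------
// Usage sketch (commented out; not part of the service). A minimal example of
// how this provider might be wired up, assuming the s3-config.json /
// environment-variable defaults above. The file path `./example.mp3`, the
// `uploads` folder, and the module path './s3Service.js' are hypothetical
// placeholders; adjust them to your project.
//
// import fs from 'fs/promises';
// import { S3Service } from './s3Service.js';
//
// async function main() {
//   const storage = await S3Service.createFromConfig();
//
//   if (!(await storage.testConnection())) {
//     throw new Error('Cannot reach the S3/MinIO endpoint');
//   }
//
//   const data = await fs.readFile('./example.mp3');
//   const result = await storage.uploadFile(data, 'example.mp3', 'audio/mpeg', 'uploads');
//   console.log('Uploaded as', result.key);
//
//   // Presigned URL valid for one hour (the default expiresIn of 3600 seconds)
//   console.log(await storage.getPresignedUrl(result.key));
// }
//
// main().catch(console.error);
// ---------------------------------------------------------------------------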