From 73b4bc6eb1cdb2b427500705a3a3863e88ad1c3c Mon Sep 17 00:00:00 2001
From: Geert Rademakes
Date: Fri, 8 Aug 2025 16:24:25 +0200
Subject: [PATCH] fix(songs): repair generic song list route; make XML import
 upsert + delete stale while preserving s3File; unlink musicFiles for removed
 songs; auto-trigger matching

---
 packages/backend/src/routes/songs.ts | 64 ++++++++++++++++++----------
 1 file changed, 41 insertions(+), 23 deletions(-)

diff --git a/packages/backend/src/routes/songs.ts b/packages/backend/src/routes/songs.ts
index 184ebec..7b12ce2 100644
--- a/packages/backend/src/routes/songs.ts
+++ b/packages/backend/src/routes/songs.ts
@@ -34,21 +34,14 @@ router.get('/', async (req: Request, res: Response) => {
     const totalSongs = await Song.countDocuments(query);
     const totalPages = Math.ceil(totalSongs / limit);
 
-    // Get songs for this page in the exact playlist order
-    // Determine the slice of trackIds for the requested page
-    const pageStart = (page - 1) * limit;
-    const pageEnd = Math.min(pageStart + limit, trackIds.length);
-    const pageTrackIds = trackIds.slice(pageStart, pageEnd);
-
-    const pageSongs = await Song.find({ id: { $in: pageTrackIds } })
+    // Generic song listing (not playlist specific)
+    const songs = await Song.find(query)
+      .skip(skip)
+      .limit(limit)
       .populate('s3File.musicFileId')
       .lean();
 
-    // Order them to match pageTrackIds
-    const idToSong: Record<string, any> = {};
-    for (const s of pageSongs) idToSong[s.id] = s;
-    const songs = pageTrackIds.map(id => idToSong[id]).filter(Boolean);
-    console.log(`Found ${songs.length} songs (${totalSongs} total), ${songs.filter((s: any) => s.s3File?.hasS3File).length} with S3 files`);
+    console.log(`Found ${songs.length} songs (${totalSongs} total)`);
 
     res.json({
       songs,
@@ -240,17 +233,42 @@ router.get('/export', async (req: Request, res: Response) => {
 router.post('/batch', async (req: Request, res: Response) => {
   try {
     console.log('Received batch upload request');
-    const songs = req.body;
-    console.log(`Attempting to save ${songs.length} songs`);
-
-    // Delete all existing songs first
-    await Song.deleteMany({});
-    console.log('Cleared existing songs');
-
-    // Insert new songs
-    const result = await Song.insertMany(songs);
-    console.log(`Successfully saved ${result.length} songs`);
-    res.status(201).json(result);
+    const songs = Array.isArray(req.body) ? req.body : [];
+    console.log(`Attempting to upsert ${songs.length} songs`);
+
+    // Build bulk upserts preserving existing s3File field (we only $set provided fields)
+    const ops = songs.map((s: any) => ({
+      updateOne: {
+        filter: { id: s.id },
+        update: { $set: s },
+        upsert: true,
+      }
+    }));
+
+    if (ops.length > 0) {
+      await Song.bulkWrite(ops, { ordered: false });
+    }
+
+    // Delete songs that are no longer present in the latest XML and unlink their music files
+    const incomingIds = new Set(songs.map((s: any) => s.id));
+    const existing = await Song.find({}, { id: 1 }).lean();
+    const staleIds = existing.map((e: any) => e.id).filter((id: string) => !incomingIds.has(id));
+    if (staleIds.length > 0) {
+      await Song.deleteMany({ id: { $in: staleIds } });
+      await MusicFile.updateMany({ songId: { $in: staleIds } }, { $unset: { songId: 1 } });
+    }
+
+    console.log('Songs upserted and stale songs removed');
+
+    // Optionally trigger a background matching pass to relink any unmatched files
+    try {
+      const { backgroundJobService } = await import('../services/backgroundJobService.js');
+      backgroundJobService.startJob({ type: 'song-matching', options: { minConfidence: 0.7 } });
+    } catch (e) {
+      console.warn('Could not trigger matching job after XML import:', e);
+    }
+
+    res.status(201).json({ upserted: songs.length });
   } catch (error) {
     console.error('Error creating songs:', error);
     res.status(500).json({ message: 'Error creating songs', error });