@@ -323,18 +323,36 @@ async function processCollectionItems(nuxt: Nuxt, collections: ResolvedCollection
             body: content,
             path: fullPath,
           })
-          if (parsedContent) {
-            db.insertDevelopmentCache(keyInCollection, JSON.stringify(parsedContent), checksum)
-          }
         }
-
+
         // Add manually provided components from the content
         if (parsedContent?.__metadata?.components) {
          usedComponents.push(...parsedContent.__metadata.components)
        }
 
-        const { queries, hash } = generateCollectionInsert(collection, parsedContent)
-        list.push([key, queries, hash])
+        // Special handling for CSV files
+        if (parsedContent.__metadata?.rows) {
+          const rows = parsedContent.__metadata?.rows as Array<Record<string, string>>
+          // A CSV file can contain multiple rows, so it cannot be processed as a single ParsedContent.
+          // Row id priority: explicit `id` field > first column value > row index.
+          for (let i = 0; i < rows.length; i++) {
+            const rowid = rows[i].id || rows[i][Object.keys(rows[i])[0]] || String(i)
+            const rowContent = {
+              id: parsedContent.id + '/' + rowid,
+              ...rows[i]
+            }
+            db.insertDevelopmentCache(parsedContent.id + '/' + rowid, JSON.stringify(rowContent), checksum)
+            const { queries, hash } = generateCollectionInsert(collection, rowContent)
+            list.push([key, queries, hash])
+          }
+        }
+        else {
+          if (parsedContent) {
+            db.insertDevelopmentCache(keyInCollection, JSON.stringify(parsedContent), checksum)
+          }
+          const { queries, hash } = generateCollectionInsert(collection, parsedContent)
+          list.push([key, queries, hash])
+        }
       }
       catch (e: unknown) {
         logger.warn(`"${keyInCollection}" is ignored because parsing failed. Error: ${e instanceof Error ? e.message : 'Unknown error'}`)
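
For reference, here is a minimal, standalone sketch of how the per-row document ids in the CSV branch above are derived (explicit `id` field, then the first column value, then the row index). The `deriveRowId` helper, `docId`, and the sample rows are illustrative assumptions, not code from this module:

```ts
// Sketch of the row-id priority rule: `id` field > first column value > row index.
function deriveRowId(row: Record<string, string>, index: number): string {
  return row.id || row[Object.keys(row)[0]] || String(index)
}

const docId = 'content/users.csv' // stands in for parsedContent.id
const rows: Array<Record<string, string>> = [
  { id: 'alice', role: 'admin' },  // explicit id        -> content/users.csv/alice
  { name: 'bob', role: 'editor' }, // first column value -> content/users.csv/bob
  { name: '', role: 'viewer' },    // empty first column -> content/users.csv/2 (index fallback)
]

const rowIds = rows.map((row, i) => `${docId}/${deriveRowId(row, i)}`)
console.log(rowIds)
// [ 'content/users.csv/alice', 'content/users.csv/bob', 'content/users.csv/2' ]
```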