@@ -62,14 +62,18 @@ async function fetchAllStackEvents(client: StackClient, startTimestamp?: Date):
     // If we have a start timestamp, filter events
     if (startTimestamp) {
       const filteredEvents = events.filter((e) => new Date(e.timestamp) >= startTimestamp)
-      allEvents.push(...filteredEvents)
+      for (const event of filteredEvents) {
+        allEvents.push(event)
+      }
 
       // If we got fewer filtered events than fetched, we've passed the cutoff
       if (filteredEvents.length < events.length) {
         hasMore = false
       }
     } else {
-      allEvents.push(...events)
+      for (const event of events) {
+        allEvents.push(event)
+      }
     }
 
     offset += events.length
@@ -169,8 +173,7 @@ export const migrateFromStackEvents = task({
   let stackEvents: StackEvent[]
   if (existingData && existingData.events.length > 0) {
     // Find latest timestamp and subtract 1 minute for safety buffer
-    const timestamps = existingData.events.map((e) => new Date(e.timestamp).getTime())
-    const latestTimestamp = Math.max(...timestamps)
+    const latestTimestamp = existingData.events.reduce((max, e) => Math.max(max, new Date(e.timestamp).getTime()), 0)
     const startTimestamp = new Date(latestTimestamp - 60 * 1000) // 1 minute buffer
 
     console.log(`Fetching new events since ${startTimestamp.toISOString()}`)
@@ -226,7 +229,7 @@ export const migrateFromStackEvents = task({
       events: mergedEvents,
     }
 
-    blobUrl = await storage.put(blobKey, JSON.stringify(dataToStore, null, 2))
+    blobUrl = await storage.put(blobKey, JSON.stringify(dataToStore))
     console.log(`Stored events to blob: ${blobUrl}`)
   } else {
     console.log("[DRY-RUN] Would store events to blob")
0 commit comments