@@ -643,23 +643,42 @@ export async function uploadDocumentsToMemory({
 	name: string;
 	account: Account;
 }) {
-	for (const doc of documents) {
-		try {
-			p.log.message(`Uploading document: ${doc.name} ....`);
-			await new Promise(resolve => setTimeout(resolve, 800)); // To avoid rate limiting
-			const signedUrl = await getSignedUploadUrl({
-				documentName: doc.name,
-				memoryName: name,
-				account
-			});
+	const BATCH_SIZE = 5; // Number of concurrent uploads
+	const RATE_LIMIT_DELAY = 1000; // 1 second delay between requests
 
-			const uploadResponse = await uploadDocument(signedUrl, doc.blob);
-			dlog(`Upload response status: ${uploadResponse.status}`);
+	// Process documents in batches to avoid rate limiting
+	for (let i = 0; i < documents.length; i += BATCH_SIZE) {
+		const batch = documents.slice(i, i + BATCH_SIZE);
 
-			p.log.message(`Uploaded document: ${doc.name}`);
-		} catch (error) {
-			throw error;
-		}
+		const batchUploadPromises = batch.map(async (doc, index) => {
+			try {
+				// Stagger requests within batch
+				await new Promise(resolve =>
+					setTimeout(resolve, index * RATE_LIMIT_DELAY)
+				);
+
+				// p.log.message(`Uploading document: ${doc.name} ....`);
+				const signedUrl = await getSignedUploadUrl({
+					documentName: doc.name,
+					memoryName: name,
+					account
+				});
+
+				const uploadResponse = await uploadDocument(
+					signedUrl,
+					doc.blob
+				);
+				dlog(`Upload response status: ${uploadResponse.status}`);
+
+				p.log.message(`Uploaded document: ${doc.name}`);
+			} catch (error: any) {
+				throw new Error(
+					`Failed to upload ${doc.name}: ${error.message ?? error}`
+				);
+			}
+		});
+
+		await Promise.all(batchUploadPromises);
 	}
 }
 
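A note on the semantics of this change: Promise.all rejects as soon as any upload in a batch throws, so the first wrapped `Failed to upload ...` error aborts the whole run, while the other in-flight uploads in that batch keep running with their results dropped. If the goal is to attempt every document and report all failures together, Promise.allSettled is the usual alternative. The sketch below is illustrative only, not part of this PR; it assumes the same `batch` array and `batchUploadPromises` as the diff above.

// Sketch (not part of this PR): collect every failure instead of failing fast.
// `batch` and `batchUploadPromises` are the variables from the diff above.
const results = await Promise.allSettled(batchUploadPromises);

const failures = results
	.map((result, index) => ({ result, doc: batch[index] }))
	.filter(({ result }) => result.status === 'rejected')
	.map(
		({ result, doc }) =>
			`${doc.name}: ${(result as PromiseRejectedResult).reason}`
	);

if (failures.length > 0) {
	throw new Error(
		`Failed to upload ${failures.length} document(s):\n${failures.join('\n')}`
	);
}

Also worth noting the timing the constants imply: with BATCH_SIZE = 5 and RATE_LIMIT_DELAY = 1000, the five requests in a batch start at t = 0s, 1s, 2s, 3s, and 4s, so each batch takes at least four seconds before its last upload even begins. If the rate limit only applies to getSignedUploadUrl, staggering just that call and letting the uploads themselves run fully in parallel would be faster; that depends on the API's actual limits, which the diff does not state.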