
Commit 8216347

Authored by Yaminim07, prem-celestial, and hemanth-3
FS:10615 - File size threshold value updated (#516)
* Increase the chunk size to upload the max file size
* Chunk size and chunk count calculation updated for files greater than 60GB in the default case and 80GB for intelligent ingestion
* Default values restored
* Unit test issues fixed
* Unit test fixed for chunk size and parts count
* File size threshold value updated
* Update deploy_beta.yml
* Console log added for testing purposes on the staging server
* Intelligent ingestion chunk size bug fixed
* Unit test cases fixed for the updated intelligent ingestion calculation
* Console log removed after successful testing

Co-authored-by: Prem Verma <premprakash.v@celestialsys.com>
Co-authored-by: hemanth-3 <98961835+hemanth-3@users.noreply.github.com>
1 parent 3dc0a05 commit 8216347
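For context on the calculation the commit message describes: the thresholds are in binary gigabytes, and the dynamic part size follows from the 10,000-part S3 multipart cap that appears in the diff below. A rough worked example (the 70 GiB file size is a hypothetical illustration, not from the commit):

```ts
// Illustrative only: how a dynamic part size falls out of the S3 part cap.
const MAX_S3_CHUNKS_ALLOWED = 10000;            // S3 multipart upload part limit
const fileSize = 70 * 1024 * 1024 * 1024;       // hypothetical 70 GiB file, over the 59 GiB default threshold
const dynamicPartSize = Math.ceil(fileSize / MAX_S3_CHUNKS_ALLOWED);
console.log(dynamicPartSize);                   // 7516193 bytes (~7.2 MiB per part)
```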

3 files changed: +77 −46 lines changed

src/lib/api/upload/file.ts

Lines changed: 3 additions & 3 deletions

@@ -175,14 +175,14 @@ export class File {
   * @memberof File
   */
  public getPartsCount (size: number, intelligentChunk: boolean): PartSize {
-    const DEFAULT_FILE_SIZE_LIMIT = 60 * 1024 * 1024 * 1024;
-    const INTELLIGENT_FILE_SIZE_LIMIT = 80 * 1024 * 1024 * 1024;
+    const DEFAULT_FILE_SIZE_LIMIT = 59 * 1024 * 1024 * 1024;
+    const INTELLIGENT_FILE_SIZE_LIMIT = 79 * 1024 * 1024 * 1024;
     const FILE_SIZE_LIMIT = intelligentChunk ? INTELLIGENT_FILE_SIZE_LIMIT : DEFAULT_FILE_SIZE_LIMIT;
     const MAX_S3_CHUNKS_ALLOWED = 10000;

     // When file size is greater than 60GB, chunk size is calculated dynamically
     // Chunk count is set to the max number of chunks allowed over s3
-    if (this._file.size > FILE_SIZE_LIMIT) {
+    if (this._file.size >= FILE_SIZE_LIMIT) {
       const dynamicPartSize = Math.ceil(this._file.size / MAX_S3_CHUNKS_ALLOWED); // size is set in bytes

       return {
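For readers without the full file, here is a minimal sketch of the updated logic, restructured as a standalone function (the real method reads the file size from `this._file`). The `PartSize` shape, the `partsCount` returned in the oversize branch, and the default branch below the hunk are assumptions inferred from the diff's comment ("chunk count is set to the max number of chunks allowed over s3"), not verbatim source:

```ts
// Sketch, not the verbatim source: PartSize shape and the default branch are assumed.
interface PartSize {
  partsCount: number;
  chunkSize: number;
}

function getPartsCount(fileSize: number, requestedChunkSize: number, intelligentChunk: boolean): PartSize {
  const DEFAULT_FILE_SIZE_LIMIT = 59 * 1024 * 1024 * 1024;
  const INTELLIGENT_FILE_SIZE_LIMIT = 79 * 1024 * 1024 * 1024;
  const FILE_SIZE_LIMIT = intelligentChunk ? INTELLIGENT_FILE_SIZE_LIMIT : DEFAULT_FILE_SIZE_LIMIT;
  const MAX_S3_CHUNKS_ALLOWED = 10000;

  if (fileSize >= FILE_SIZE_LIMIT) {
    // Oversize files: size each part so the whole file fits within 10,000 S3 parts.
    const dynamicPartSize = Math.ceil(fileSize / MAX_S3_CHUNKS_ALLOWED); // bytes
    return { partsCount: MAX_S3_CHUNKS_ALLOWED, chunkSize: dynamicPartSize };
  }

  // Assumed default branch: keep the requested chunk size and divide the file by it.
  return { partsCount: Math.ceil(fileSize / requestedChunkSize), chunkSize: requestedChunkSize };
}
```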

src/lib/api/upload/uploaders/s3.spec.ts

Lines changed: 73 additions & 42 deletions

@@ -525,8 +525,9 @@ describe('Api/Upload/Uploaders/S3', () => {
      });

      const firstPartOffset = 0;
-      const firstPartMetadata = testFile.getPartMetadata(0, INTELLIGENT_CHUNK_SIZE);
-      const firstPartChunk = await testFile.getChunkByMetadata(firstPartMetadata, firstPartOffset, chunkSize);
+      const { partsCount, chunkSize: dynamicPartSize } = testFile.getPartsCount(INTELLIGENT_CHUNK_SIZE, true);
+      const firstPartMetadata = testFile.getPartMetadata(0, dynamicPartSize);
+      const firstPartChunk = await testFile.getChunkByMetadata(firstPartMetadata, firstPartOffset, dynamicPartSize);

      expect(mockUpload).toHaveBeenCalledWith({
        md5: firstPartChunk.md5,
@@ -545,26 +546,84 @@ describe('Api/Upload/Uploaders/S3', () => {

      expect(mockPut).toHaveBeenCalledWith('/fakes3', expect.any(Object));

-      const secondPartOffset = chunkSize;
-      const firstPartSecondChunk = await testFile.getChunkByMetadata(firstPartMetadata, secondPartOffset, chunkSize);
+      expect(mockPut).toHaveBeenCalledWith('/fakes3', expect.any(Object));
+
+      expect(mockCommit).toHaveBeenCalledWith({
+        apikey: testApikey,
+        part: 1,
+        size: testFile.size,
+        region: mockRegion,
+        uri: mockedUri,
+        upload_id: mockUploadId,
+        store: {
+          location: DEFAULT_STORE_LOCATION,
+        },
+      });
+
+      expect(mockComplete).toHaveBeenCalledWith({
+        apikey: testApikey,
+        filename: testFile.name,
+        mimetype: testFile.mimetype,
+        size: testFile.size,
+        region: mockRegion,
+        upload_id: mockUploadId,
+        store: {
+          location: DEFAULT_STORE_LOCATION,
+        },
+        fii: true,
+        uri: mockedUri,
+      });
+    });
+
+    it('should upload file', async () => {
+      const chunkSize = 1024 * 1024;
+
+      const u = new S3Uploader({});
+      u.setUrl(testHost);
+      u.setApikey(testApikey);
+      u.setUploadMode(UploadMode.INTELLIGENT);
+      u.setIntelligentChunkSize(chunkSize);
+      u.addFile(getSmallTestFile());
+
+      const res = await u.execute();
+      expect(res[0].handle).toEqual('test_handle');
+
+      const testFile = getSmallTestFile();
+      expect(mockStart).toHaveBeenCalledWith({
+        filename: testFile.name,
+        mimetype: testFile.mimetype,
+        size: testFile.size,
+        store: {
+          location: DEFAULT_STORE_LOCATION,
+        },
+        apikey: testApikey,
+        fii: true,
+      });
+
+      const firstPartOffset = 0;
+      const { partsCount, chunkSize: dynamicPartSize } = testFile.getPartsCount(INTELLIGENT_CHUNK_SIZE, true);
+      const firstPartMetadata = testFile.getPartMetadata(0, dynamicPartSize);
+      const firstPartChunk = await testFile.getChunkByMetadata(firstPartMetadata, firstPartOffset, dynamicPartSize);

      expect(mockUpload).toHaveBeenCalledWith({
-        md5: firstPartSecondChunk.md5,
-        size: firstPartSecondChunk.size,
+        md5: firstPartChunk.md5,
+        size: firstPartChunk.size,
        apikey: testApikey,
        region: mockRegion,
        store: {
          location: DEFAULT_STORE_LOCATION,
        },
        uri: mockedUri,
        upload_id: mockUploadId,
-        offset: secondPartOffset,
+        offset: firstPartOffset,
        fii: true,
        part: 1,
      });

      expect(mockPut).toHaveBeenCalledWith('/fakes3', expect.any(Object));

+      expect(mockPut).toHaveBeenCalledWith('/fakes3', expect.any(Object));
+
      expect(mockCommit).toHaveBeenCalledWith({
        apikey: testApikey,
        part: 1,
@@ -625,8 +684,9 @@ describe('Api/Upload/Uploaders/S3', () => {
      expect(res[0].status).toEqual('test_status');

      const testFile = getSmallTestFile();
-      const firstPartMetadata = testFile.getPartMetadata(0, INTELLIGENT_CHUNK_SIZE);
-      const firstPartChunk = await testFile.getChunkByMetadata(firstPartMetadata, 0, INTELLIGENT_CHUNK_SIZE);
+      const { partsCount, chunkSize: dynamicPartSize } = testFile.getPartsCount(INTELLIGENT_CHUNK_SIZE, true);
+      const firstPartMetadata = testFile.getPartMetadata(0, dynamicPartSize);
+      const firstPartChunk = await testFile.getChunkByMetadata(firstPartMetadata, 0, dynamicPartSize);

      // this request will be aborted but called on mock upload
      expect(mockUpload).toHaveBeenNthCalledWith(1, {
@@ -645,8 +705,10 @@ describe('Api/Upload/Uploaders/S3', () => {
      });

      // split part size by a half and retry request (thats give us 2 chunks so 2 upload requests needed)
-      const chunkSize = Math.min(INTELLIGENT_CHUNK_SIZE, testFile.size) / 2;
-      const chunk1 = await testFile.getChunkByMetadata(firstPartMetadata, 0, chunkSize);
+      let { chunkSize } = testFile.getPartsCount(INTELLIGENT_CHUNK_SIZE, true);
+      chunkSize = chunkSize / 2;
+      const updatedFirstPartMetaData = testFile.getPartMetadata(0, chunkSize);
+      const chunk1 = await testFile.getChunkByMetadata(updatedFirstPartMetaData, 0, chunkSize);

      expect(mockUpload).toHaveBeenNthCalledWith(2, {
        md5: chunk1.md5,
@@ -662,37 +724,6 @@ describe('Api/Upload/Uploaders/S3', () => {
        offset: 0,
        part: 1,
      });
-
-      const chunk2 = await testFile.getChunkByMetadata(firstPartMetadata, chunkSize / 2, chunkSize);
-
-      expect(mockUpload).toHaveBeenNthCalledWith(3, {
-        md5: chunk2.md5,
-        size: chunk2.size,
-        apikey: testApikey,
-        region: mockRegion,
-        store: {
-          location: DEFAULT_STORE_LOCATION,
-        },
-        fii: true,
-        uri: mockedUri,
-        upload_id: mockUploadId,
-        offset: chunkSize,
-        part: 1,
-      });
-    });
-
-    it('should exit when chunk size reaches min chunk size', async () => {
-      interceptorS3.reply((url, _, cb) => cb('Error'));
-
-      const u = new S3Uploader({});
-      u.setUrl(testHost);
-      u.setApikey(testApikey);
-      u.setTimeout(100);
-      u.setUploadMode(UploadMode.INTELLIGENT);
-
-      u.addFile(getSmallTestFile());
-      const res = await u.execute();
-      expect(res[0].status).toEqual('Failed');
    });

    it('should exit on 4xx errors', async () => {
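The retry hunk above encodes the back-off behaviour these specs now check: when a part upload fails in intelligent mode, the uploader retries at half the chunk size, so one original part becomes two sequential chunks. A standalone illustration of the offsets the removed third-call expectation used to assert (the 8 MiB starting size is hypothetical):

```ts
// Illustrative only: chunk layout after the uploader halves a failed part.
const originalChunkSize = 8 * 1024 * 1024;   // assumed starting chunk size
const retryChunkSize = originalChunkSize / 2;

// One part now uploads as two chunks, back to back:
const chunks = [0, retryChunkSize].map((offset) => ({ offset, size: retryChunkSize }));
console.log(chunks); // [{ offset: 0, size: 4194304 }, { offset: 4194304, size: 4194304 }]
```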

src/lib/api/upload/uploaders/s3.ts

Lines changed: 1 addition & 1 deletion

@@ -540,7 +540,7 @@ export class S3Uploader extends UploaderAbstract {
  private async uploadNextChunk(id: string, partNumber: number, chunkSize: number = this.intelligentChunkSize) {
    const payload = this.getPayloadById(id);
    let part = payload.parts[partNumber];
-    chunkSize = Math.min(chunkSize, part.size - part.offset);
+    chunkSize = part.size - part.offset;

    let chunk = await payload.file.getChunkByMetadata(part, part.offset, chunkSize, this.integrityCheck);
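The one-line change drops the `Math.min` clamp: with dynamically sized parts, the next chunk should simply cover whatever remains of the current part rather than being capped at `intelligentChunkSize`. A hypothetical standalone illustration (the types below are assumed, not from the repo):

```ts
// Illustration of the new sizing rule in uploadNextChunk.
interface PartMetadata {
  size: number;    // total bytes in this part
  offset: number;  // bytes already uploaded
}

const nextChunkSize = (part: PartMetadata): number => part.size - part.offset;

// e.g. a 7,516,193-byte dynamic part with 4 MiB already sent:
console.log(nextChunkSize({ size: 7516193, offset: 4 * 1024 * 1024 })); // 3321889
```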
