Skip to content

Commit b2fc08d

Browse files
FS-10615 - Allow uploading files bigger than 50GB (#511)
* Increase the chunk size so that the maximum file size can be uploaded * Chunk size and chunk count calculation updated for files greater than 60GB (default case) and 80GB (intelligent ingestion) * Default values restored --------- Co-authored-by: Yaminim <yaminim@celestialsys.com>
1 parent c4dbde4 commit b2fc08d

File tree

2 files changed

+33
-7
lines changed

2 files changed

+33
-7
lines changed

src/lib/api/upload/file.ts

Lines changed: 29 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,11 @@ export interface FileChunk extends FilePart {
5252
offset: number; // offset for chunk - from part start
5353
}
5454

55+
export interface PartSize {
56+
partsCount: number,
57+
chunkSize: number
58+
}
59+
5560
/**
5661
* File representation to unify file object in nodejs and browser
5762
*
@@ -164,14 +169,33 @@ export class File {
164169
}
165170

166171
/**
167-
* Returns number of parts according to part size
168-
*
172+
* Returns number of parts and part size according to max limit
169173
* @param {number} size - part size in bytes
170-
* @returns {number}
174+
* @returns {PartSize}
171175
* @memberof File
172176
*/
173-
public getPartsCount (size: number): number {
174-
return Math.ceil(this._file.size / size);
177+
public getPartsCount (size: number, intelligentChunk: boolean): PartSize {
178+
const DEFAULT_FILE_SIZE_LIMIT = 60 * 1024 * 1024 * 1024
179+
const INTELLIGENT_FILE_SIZE_LIMIT = 80 * 1024 * 1024 * 1024
180+
const FILE_SIZE_LIMIT = intelligentChunk ? INTELLIGENT_FILE_SIZE_LIMIT : DEFAULT_FILE_SIZE_LIMIT;
181+
const MAX_S3_CHUNKS_ALLOWED = 10000;
182+
183+
// When file size is greater than 60GB, chunk size is calculated dynamically
184+
// Chunk count is set to the max number of chunks allowed over s3
185+
if(this._file.size > FILE_SIZE_LIMIT) {
186+
const dynamicPartSize = Math.ceil(this._file.size / MAX_S3_CHUNKS_ALLOWED); // size is set in bytes
187+
188+
return {
189+
partsCount: Math.ceil(this._file.size / dynamicPartSize),
190+
chunkSize: dynamicPartSize
191+
}
192+
193+
}
194+
195+
return {
196+
partsCount: Math.ceil(this._file.size / size),
197+
chunkSize: size
198+
}
175199
}
176200

177201
/**

src/lib/api/upload/uploaders/s3.ts

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -254,19 +254,21 @@ export class S3Uploader extends UploaderAbstract {
254254
*/
255255
private prepareParts(id: string): Promise<void> {
256256
const file = this.getPayloadById(id).file;
257+
let intelligentChunk = false;
257258

258259
// for intelligent or fallback mode we cant overwrite part size - requires 8MB
259260
if ([UploadMode.INTELLIGENT, UploadMode.FALLBACK].indexOf(this.uploadMode) > -1) {
260261
this.partSize = INTELLIGENT_CHUNK_SIZE;
262+
intelligentChunk = true;
261263
}
262264

263-
const partsCount = file.getPartsCount(this.partSize);
265+
const { partsCount, chunkSize } = file.getPartsCount(this.partSize, intelligentChunk);
264266

265267
const parts = [];
266268

267269
for (let i = 0; i < partsCount; i++) {
268270
parts[i] = {
269-
...file.getPartMetadata(i, this.partSize),
271+
...file.getPartMetadata(i, chunkSize),
270272
offset: 0,
271273
};
272274
}

0 commit comments

Comments
 (0)