Skip to content

Commit c48127f

Browse files
authored
Merge pull request #542 from filestack/develop
Fixed FS-10615, FS-11013
2 parents ec8a950 + d7655a8 commit c48127f

File tree

9 files changed

+133
-22
lines changed

9 files changed

+133
-22
lines changed

src/config.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
/**
1919
* @private
2020
*/
21-
const PICKER_VERSION = '1.24.1';
21+
const PICKER_VERSION = '1.25.0';
2222

2323
/**
2424
* @private

src/lib/api/cloud.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -133,6 +133,12 @@ export class CloudClient {
133133
if (!Array.isArray(accept)) {
134134
accept = [accept];
135135
}
136+
// FS-11013.
137+
// Google Drive stores uncommon file types in an incorrect format, e.g. an .srt (SubRip) file is stored as bin (octet-stream),
138+
// so if the user wants to accept SubRip files, we should also search Google Drive for octet-stream files.
139+
if (accept.includes('application/x-subrip') && !accept.includes('application/octet-stream')) {
140+
accept.push('application/octet-stream');
141+
}
136142
// filtering mimetypes in clouds
137143
payload.accept = accept;
138144
}

src/lib/api/upload/file.spec.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ describe('Api/Upload/File', () => {
7373
});
7474

7575
it('should return correct parts count for given size', () => {
76-
expect(file.getPartsCount(1)).toEqual(file.size);
76+
expect(file.getPartsCount(1)).toEqual({ chunkSize: 1, partsCount: 4 });
7777
});
7878

7979
it('should return correct part metadata', () => {

src/lib/api/upload/file.ts

Lines changed: 29 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,11 @@ export interface FileChunk extends FilePart {
5252
offset: number; // offset for chunk - from part start
5353
}
5454

55+
export interface PartSize {
56+
partsCount: number;
57+
chunkSize: number;
58+
}
59+
5560
/**
5661
* File representation to unify file object in nodejs and browser
5762
*
@@ -164,14 +169,33 @@ export class File {
164169
}
165170

166171
/**
167-
* Returns number of parts according to part size
168-
*
172+
* Returns the number of parts and the part size, keeping the part count within the S3 chunk limit
169173
* @param {number} size - part size in bytes
* @param {boolean} intelligentChunk - whether the intelligent-ingestion file size limit applies
170-
* @returns {number}
174+
* @returns {PartSize}
171175
* @memberof File
172176
*/
173-
public getPartsCount (size: number): number {
174-
return Math.ceil(this._file.size / size);
177+
public getPartsCount (size: number, intelligentChunk: boolean): PartSize {
178+
const DEFAULT_FILE_SIZE_LIMIT = 59 * 1024 * 1024 * 1024;
179+
const INTELLIGENT_FILE_SIZE_LIMIT = 79 * 1024 * 1024 * 1024;
180+
const FILE_SIZE_LIMIT = intelligentChunk ? INTELLIGENT_FILE_SIZE_LIMIT : DEFAULT_FILE_SIZE_LIMIT;
181+
const MAX_S3_CHUNKS_ALLOWED = 10000;
182+
183+
// When the file size reaches the limit (59GB by default, 79GB for intelligent ingestion), the chunk size is calculated dynamically
184+
// so that the part count does not exceed the maximum number of chunks S3 allows per multipart upload (10000)
185+
if (this._file.size >= FILE_SIZE_LIMIT) {
186+
const dynamicPartSize = Math.ceil(this._file.size / MAX_S3_CHUNKS_ALLOWED); // size is set in bytes
187+
188+
return {
189+
partsCount: Math.ceil(this._file.size / dynamicPartSize),
190+
chunkSize: dynamicPartSize,
191+
};
192+
193+
}
194+
195+
return {
196+
partsCount: Math.ceil(this._file.size / size),
197+
chunkSize: size,
198+
};
175199
}
176200

177201
/**

src/lib/api/upload/uploaders/s3.spec.ts

Lines changed: 73 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -524,8 +524,9 @@ describe('Api/Upload/Uploaders/S3', () => {
524524
});
525525

526526
const firstPartOffset = 0;
527-
const firstPartMetadata = testFile.getPartMetadata(0, INTELLIGENT_CHUNK_SIZE);
528-
const firstPartChunk = await testFile.getChunkByMetadata(firstPartMetadata, firstPartOffset, chunkSize);
527+
const { partsCount, chunkSize: dynamicPartSize } = testFile.getPartsCount(INTELLIGENT_CHUNK_SIZE, true);
528+
const firstPartMetadata = testFile.getPartMetadata(0, dynamicPartSize);
529+
const firstPartChunk = await testFile.getChunkByMetadata(firstPartMetadata, firstPartOffset, dynamicPartSize);
529530

530531
expect(mockUpload).toHaveBeenCalledWith({
531532
md5: firstPartChunk.md5,
@@ -544,26 +545,84 @@ describe('Api/Upload/Uploaders/S3', () => {
544545

545546
expect(mockPut).toHaveBeenCalledWith('/fakes3', expect.any(Object));
546547

547-
const secondPartOffset = chunkSize;
548-
const firstPartSecondChunk = await testFile.getChunkByMetadata(firstPartMetadata, secondPartOffset, chunkSize);
548+
expect(mockPut).toHaveBeenCalledWith('/fakes3', expect.any(Object));
549+
550+
expect(mockCommit).toHaveBeenCalledWith({
551+
apikey: testApikey,
552+
part: 1,
553+
size: testFile.size,
554+
region: mockRegion,
555+
uri: mockedUri,
556+
upload_id: mockUploadId,
557+
store: {
558+
location: DEFAULT_STORE_LOCATION,
559+
},
560+
});
561+
562+
expect(mockComplete).toHaveBeenCalledWith({
563+
apikey: testApikey,
564+
filename: testFile.name,
565+
mimetype: testFile.mimetype,
566+
size: testFile.size,
567+
region: mockRegion,
568+
upload_id: mockUploadId,
569+
store: {
570+
location: DEFAULT_STORE_LOCATION,
571+
},
572+
fii: true,
573+
uri: mockedUri,
574+
});
575+
});
576+
577+
it('should upload file', async () => {
578+
const chunkSize = 1024 * 1024;
579+
580+
const u = new S3Uploader({});
581+
u.setUrl(testHost);
582+
u.setApikey(testApikey);
583+
u.setUploadMode(UploadMode.INTELLIGENT);
584+
u.setIntelligentChunkSize(chunkSize);
585+
u.addFile(getSmallTestFile());
586+
587+
const res = await u.execute();
588+
expect(res[0].handle).toEqual('test_handle');
589+
590+
const testFile = getSmallTestFile();
591+
expect(mockStart).toHaveBeenCalledWith({
592+
filename: testFile.name,
593+
mimetype: testFile.mimetype,
594+
size: testFile.size,
595+
store: {
596+
location: DEFAULT_STORE_LOCATION,
597+
},
598+
apikey: testApikey,
599+
fii: true,
600+
});
601+
602+
const firstPartOffset = 0;
603+
const { partsCount, chunkSize: dynamicPartSize } = testFile.getPartsCount(INTELLIGENT_CHUNK_SIZE, true);
604+
const firstPartMetadata = testFile.getPartMetadata(0, dynamicPartSize);
605+
const firstPartChunk = await testFile.getChunkByMetadata(firstPartMetadata, firstPartOffset, dynamicPartSize);
549606

550607
expect(mockUpload).toHaveBeenCalledWith({
551-
md5: firstPartSecondChunk.md5,
552-
size: firstPartSecondChunk.size,
608+
md5: firstPartChunk.md5,
609+
size: firstPartChunk.size,
553610
apikey: testApikey,
554611
region: mockRegion,
555612
store: {
556613
location: DEFAULT_STORE_LOCATION,
557614
},
558615
uri: mockedUri,
559616
upload_id: mockUploadId,
560-
offset: secondPartOffset,
617+
offset: firstPartOffset,
561618
fii: true,
562619
part: 1,
563620
});
564621

565622
expect(mockPut).toHaveBeenCalledWith('/fakes3', expect.any(Object));
566623

624+
expect(mockPut).toHaveBeenCalledWith('/fakes3', expect.any(Object));
625+
567626
expect(mockCommit).toHaveBeenCalledWith({
568627
apikey: testApikey,
569628
part: 1,
@@ -623,8 +682,9 @@ describe('Api/Upload/Uploaders/S3', () => {
623682
expect(res[0].status).toEqual('test_status');
624683

625684
const testFile = getSmallTestFile();
626-
const firstPartMetadata = testFile.getPartMetadata(0, INTELLIGENT_CHUNK_SIZE);
627-
const firstPartChunk = await testFile.getChunkByMetadata(firstPartMetadata, 0, INTELLIGENT_CHUNK_SIZE);
685+
const { partsCount, chunkSize: dynamicPartSize } = testFile.getPartsCount(INTELLIGENT_CHUNK_SIZE, true);
686+
const firstPartMetadata = testFile.getPartMetadata(0, dynamicPartSize);
687+
const firstPartChunk = await testFile.getChunkByMetadata(firstPartMetadata, 0, dynamicPartSize);
628688

629689
// this request will be aborted but called on mock upload
630690
expect(mockUpload).toHaveBeenNthCalledWith(1, {
@@ -643,8 +703,10 @@ describe('Api/Upload/Uploaders/S3', () => {
643703
});
644704

645705
// split part size by a half and retry request (thats give us 2 chunks so 2 upload requests needed)
646-
const chunkSize = Math.min(INTELLIGENT_CHUNK_SIZE, testFile.size) / 2;
647-
const chunk1 = await testFile.getChunkByMetadata(firstPartMetadata, 0, chunkSize);
706+
let { chunkSize } = testFile.getPartsCount(INTELLIGENT_CHUNK_SIZE, true);
707+
chunkSize = chunkSize / 2;
708+
const updatedFirstPartMetaData = testFile.getPartMetadata(0, chunkSize);
709+
const chunk1 = await testFile.getChunkByMetadata(updatedFirstPartMetaData, 0, chunkSize);
648710

649711
expect(mockUpload).toHaveBeenNthCalledWith(2, {
650712
md5: chunk1.md5,

src/lib/api/upload/uploaders/s3.ts

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -254,19 +254,21 @@ export class S3Uploader extends UploaderAbstract {
254254
*/
255255
private prepareParts(id: string): Promise<void> {
256256
const file = this.getPayloadById(id).file;
257+
let intelligentChunk = false;
257258

258259
// for intelligent or fallback mode we cant overwrite part size - requires 8MB
259260
if ([UploadMode.INTELLIGENT, UploadMode.FALLBACK].indexOf(this.uploadMode) > -1) {
260261
this.partSize = INTELLIGENT_CHUNK_SIZE;
262+
intelligentChunk = true;
261263
}
262264

263-
const partsCount = file.getPartsCount(this.partSize);
265+
const { partsCount, chunkSize } = file.getPartsCount(this.partSize, intelligentChunk);
264266

265267
const parts = [];
266268

267269
for (let i = 0; i < partsCount; i++) {
268270
parts[i] = {
269-
...file.getPartMetadata(i, this.partSize),
271+
...file.getPartMetadata(i, chunkSize),
270272
offset: 0,
271273
};
272274
}
@@ -538,7 +540,7 @@ export class S3Uploader extends UploaderAbstract {
538540
private async uploadNextChunk(id: string, partNumber: number, chunkSize: number = this.intelligentChunkSize) {
539541
const payload = this.getPayloadById(id);
540542
let part = payload.parts[partNumber];
541-
chunkSize = Math.min(chunkSize, part.size - part.offset);
543+
chunkSize = part.size - part.offset;
542544

543545
let chunk = await payload.file.getChunkByMetadata(part, part.offset, chunkSize, this.integrityCheck);
544546

src/lib/picker.ts

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -414,6 +414,15 @@ export interface PickerOptions {
414414
* - unsplash
415415
*/
416416
fromSources?: string[];
417+
/**
418+
* Provide default text value for Image Search
419+
* ```javascript
420+
* websearch: {
421+
* predefinedText: 'Sample text'
422+
* }
423+
* ```
424+
*/
425+
websearch?: object;
417426
/**
418427
* Container where picker should be appended. Only relevant for `inline` and `dropPane` display modes.
419428
*/

src/lib/utils/extensions.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -921,7 +921,7 @@ export const ExtensionsMap = {
921921
'video/x-ms-wmv': ['wmv'],
922922
'video/x-ms-wmx': ['wmx'],
923923
'video/x-ms-wvx': ['wvx'],
924-
'video/x-msvideo': ['avi'],
924+
'video/vnd.avi': ['avi'],
925925
'video/x-sgi-movie': ['movie'],
926926
'video/x-smv': ['smv'],
927927
'x-conference/x-cooltalk': ['ice'],

src/schema/picker.schema.ts

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,14 @@ export const PickerParamsSchema = {
4747
acceptFn: {
4848
format: 'callback',
4949
},
50+
websearch: {
51+
type: 'object',
52+
properties: {
53+
predefinedText: {
54+
type: 'string',
55+
},
56+
},
57+
},
5058
fromSources: {
5159
type: 'array',
5260
items: [

0 commit comments

Comments
 (0)