@@ -79,15 +79,15 @@ class DataLoader<K, V, C = K> {
       );
     }
 
-    var batch = getCurrentBatch(this);
-    var cacheMap = this._cacheMap;
-    var cacheKey = this._cacheKeyFn(key);
+    const batch = getCurrentBatch(this);
+    const cacheMap = this._cacheMap;
+    const cacheKey = this._cacheKeyFn(key);
 
     // If caching and there is a cache-hit, return cached Promise.
     if (cacheMap) {
-      var cachedPromise = cacheMap.get(cacheKey);
+      const cachedPromise = cacheMap.get(cacheKey);
       if (cachedPromise) {
-        var cacheHits = batch.cacheHits || (batch.cacheHits = []);
+        const cacheHits = batch.cacheHits || (batch.cacheHits = []);
         return new Promise(resolve => {
           cacheHits.push(() => {
             resolve(cachedPromise);
@@ -99,7 +99,7 @@ class DataLoader<K, V, C = K> {
     // Otherwise, produce a new Promise for this key, and enqueue it to be
     // dispatched along with the current batch.
     batch.keys.push(key);
-    var promise = new Promise((resolve, reject) => {
+    const promise = new Promise((resolve, reject) => {
       batch.callbacks.push({ resolve, reject });
     });
 
@@ -151,9 +151,9 @@ class DataLoader<K, V, C = K> {
    * method chaining.
    */
   clear(key: K): this {
-    var cacheMap = this._cacheMap;
+    const cacheMap = this._cacheMap;
     if (cacheMap) {
-      var cacheKey = this._cacheKeyFn(key);
+      const cacheKey = this._cacheKeyFn(key);
       cacheMap.delete(cacheKey);
     }
     return this;
@@ -165,7 +165,7 @@ class DataLoader<K, V, C = K> {
    * method chaining.
    */
   clearAll(): this {
-    var cacheMap = this._cacheMap;
+    const cacheMap = this._cacheMap;
     if (cacheMap) {
       cacheMap.clear();
     }
@@ -179,15 +179,15 @@ class DataLoader<K, V, C = K> {
    * To prime the cache with an error at a key, provide an Error instance.
    */
   prime(key: K, value: V | Promise<V> | Error): this {
-    var cacheMap = this._cacheMap;
+    const cacheMap = this._cacheMap;
     if (cacheMap) {
-      var cacheKey = this._cacheKeyFn(key);
+      const cacheKey = this._cacheKeyFn(key);
 
       // Only add the key if it does not already exist.
       if (cacheMap.get(cacheKey) === undefined) {
         // Cache a rejected promise if the value is an Error, in order to match
         // the behavior of load(key).
-        var promise;
+        let promise;
         if (value instanceof Error) {
           promise = Promise.reject(value);
           // Since this is a case where an Error is intentionally being primed
@@ -236,7 +236,7 @@ class DataLoader<K, V, C = K> {
 // for enqueuing a job to be performed after promise microtasks and before the
 // next macrotask. For browser environments, a macrotask is used (via
 // setImmediate or setTimeout) at a potential performance penalty.
-var enqueuePostPromiseJob =
+const enqueuePostPromiseJob =
   typeof process === 'object' && typeof process.nextTick === 'function'
     ? function (fn) {
         if (!resolvedPromise) {
@@ -255,7 +255,7 @@ var enqueuePostPromiseJob =
       };
 
 // Private: cached resolved Promise instance
-var resolvedPromise;
+let resolvedPromise;
 
 // Private: Describes a batch of requests
 type Batch<K, V> = {
@@ -273,7 +273,7 @@ type Batch<K, V> = {
 function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
   // If there is an existing batch which has not yet dispatched and is within
   // the limit of the batch size, then return it.
-  var existingBatch = loader._batch;
+  const existingBatch = loader._batch;
   if (
     existingBatch !== null &&
     !existingBatch.hasDispatched &&
@@ -283,7 +283,7 @@ function getCurrentBatch<K, V>(loader: DataLoader<K, V, any>): Batch<K, V> {
   }
 
   // Otherwise, create a new batch for this loader.
-  var newBatch = { hasDispatched: false, keys: [], callbacks: [] };
+  const newBatch = { hasDispatched: false, keys: [], callbacks: [] };
 
   // Store it on the loader so it may be reused.
   loader._batch = newBatch;
@@ -311,7 +311,7 @@ function dispatchBatch<K, V>(
 
   // Call the provided batchLoadFn for this loader with the batch's keys and
   // with the loader as the `this` context.
-  var batchPromise;
+  let batchPromise;
   try {
     batchPromise = loader._batchLoadFn(batch.keys);
   } catch (e) {
@@ -365,8 +365,8 @@ function dispatchBatch<K, V>(
       resolveCacheHits(batch);
 
       // Step through values, resolving or rejecting each Promise in the batch.
-      for (var i = 0; i < batch.callbacks.length; i++) {
-        var value = values[i];
+      for (let i = 0; i < batch.callbacks.length; i++) {
+        const value = values[i];
         if (value instanceof Error) {
           batch.callbacks[i].reject(value);
         } else {
@@ -388,7 +388,7 @@ function failedDispatch<K, V>(
 ) {
   // Cache hits are resolved, even though the batch failed.
   resolveCacheHits(batch);
-  for (var i = 0; i < batch.keys.length; i++) {
+  for (let i = 0; i < batch.keys.length; i++) {
     loader.clear(batch.keys[i]);
     batch.callbacks[i].reject(error);
   }
@@ -397,19 +397,19 @@ function failedDispatch<K, V>(
 // Private: Resolves the Promises for any cache hits in this batch.
 function resolveCacheHits(batch: Batch<any, any>) {
   if (batch.cacheHits) {
-    for (var i = 0; i < batch.cacheHits.length; i++) {
+    for (let i = 0; i < batch.cacheHits.length; i++) {
       batch.cacheHits[i]();
     }
   }
 }
 
 // Private: given the DataLoader's options, produce a valid max batch size.
 function getValidMaxBatchSize(options: ?Options<any, any, any>): number {
-  var shouldBatch = !options || options.batch !== false;
+  const shouldBatch = !options || options.batch !== false;
   if (!shouldBatch) {
     return 1;
   }
-  var maxBatchSize = options && options.maxBatchSize;
+  const maxBatchSize = options && options.maxBatchSize;
   if (maxBatchSize === undefined) {
     return Infinity;
   }
@@ -425,7 +425,7 @@ function getValidMaxBatchSize(options: ?Options<any, any, any>): number {
 function getValidBatchScheduleFn(
   options: ?Options<any, any, any>,
 ): (() => void) => void {
-  var batchScheduleFn = options && options.batchScheduleFn;
+  const batchScheduleFn = options && options.batchScheduleFn;
   if (batchScheduleFn === undefined) {
     return enqueuePostPromiseJob;
   }
@@ -439,7 +439,7 @@ function getValidBatchScheduleFn(
 
 // Private: given the DataLoader's options, produce a cache key function.
 function getValidCacheKeyFn<K, C>(options: ?Options<K, any, C>): K => C {
-  var cacheKeyFn = options && options.cacheKeyFn;
+  const cacheKeyFn = options && options.cacheKeyFn;
   if (cacheKeyFn === undefined) {
     return (key => key: any);
   }
@@ -453,17 +453,17 @@ function getValidCacheKeyFn<K, C>(options: ?Options<K, any, C>): K => C {
 function getValidCacheMap<K, V, C>(
   options: ?Options<K, V, C>,
 ): CacheMap<C, Promise<V>> | null {
-  var shouldCache = !options || options.cache !== false;
+  const shouldCache = !options || options.cache !== false;
   if (!shouldCache) {
     return null;
   }
-  var cacheMap = options && options.cacheMap;
+  const cacheMap = options && options.cacheMap;
   if (cacheMap === undefined) {
     return new Map();
   }
   if (cacheMap !== null) {
-    var cacheFunctions = ['get', 'set', 'delete', 'clear'];
-    var missingFunctions = cacheFunctions.filter(
+    const cacheFunctions = ['get', 'set', 'delete', 'clear'];
+    const missingFunctions = cacheFunctions.filter(
       fnName => cacheMap && typeof cacheMap[fnName] !== 'function',
     );
     if (missingFunctions.length !== 0) {