
Commit 05c662a

refactor: make DataLoader properties private and move some methods into DataLoader as private methods
1 parent b73bb0b commit 05c662a

File tree: 1 file changed (+125 −139 lines)

src/index.ts

Lines changed: 125 additions & 139 deletions
@@ -41,16 +41,12 @@ export type CacheMap<K, V> = {
  * web request.
  */
 class DataLoader<K, V, C = K> {
-  // TODO: Make this property private, at the moment it is used in 'dispatchBatch'
-  _batchLoadFn: BatchLoadFn<K, V>;
-  // TODO: Make this property private, at the moment it is used in 'getCurrentBatch'
-  _maxBatchSize: number;
-  // TODO: Make this property private, at the moment it is used in 'getCurrentBatch'
-  _batchScheduleFn: (callback: () => void) => void;
+  private _batchLoadFn: BatchLoadFn<K, V>;
+  private _maxBatchSize: number;
+  private _batchScheduleFn: (callback: () => void) => void;
   private _cacheKeyFn: (key: K) => C;
   private _cacheMap: CacheMap<C, Promise<V>> | null;
-  // TODO: Make this property private, at the moment it is used in 'getCurrentBatch'
-  _batch: Batch<K, V> | null;
+  private _batch: Batch<K, V> | null;
   public name: string | null;

   constructor(batchLoadFn: BatchLoadFn<K, V>, options?: Options<K, V, C>) {
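
Editorial note (not part of the diff): TypeScript `private` is enforced at compile time, so this hunk is behaviour-preserving. Consumers of the public API are unaffected, while code that reached into the underscored fields now fails to type-check. A minimal sketch of the effect, using a hypothetical consumer snippet:

```ts
import DataLoader from 'dataloader';

// Hypothetical consumer code, shown only to illustrate the effect of `private`.
const userLoader = new DataLoader<number, string>(async keys =>
  keys.map(key => `user:${key}`),
);

// Public API: unchanged by this commit.
userLoader.load(1).then(user => console.log(user)); // "user:1"

// Internal state: previously guarded only by a TODO comment, now rejected by
// the compiler because the field is declared `private` on the class.
// userLoader._batchLoadFn; // compile-time error after this change
```
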
@@ -69,6 +65,126 @@ class DataLoader<K, V, C = K> {
     this.name = getValidName(options);
   }

+  private dispatchBatch(batch: Batch<K, V>) {
+    // Mark this batch as having been dispatched.
+    batch.hasDispatched = true;
+
+    // If there's nothing to load, resolve any cache hits and return early.
+    if (batch.keys.length === 0) {
+      resolveCacheHits(batch);
+      return;
+    }
+
+    // Call the provided batchLoadFn for this loader with the batch's keys and
+    // with the loader as the `this` context.
+    let batchPromise: Promise<ReadonlyArray<V | Error>>;
+    try {
+      batchPromise = this._batchLoadFn(batch.keys);
+    } catch (e) {
+      return this.failedDispatch(
+        batch,
+        new TypeError(
+          'DataLoader must be constructed with a function which accepts ' +
+            'Array<key> and returns Promise<Array<value>>, but the function ' +
+            `errored synchronously: ${String(e)}.`,
+        ),
+      );
+    }
+
+    // Assert the expected response from batchLoadFn
+    if (!batchPromise || typeof batchPromise.then !== 'function') {
+      return this.failedDispatch(
+        batch,
+        new TypeError(
+          'DataLoader must be constructed with a function which accepts ' +
+            'Array<key> and returns Promise<Array<value>>, but the function did ' +
+            `not return a Promise: ${String(batchPromise)}.`,
+        ),
+      );
+    }
+
+    // Await the resolution of the call to batchLoadFn.
+    batchPromise
+      .then(values => {
+        // Assert the expected resolution from batchLoadFn.
+        if (!isArrayLike(values)) {
+          throw new TypeError(
+            'DataLoader must be constructed with a function which accepts ' +
+              'Array<key> and returns Promise<Array<value>>, but the function did ' +
+              `not return a Promise of an Array: ${String(values)}.`,
+          );
+        }
+        if (values.length !== batch.keys.length) {
+          throw new TypeError(
+            'DataLoader must be constructed with a function which accepts ' +
+              'Array<key> and returns Promise<Array<value>>, but the function did ' +
+              'not return a Promise of an Array of the same length as the Array ' +
+              'of keys.' +
+              `\n\nKeys:\n${String(batch.keys)}` +
+              `\n\nValues:\n${String(values)}`,
+          );
+        }
+
+        // Resolve all cache hits in the same micro-task as freshly loaded values.
+        resolveCacheHits(batch);
+
+        // Step through values, resolving or rejecting each Promise in the batch.
+        for (let i = 0; i < batch.callbacks.length; i++) {
+          const value = values[i]!;
+          if (value instanceof Error) {
+            batch.callbacks[i]!.reject(value);
+          } else {
+            batch.callbacks[i]!.resolve(value);
+          }
+        }
+      })
+      .catch(error => {
+        this.failedDispatch(batch, error);
+      });
+  }
+
+  // Private: Either returns the current batch, or creates and schedules a
+  // dispatch of a new batch for the given loader.
+  private getCurrentBatch(): Batch<K, V> {
+    // If there is an existing batch which has not yet dispatched and is within
+    // the limit of the batch size, then return it.
+    const existingBatch = this._batch;
+    if (
+      existingBatch !== null &&
+      !existingBatch.hasDispatched &&
+      existingBatch.keys.length < this._maxBatchSize
+    ) {
+      return existingBatch;
+    }
+
+    // Otherwise, create a new batch for this loader.
+    const newBatch: Batch<K, V> = {
+      hasDispatched: false,
+      keys: [],
+      callbacks: [],
+    };
+
+    // Store it on the loader so it may be reused.
+    this._batch = newBatch;
+
+    // Then schedule a task to dispatch this batch of requests.
+    this._batchScheduleFn(() => {
+      this.dispatchBatch(newBatch);
+    });
+
+    return newBatch;
+  }
+
+  // Private: do not cache individual loads if the entire batch dispatch fails,
+  // but still reject each request so they do not hang.
+  private failedDispatch(batch: Batch<K, V>, error: Error) {
+    // Cache hits are resolved, even though the batch failed.
+    resolveCacheHits(batch);
+    for (let i = 0; i < batch.keys.length; i++) {
+      this.clear(batch.keys[i]!);
+      batch.callbacks[i]!.reject(error);
+    }
+  }
   /**
    * Loads a key, returning a `Promise` for the value represented by that key.
    */
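
Editorial note: the arrow function handed to `this._batchScheduleFn` captures `this` lexically, which is what lets `dispatchBatch` become an instance method without any explicit binding. From the caller's side the batching behaviour is unchanged; a minimal sketch of what these methods implement, using hypothetical usage that is not part of the diff:

```ts
import DataLoader from 'dataloader';

// Loads issued in the same tick are collected into a single Batch by
// getCurrentBatch and dispatched once by dispatchBatch.
const loader = new DataLoader<number, number>(async keys => {
  console.log('batchLoadFn called with', keys); // e.g. [ 1, 2, 3 ]
  return keys.map(key => key * 10);
});

Promise.all([loader.load(1), loader.load(2), loader.load(3)]).then(values => {
  console.log(values); // [ 10, 20, 30 ] — one dispatch, three resolved promises
});
```
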
@@ -80,7 +196,7 @@ class DataLoader<K, V, C = K> {
       );
     }

-    const batch = getCurrentBatch(this);
+    const batch = this.getCurrentBatch();
     const cacheMap = this._cacheMap;
     let cacheKey: C | undefined;

@@ -264,136 +380,6 @@ type Batch<K, V> = {
   cacheHits?: Array<() => void>;
 };

-// Private: Either returns the current batch, or creates and schedules a
-// dispatch of a new batch for the given loader.
-function getCurrentBatch<K, V, C>(loader: DataLoader<K, V, C>): Batch<K, V> {
-  // If there is an existing batch which has not yet dispatched and is within
-  // the limit of the batch size, then return it.
-  const existingBatch = loader._batch;
-  if (
-    existingBatch !== null &&
-    !existingBatch.hasDispatched &&
-    existingBatch.keys.length < loader._maxBatchSize
-  ) {
-    return existingBatch;
-  }
-
-  // Otherwise, create a new batch for this loader.
-  const newBatch: Batch<K, V> = {
-    hasDispatched: false,
-    keys: [],
-    callbacks: [],
-  };
-
-  // Store it on the loader so it may be reused.
-  loader._batch = newBatch;
-
-  // Then schedule a task to dispatch this batch of requests.
-  loader._batchScheduleFn(() => {
-    dispatchBatch(loader, newBatch);
-  });
-
-  return newBatch;
-}
-
-function dispatchBatch<K, V, C>(
-  loader: DataLoader<K, V, C>,
-  batch: Batch<K, V>,
-) {
-  // Mark this batch as having been dispatched.
-  batch.hasDispatched = true;
-
-  // If there's nothing to load, resolve any cache hits and return early.
-  if (batch.keys.length === 0) {
-    resolveCacheHits(batch);
-    return;
-  }
-
-  // Call the provided batchLoadFn for this loader with the batch's keys and
-  // with the loader as the `this` context.
-  let batchPromise: Promise<ReadonlyArray<V | Error>>;
-  try {
-    batchPromise = loader._batchLoadFn(batch.keys);
-  } catch (e) {
-    return failedDispatch(
-      loader,
-      batch,
-      new TypeError(
-        'DataLoader must be constructed with a function which accepts ' +
-          'Array<key> and returns Promise<Array<value>>, but the function ' +
-          `errored synchronously: ${String(e)}.`,
-      ),
-    );
-  }
-
-  // Assert the expected response from batchLoadFn
-  if (!batchPromise || typeof batchPromise.then !== 'function') {
-    return failedDispatch(
-      loader,
-      batch,
-      new TypeError(
-        'DataLoader must be constructed with a function which accepts ' +
-          'Array<key> and returns Promise<Array<value>>, but the function did ' +
-          `not return a Promise: ${String(batchPromise)}.`,
-      ),
-    );
-  }
-
-  // Await the resolution of the call to batchLoadFn.
-  batchPromise
-    .then(values => {
-      // Assert the expected resolution from batchLoadFn.
-      if (!isArrayLike(values)) {
-        throw new TypeError(
-          'DataLoader must be constructed with a function which accepts ' +
-            'Array<key> and returns Promise<Array<value>>, but the function did ' +
-            `not return a Promise of an Array: ${String(values)}.`,
-        );
-      }
-      if (values.length !== batch.keys.length) {
-        throw new TypeError(
-          'DataLoader must be constructed with a function which accepts ' +
-            'Array<key> and returns Promise<Array<value>>, but the function did ' +
-            'not return a Promise of an Array of the same length as the Array ' +
-            'of keys.' +
-            `\n\nKeys:\n${String(batch.keys)}` +
-            `\n\nValues:\n${String(values)}`,
-        );
-      }
-
-      // Resolve all cache hits in the same micro-task as freshly loaded values.
-      resolveCacheHits(batch);
-
-      // Step through values, resolving or rejecting each Promise in the batch.
-      for (let i = 0; i < batch.callbacks.length; i++) {
-        const value = values[i]!;
-        if (value instanceof Error) {
-          batch.callbacks[i]!.reject(value);
-        } else {
-          batch.callbacks[i]!.resolve(value);
-        }
-      }
-    })
-    .catch(error => {
-      failedDispatch(loader, batch, error);
-    });
-}
-
-// Private: do not cache individual loads if the entire batch dispatch fails,
-// but still reject each request so they do not hang.
-function failedDispatch<K, V, C>(
-  loader: DataLoader<K, V, C>,
-  batch: Batch<K, V>,
-  error: Error,
-) {
-  // Cache hits are resolved, even though the batch failed.
-  resolveCacheHits(batch);
-  for (let i = 0; i < batch.keys.length; i++) {
-    loader.clear(batch.keys[i]!);
-    batch.callbacks[i]!.reject(error);
-  }
-}
-
 // Private: Resolves the Promises for any cache hits in this batch.
 function resolveCacheHits<K, V>(batch: Batch<K, V>) {
   if (batch.cacheHits) {