@@ -40,32 +40,31 @@ def _validate_signature(defunc,
4040 input_signature,
4141 input_dataset,
4242 non_feed_inputs=None):
43- """Validate and update input_signature if necessary to match the defunc's
44- arguments.
43+ """Validate and update `input_signature` if necessary to match the arguments
44+ of `defunc`.
4545
4646 Args:
47- defunc (Callable or tf.function): Function whose signature
48- is analyzed.
49- input_signature (list or tuple): A sequence of tf.TensorSpec objects
50- that describe the input arguments of defunc. If defunc is a
51- tf.function and input_signature was specified during tf.function
52- creation, this argument can be None.
53- input_dataset (tf.Dataset): Dataset from which input_signature will be
47+ defunc (Callable or tf.function): Function whose signature is analyzed.
48+ input_signature (list or tuple): A sequence of `tf.TensorSpec` objects
49+ that describe the input arguments of `defunc`. If `defunc` is a
50+ `tf.function` and `input_signature` was specified during `tf.function`
51+ creation, then this argument can be None.
52+ input_dataset (tf.Dataset): Dataset from which `input_signature` will be
5453 inferred.
5554 non_feed_inputs (list, optional): List of inputs that will be provided
56- to a graph without usage of infeed queue.
55+ to the graph without using the infeed queue.
5756
5857 Returns:
59- list: List of tf.TensorSpec objects with types, shapes and names.
58+ list: List of `tf.TensorSpec` objects with types, shapes and names.
6059
6160 Raises:
62- TypeError: If input_signature is not a tf.Dataset, tuple, list
63- or NoneType.
64- ValueError: If input_signature is not provided and defunc is
65- not a tf.function.
61+ TypeError: If `input_signature` is not a `tf.Dataset`, tuple, list
62+ or `NoneType`.
63+ ValueError: If `input_signature` is not provided and `defunc` is
64+ not a `tf.function`.
6665 ValueError: If the number of passed/inferred signatures of inputs that
6766 are passed to the graph using infeed queue is different than the number
68- of defunc's arguments.
67+ of arguments of `defunc`.
6968 """
7069 if input_dataset is not None:
7170 input_signature = input_dataset.element_spec
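As background for the fallback above (where `input_signature` is taken from `input_dataset.element_spec`), a minimal sketch using only standard TensorFlow APIs; the dataset contents and shapes are invented for illustration:

import tensorflow as tf

# Hypothetical dataset of eight 28x28 float32 "images", batched by 4.
dataset = tf.data.Dataset.from_tensor_slices(
    tf.zeros([8, 28, 28], tf.float32)).batch(4, drop_remainder=True)

# element_spec describes a single element of the dataset and can serve
# directly as the input signature.
print(dataset.element_spec)
# TensorSpec(shape=(4, 28, 28), dtype=tf.float32, name=None)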
@@ -105,13 +104,13 @@ def _create_feeds(input_signature, input_dataset=None):
105104 """Create infeed and outfeed queues for the given signature.
106105
107106 Args:
108- input_signature (list): List of signatures describing types
109- and shapes of dataset elements.
107+ input_signature (list): List of signatures describing types and shapes of
108+ the dataset elements.
110109 input_dataset (tf.Dataset, optional): Dataset to be used for creating feeds.
111110
112111 Returns:
113112 tuple(IPUInfeedQueue, IPUOutfeedQueue): Infeed and outfeed queues created
114- based on the given signature.
113+ based on the `input_signature`.
115114 """
116115 if input_dataset is None:
117116 inputs = [array_ops.zeros(s.shape, s.dtype) for s in input_signature]
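The list comprehension above builds placeholder inputs from the signature when no dataset is given; a small, self-contained sketch of the same pattern using public TensorFlow APIs (the example spec is invented):

import tensorflow as tf

# Invented signature standing in for the one produced by _validate_signature.
input_signature = [tf.TensorSpec(shape=[4, 16], dtype=tf.float32, name='x')]

# One zero-filled tensor per spec, matching its shape and dtype, mirroring
# the array_ops.zeros(...) comprehension in _create_feeds.
inputs = [tf.zeros(s.shape, s.dtype) for s in input_signature]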
@@ -128,14 +127,15 @@ def _export_saved_model(defunc, export_dir, input_signature):
128127
129128 Args:
130129 defunc (Callable or tf.function): Function that runs inference step.
131- export_dir (str): Path to the SavedModel directory.
130+ export_dir (str): Path to the directory where the SavedModel will be
131+ written.
132132 input_signature (list): List of signatures of inputs that will be provided
133- to a graph using infeed queue.
133+ to the graph using the infeed queue.
134134
135135 Returns:
136136 tf.function: A reference to the same predict function that was exported
137- using the SavedModel format. It uses embedded runtime op to run the
138- executable included as an asset in the SavedModel directory structure .
137+ using the SavedModel format. This function uses the embedded runtime op to
138+ run the executable that was included in the SavedModel's `assets` subfolder.
139139 """
140140 with tempfile.TemporaryDirectory() as tmp_folder:
141141 # Compile poplar_exec
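For orientation, a hedged sketch of consuming the result of such an export with the standard SavedModel loading API; the path is a placeholder, and whether a `serving_default` signature is registered by this module is an assumption rather than something shown in this change:

import tensorflow as tf

# Placeholder path to a previously exported model directory.
loaded = tf.saved_model.load('/tmp/exported_ipu_model')

# Assumption: the exported predict function is reachable via the default
# serving signature, as is typical for SavedModels used with TF Serving.
predict = loaded.signatures['serving_default']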
@@ -176,35 +176,37 @@ def export_single_step(predict_step,
176176 iterations,
177177 input_signature=None,
178178 input_dataset=None):
179- """Create a SavedModel at `export_dir` for TF Serving.
179+ """Create a SavedModel in `export_dir` for TensorFlow Serving.
180180
181181 Wrap `predict_step` inside a while loop, add an infeed for the inputs and
182182 an outfeed for the outputs, freeze any variables into constants and write
183183 a SavedModel containing an IPU runtime function and Poplar executable.
184184
185185 Args:
186186 predict_step (Callable or tf.function): Function to export.
187- export_dir (str): Path to the SavedModel directory.
187+ export_dir (str): Path to the directory where the SavedModel will be
188+ written.
188189 iterations (int): Number of loop iterations.
189- input_signature (list or tuple, optional): A sequence of tf.TensorSpec
190- objects that describe the input arguments of predict_step function.
191- If input_dataset is provided, this argument should be None.
192- If input_dataset is not provided, predict_step is a tf.function and
193- input_signature was specified during tf.function creation, this argument
194- can be None and signature will be captured directly from predict_step.
195- input_dataset (tf.Dataset, optional): Dataset from which input_signature
196- will be inferred. If input_signature is provided, this argument should
190+ input_signature (list or tuple, optional): A sequence of `tf.TensorSpec`
191+ objects that describe the input arguments of the `predict_step` function.
192+ If `input_dataset` is provided, this argument should be None.
193+ If `input_dataset` is not provided and `predict_step` is a `tf.function`
194+ and `input_signature` was specified during `tf.function` creation, then
195+ this argument can be None and the signature will be captured directly from
196+ `predict_step`.
197+ input_dataset (tf.Dataset, optional): Dataset from which `input_signature`
198+ will be inferred. If `input_signature` is provided, this argument should
197199 be None.
198200
199201 Returns:
200202 tf.function: A reference to the same predict function that was exported
201- using the SavedModel format. This function uses embedded runtime op to run
202- executable that was included in the SavedModel's `asset` subfolder.
203+ using the SavedModel format. This function uses the embedded runtime op to
204+ run the executable that was included in the SavedModel's `assets` subfolder.
203205
204206 Raises:
205- ValueError: If both input_signature and input_dataset are provided.
206- TypeError: If input_dataset was provided and is not an instance of
207- tf.Dataset.
207+ ValueError: If both `input_signature` and `input_dataset` are provided.
208+ TypeError: If `input_dataset` was provided and is not an instance of
209+ `tf.Dataset`.
208210 """
209211 if input_signature is not None and input_dataset is not None:
210212 raise ValueError('Both input_signature and input_dataset cannot be '
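To make the documented behaviour concrete, a hedged usage sketch of `export_single_step` as defined above; the export path, the body of `predict_step` and the tensor shape are illustrative only, and actually running the export requires an IPU environment:

import tensorflow as tf

@tf.function(input_signature=[tf.TensorSpec([4, 16], tf.float32)])
def predict_step(x):
  return x * 2.0  # Illustrative computation only.

# The signature was given at tf.function creation, so neither
# input_signature nor input_dataset needs to be passed here.
# export_single_step is the function defined in this module.
runtime_func = export_single_step(predict_step, '/tmp/single_step_model',
                                  iterations=10)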
@@ -235,7 +237,7 @@ def predict_loop():
235237
236238 def export_pipeline(computational_stages,
237239 export_dir,
238- gradient_accumulation_count,
240+ pipeline_depth,
239241 iterations,
240242 inputs=None,
241243 device_mapping=None,
@@ -244,54 +246,60 @@ def export_pipeline(computational_stages,
244246 name=None,
245247 input_signature=None,
246248 input_dataset=None):
247- """Create a pipelined SavedModel at `export_dir` for TF Serving.
249+ """Create a pipelined SavedModel in `export_dir` for TensorFlow Serving.
248250
249- Create a pipeline op using provided `computational_stages`, add an infeed for
251+ Create a pipeline op using `computational_stages`, add an infeed for
250252 the inputs and an outfeed for the outputs, freeze any variables into constants
251253 and write a SavedModel containing an IPU runtime function and Poplar
252254 executable.
253255
254256 Args:
255- computational_stages (list): A list of python functions or TF functions,
256- where each function represents a computational pipeline stage. The
257- function takes the outputs of the previous pipeline state as its inputs.
258- export_dir (str): Path to SavedModel directory.
259- gradient_accumulation_count (int): The number of times each pipeline stage
260- will be executed.
257+ computational_stages (list): A list of Python functions or TensorFlow
258+ functions, where each function represents a computational stage in the
259+ pipeline. The function takes the outputs of the previous pipeline stage as
260+ its inputs.
261+ export_dir (str): Path to the directory where the SavedModel will be
262+ written.
263+ pipeline_depth (int): The number of times each computational stage
264+ will be executed. It must be a multiple of the number of computational
265+ stages.
261266 iterations (int): The number of times the pipeline will be executed.
262- inputs (list, optional): Arguments passed to the first pipeline stage.
267+ inputs (list, optional): Arguments passed to the first computational stage
268+ without using the infeed queue.
263269 device_mapping (list, optional): If provided, a list of length equal to the
264270 number of computational stages. An element at index `i` in the list
265- represents which IPU the computational stage `computational_stages[i]`
266- should reside on. This can be used to make sure computational stages which
267- share `tf.Variable` are resident on the same IPU.
271+ represents which IPU `computational_stages[i]` should reside on. This
272+ can be used to make sure computational stages which share `tf.Variable`
273+ objects are resident on the same IPU.
268274 pipeline_schedule (PipelineSchedule, optional): Which scheduling algorithm
269275 to use for pipeline lowering. Defaults to `PipelineSchedule.Grouped`.
270276 poplar_options (list, optional): If provided, a list of length equal to the
271- number of computational stages. Each element is a PipelineStageOptions
277+ number of computational stages. Each element is a `PipelineStageOptions`
272278 object which allows for fine grain control of the Poplar options for a
273279 given forward propagation computational stage.
274280 name (str, optional): Name of this pipeline.
275- input_signature (list or tuple, optional): A sequence of tf.TensorSpec
276- objects that describe the input arguments of first computational stage.
277- If input_dataset is provided, this argument should be None.
278- If input_dataset is not provided, first computational stage is a
279- tf.function and input_signature was specified during tf.function creation,
280- this argument can be None and signature will be captured directly from the
281- first computational stage.
282- input_dataset (tf.Dataset, optional): Dataset from which input_signature
283- will be inferred. If input_signature is provided, this argument should
281+ input_signature (list or tuple, optional): A sequence of `tf.TensorSpec`
282+ objects that describe the input arguments of the first computational
283+ stage. If `input_dataset` is provided, this argument should be None.
284+ If `input_dataset` is not provided and the first computational stage is a
285+ `tf.function` and `input_signature` was specified during `tf.function`
286+ creation, then this argument can be None and the signature will be captured
287+ directly from the first computational stage.
288+ input_dataset (tf.Dataset, optional): Dataset from which `input_signature`
289+ will be inferred. If `input_signature` is provided, this argument should
284290 be None.
285291
286292 Returns:
287- function: A reference to the same predict function that was exported
288- using the SavedModel format. This function uses embedded runtime op to run
289- executable that was included in the SavedModel's `asset` subfolder.
293+ tf.function: A reference to the same predict function that was exported
294+ using the SavedModel format. This function uses the embedded runtime op to
295+ run the executable that was included in the SavedModel's `assets` subfolder.
290296
291297 Raises:
292- ValueError: If both input_signature and input_dataset are provided.
293- TypeError: If input_dataset was provided and is not an instance of
294- tf.Dataset.
298+ ValueError: If both `input_signature` and `input_dataset` are provided.
299+ ValueError: If `pipeline_depth` is not a multiple of the number of
300+ computational stages.
301+ TypeError: If `input_dataset` was provided and is not an instance of
302+ `tf.Dataset`.
295303 """
296304 if input_signature is not None and input_dataset is not None:
297305 raise ValueError('Both input_signature and input_dataset cannot be '
@@ -303,6 +311,11 @@ def export_pipeline(computational_stages,
303311 raise TypeError('If input_dataset is provided, it should be an instance '
304312 'of tf.Dataset.')
305313
314+ if pipeline_depth % len(computational_stages) != 0:
315+ raise ValueError(f'pipeline_depth ({pipeline_depth}) should be a multiple '
316+ f'of the number of computational stages '
317+ f'({len(computational_stages)}).')
318+
306319 first_stage = computational_stages[0]
307320 input_signature = _validate_signature(first_stage, input_signature,
308321 input_dataset, inputs)
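Similarly, a hedged sketch of calling `export_pipeline` with the renamed `pipeline_depth` argument; the two stages, the path and the dataset are placeholders, and with two stages `pipeline_depth` must be a multiple of 2:

import tensorflow as tf

def stage1(x):
  return x + 1.0  # First illustrative stage.

def stage2(x):
  return x * 2.0  # Second illustrative stage, fed by stage1's output.

dataset = tf.data.Dataset.from_tensor_slices(
    tf.zeros([16, 4], tf.float32)).batch(4, drop_remainder=True)

# pipeline_depth=4 is a multiple of the two computational stages, so the
# check introduced above passes; the signature is inferred from the dataset.
# export_pipeline is the function defined in this module.
runtime_func = export_pipeline([stage1, stage2], '/tmp/pipeline_model',
                               pipeline_depth=4, iterations=10,
                               input_dataset=dataset)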
@@ -312,7 +325,7 @@ def export_pipeline(computational_stages,
312325 def defunc():
313326 pipelining_ops.pipeline(
314327 computational_stages=computational_stages,
315- gradient_accumulation_count=gradient_accumulation_count,
328+ gradient_accumulation_count=pipeline_depth,
316329 repeat_count=iterations,
317330 inputs=inputs,
318331 infeed_queue=infeed,