Skip to content

Commit c85ba55

Browse files
JackHunt authored and georgepaw committed
Revert "Wrap ipu.outlined_function in tf.function"
This reverts commit 9eca609.
1 parent 0b6c675 commit c85ba55

File tree

2 files changed

+0
-75
lines changed

2 files changed

+0
-75
lines changed

tensorflow/python/ipu/ops/functional_ops.py

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,6 @@
2020
from tensorflow.compiler.xla import xla_data_pb2
2121
from tensorflow.core.framework import attr_value_pb2
2222
from tensorflow.compiler.plugin.poplar.ops import gen_functional_ops
23-
from tensorflow.python.distribute import distribution_strategy_context
24-
from tensorflow.python.eager import def_function
25-
from tensorflow.python.eager import context as eager_context
2623
from tensorflow.python.framework import constant_op
2724
from tensorflow.python.framework import dtypes
2825
from tensorflow.python.framework import func_graph as func_graph_module
@@ -102,12 +99,6 @@ def func_wrapper(*args):
10299

103100
return _pack_sequence_as(func_graph.structured_outputs, outputs)
104101

105-
# If we are executing under a distribution strategy or eagerly.
106-
if distribution_strategy_context.has_strategy() or\
107-
eager_context.executing_eagerly():
108-
return def_function.function(func_wrapper)
109-
110-
# If we are executing with a session.
111102
return func_wrapper
112103

113104
if func is not None:

tensorflow/python/ipu/tests/functional_ops_test.py

Lines changed: 0 additions & 66 deletions
Original file line numberDiff line numberDiff line change
@@ -21,9 +21,6 @@
2121
from tensorflow.compiler.plugin.poplar.tests import test_utils as tu
2222
from tensorflow.python import ipu
2323
from tensorflow.python import keras
24-
from tensorflow.python.data.ops import dataset_ops
25-
from tensorflow.python.eager import backprop
26-
from tensorflow.python.eager import def_function
2724
from tensorflow.python.framework import ops
2825
from tensorflow.python.framework import test_util
2926
from tensorflow.python.framework import dtypes
@@ -36,7 +33,6 @@
3633
from tensorflow.python.ops import init_ops
3734
from tensorflow.python.ops import math_ops
3835
from tensorflow.python.ops import nn
39-
from tensorflow.python.ops import random_ops
4036
from tensorflow.python.ops import variable_scope
4137
from tensorflow.python.ops import variables
4238
from tensorflow.python.platform import googletest
@@ -735,68 +731,6 @@ def testPipelineFirstLargeConstant(self):
735731
expected_value = model.predict(input_data, batch_size=1)
736732
self.assertAllClose(expected_value, actual_value, atol=1e-05)
737733

738-
def testOutlinedFunctionInFunction(self):
739-
@ipu.outlined_function
740-
def identity(x):
741-
return x
742-
743-
@def_function.function(experimental_compile=True)
744-
def f(x):
745-
with backprop.GradientTape() as tape:
746-
tape.watch(x)
747-
z = identity(x)
748-
return tape.gradient(z, x)
749-
750-
config = ipu.config.IPUConfig()
751-
config.configure_ipu_system()
752-
strategy = ipu.ipu_strategy.IPUStrategy()
753-
754-
with strategy.scope():
755-
x = random_ops.random_normal((1, 10))
756-
dfdx = strategy.run(f, [x])
757-
self.assertAllEqual(dfdx, np.ones((1, 10)))
758-
759-
def testKerasCustomLayerWithOutlinedFunction(self):
760-
class CustomLayer(keras.layers.Layer):
761-
def __init__(self, **kwargs):
762-
self.dense = keras.layers.Dense(units=4)
763-
super().__init__(**kwargs)
764-
765-
def build(self, input_shape):
766-
self.dense.build(input_shape)
767-
super().build(input_shape)
768-
769-
def call(self, inputs): # pylint: disable=arguments-differ
770-
@ipu.outlined_function
771-
def inner_call():
772-
y = self.dense(inputs)
773-
return y
774-
775-
return inner_call()
776-
777-
# Configure the IPU device.
778-
config = ipu.config.IPUConfig()
779-
config.auto_select_ipus = 1
780-
config.configure_ipu_system()
781-
782-
micro_batch_size = 4
783-
ds = dataset_ops.Dataset.from_tensor_slices(
784-
([1.] * micro_batch_size * 4, [2.] * micro_batch_size * 4))
785-
ds = ds.batch(micro_batch_size, drop_remainder=True)
786-
787-
strategy = ipu.ipu_strategy.IPUStrategy()
788-
789-
with strategy.scope():
790-
# Functional model
791-
input_layer = keras.Input(shape=1, batch_size=micro_batch_size)
792-
x = CustomLayer()(input_layer)
793-
model = keras.Model(input_layer, x)
794-
795-
model.compile(optimizer="sgd",
796-
loss=keras.losses.SparseCategoricalCrossentropy())
797-
798-
model.fit(ds, batch_size=micro_batch_size)
799-
800734

801735
if __name__ == "__main__":
802736
googletest.main()

0 commit comments

Comments (0)