@@ -169,12 +169,12 @@ Linear Regression in `Eager` mode:
 #r "nuget: SciSharp.TensorFlow.Redist"
 #r "nuget: NumSharp"
 
-open System
 open NumSharp
 open Tensorflow
-open Tensorflow.Keras
+open type Tensorflow.Binding
+open type Tensorflow.KerasApi
 
-let tf = Binding.New<tensorflow>()
+let tf = New<tensorflow>()
 tf.enable_eager_execution()
 
 // Parameters
@@ -194,7 +194,7 @@ let n_samples = train_X.shape.[0]
 // We can set a fixed init value in order to demo
 let W = tf.Variable(-0.06f,name = "weight")
 let b = tf.Variable(-0.73f, name = "bias")
-let optimizer = KerasApi.keras.optimizers.SGD(learning_rate)
+let optimizer = keras.optimizers.SGD(learning_rate)
 
 // Run training for the given number of steps.
 for step = 1 to (training_steps + 1) do
@@ -210,7 +210,7 @@ for step = 1 to (training_steps + 1) do
     let gradients = g.gradient(loss,struct (W,b))
 
     // Update W and b following gradients.
-    optimizer.apply_gradients(Binding.zip(gradients, struct (W,b)))
+    optimizer.apply_gradients(zip(gradients, struct (W,b)))
 
     if (step % display_step) = 0 then
         let pred = W * train_X + b
0 commit comments