@@ -33,6 +33,14 @@ In comparison to other projects, like for instance [TensorFlowSharp](https://www
3333| tf.net 0.15 | x | x | |
3434| tf.net 0.14 | x | | |
3535
36+ Read the docs & book [The Definitive Guide to Tensorflow.NET](https://tensorflownet.readthedocs.io/en/latest/FrontCover.html).
37+
38+ There are many examples residing at [TensorFlow.NET Examples](https://github.com/SciSharp/TensorFlow.NET-Examples).
39+
40+ For troubleshooting of running examples or installation, please refer [here](tensorflowlib/README.md).
41+
42+ #### C# Example
43+
3644Install TF.NET and TensorFlow binary through NuGet.
3745```sh
3846### install tensorflow C#/F# binding
@@ -63,6 +71,15 @@ int training_steps = 1000;
6371float learning_rate = 0.01f;
6472int display_step = 100;
6573
74+ // Sample data
75+ NDArray train_X, train_Y;
76+ int n_samples;
77+ train_X = np.array(3.3f, 4.4f, 5.5f, 6.71f, 6.93f, 4.168f, 9.779f, 6.182f, 7.59f, 2.167f,
78+              7.042f, 10.791f, 5.313f, 7.997f, 5.654f, 9.27f, 3.1f);
79+ train_Y = np.array(1.7f, 2.76f, 2.09f, 3.19f, 1.694f, 1.573f, 3.366f, 2.596f, 2.53f, 1.221f,
80+              2.827f, 3.465f, 1.65f, 2.904f, 2.42f, 2.94f, 1.3f);
81+ n_samples = train_X.shape[0];
82+
6683// We can set a fixed init value in order to demo
6784var W = tf.Variable(-0.06f, name: "weight");
6885var b = tf.Variable(-0.73f, name: "bias");
@@ -142,11 +159,65 @@ model.fit(x_train[new Slice(0, 1000)], y_train[new Slice(0, 1000)],
142159    validation_split: 0.2f);
143160```
144161
145- Read the docs & book [The Definitive Guide to Tensorflow.NET](https://tensorflownet.readthedocs.io/en/latest/FrontCover.html).
162+ #### F# Example
146163
147- There are many examples reside at [TensorFlow.NET Examples](https://github.com/SciSharp/TensorFlow.NET-Examples).
164+ Linear Regression in `Eager` mode:
165+
166+ ``` fsharp
167+ #r "nuget: TensorFlow.Net"
168+ #r "nuget: TensorFlow.Keras"
169+ #r "nuget: SciSharp.TensorFlow.Redist"
170+ #r "nuget: NumSharp"
171+
172+ open NumSharp
173+ open Tensorflow
174+ open type Tensorflow.Binding
175+ open type Tensorflow.KerasApi
176+
177+ let tf = New<tensorflow>()
178+ tf.enable_eager_execution()
179+
180+ // Parameters
181+ let training_steps = 1000
182+ let learning_rate = 0.01f
183+ let display_step = 100
184+
185+ // Sample data
186+ let train_X =
187+ np.array(3.3f, 4.4f, 5.5f, 6.71f, 6.93f, 4.168f, 9.779f, 6.182f, 7.59f, 2.167f,
188+ 7.042f, 10.791f, 5.313f, 7.997f, 5.654f, 9.27f, 3.1f)
189+ let train_Y =
190+ np.array(1.7f, 2.76f, 2.09f, 3.19f, 1.694f, 1.573f, 3.366f, 2.596f, 2.53f, 1.221f,
191+ 2.827f, 3.465f, 1.65f, 2.904f, 2.42f, 2.94f, 1.3f)
192+ let n_samples = train_X.shape.[0]
193+
194+ // We can set a fixed init value in order to demo
195+ let W = tf.Variable(-0.06f,name = "weight")
196+ let b = tf.Variable(-0.73f, name = "bias")
197+ let optimizer = keras.optimizers.SGD(learning_rate)
198+
199+ // Run training for the given number of steps.
200+ for step = 1 to (training_steps + 1) do
201+ // Run the optimization to update W and b values.
202+ // Wrap computation inside a GradientTape for automatic differentiation.
203+ use g = tf.GradientTape()
204+ // Linear regression (Wx + b).
205+ let pred = W * train_X + b
206+ // Mean square error.
207+ let loss = tf.reduce_sum(tf.pow(pred - train_Y,2)) / (2 * n_samples)
208+ // should stop recording
209+ // compute gradients
210+ let gradients = g.gradient(loss,struct (W,b))
211+
212+ // Update W and b following gradients.
213+ optimizer.apply_gradients(zip(gradients, struct (W,b)))
214+
215+ if (step % display_step) = 0 then
216+ let pred = W * train_X + b
217+ let loss = tf.reduce_sum(tf.pow(pred-train_Y,2)) / (2 * n_samples)
218+ printfn $"step: {step}, loss: {loss.numpy()}, W: {W.numpy()}, b: {b.numpy()}"
219+ ```
148220
149- Troubleshooting of running example or installation, please refer [here](tensorflowlib/README.md).
150221
151222### Contribute:
152223
0 commit comments