Commit 26a04bd

Add Subtract layer.
1 parent e661b82 commit 26a04bd

10 files changed (+118 / -38 lines)

src/TensorFlowNET.Keras/Engine/Interfaces/ITensorFlowOpLayer.cs

Lines changed: 0 additions & 12 deletions
This file was deleted.

src/TensorFlowNET.Keras/Layers/LayersApi.cs

Lines changed: 5 additions & 1 deletion
@@ -142,14 +142,15 @@ public Conv2D Conv2D(int filters,
         public Dense Dense(int units,
             Activation activation = null,
             IInitializer kernel_initializer = null,
+            bool use_bias = true,
             IInitializer bias_initializer = null,
             TensorShape input_shape = null)
             => new Dense(new DenseArgs
             {
                 Units = units,
                 Activation = activation ?? keras.activations.Linear,
                 KernelInitializer = kernel_initializer ?? tf.glorot_uniform_initializer,
-                BiasInitializer = bias_initializer ?? tf.zeros_initializer,
+                BiasInitializer = bias_initializer ?? (use_bias ? tf.zeros_initializer : null),
                 InputShape = input_shape
             });
 
@@ -375,6 +376,9 @@ public Rescaling Rescaling(float scale,
         public Add Add()
             => new Add(new MergeArgs { });
 
+        public Subtract Subtract()
+            => new Subtract(new MergeArgs { });
+
         public GlobalAveragePooling2D GlobalAveragePooling2D()
             => new GlobalAveragePooling2D(new Pooling2DArgs { });
 
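A rough usage sketch for the two API changes above (the new use_bias flag on Dense and the new Subtract() factory). The keras.layers entry point and the exact call shapes are assumptions based on the surrounding Keras.NET API, not part of this diff:

    using static Tensorflow.KerasApi;

    // Dense layer without a bias term: with use_bias false and no explicit
    // bias_initializer, BiasInitializer falls back to null instead of
    // tf.zeros_initializer.
    var dense = keras.layers.Dense(units: 10, use_bias: false);

    // Factory for the new element-wise subtraction merge layer.
    var subtract = keras.layers.Subtract();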
Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+    public class Subtract : Merge
+    {
+        public Subtract(MergeArgs args) : base(args)
+        {
+
+        }
+
+        protected override Tensors _merge_function(Tensors inputs)
+        {
+            if (len(inputs) != 2)
+                throw new ValueError($"A `Subtract` layer should be called on exactly 2 inputs");
+            return inputs[0] - inputs[1];
+        }
+    }
+}
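To make the behaviour of the new layer concrete: _merge_function requires exactly two inputs and returns their element-wise difference (first minus second). The Apply call and the Tensors constructor in this sketch are assumptions about the surrounding API rather than code from this commit:

    var a = tf.constant(new float[] { 3f, 5f, 7f });
    var b = tf.constant(new float[] { 1f, 1f, 2f });

    // Expected result: [2, 4, 5]. Passing any number of inputs other than two
    // triggers the ValueError thrown in _merge_function.
    var y = keras.layers.Subtract().Apply(new Tensors(a, b));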
Lines changed: 73 additions & 0 deletions
@@ -0,0 +1,73 @@
+using NumSharp;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using Tensorflow;
+using Tensorflow.Graphs;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+    public class TensorFlowOpLayer : Layer
+    {
+        TensorFlowOpLayerArgs args;
+        Dictionary<int, NDArray> constants => args.Constants;
+        NodeDef node_def => args.NodeDef;
+        static string TF_OP_LAYER_NAME_PREFIX = "tf_op_layer_";
+        public string OpType => node_def.Op;
+
+        public TensorFlowOpLayer(TensorFlowOpLayerArgs args)
+            : base(new LayerArgs
+            {
+                Name = TF_OP_LAYER_NAME_PREFIX + args.Name,
+                Trainable = args.Trainable,
+                DType = args.DType,
+                Autocast = false
+            })
+        {
+            this.args = args;
+            built = true;
+        }
+
+        protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
+        {
+            if (tf.Context.executing_eagerly())
+                return _defun_call(inputs);
+            return MakOp(inputs);
+        }
+
+        [AutoGraph]
+        Tensors _defun_call(Tensors inputs)
+            => MakOp(inputs);
+
+        Tensors MakOp(Tensors inputs)
+        {
+            var graph = inputs.graph;
+            graph.as_default();
+            foreach (var (index, constant) in enumerate(constants))
+            {
+                var value = constant_op.constant(constant, name: node_def.Input[index]);
+                inputs.Insert(index, value);
+            }
+
+            var (c_op, _) = ops._create_c_op(graph, node_def, inputs.ToArray(), new Operation[0]);
+            var op = graph._create_op_from_tf_operation(c_op);
+            op._control_flow_post_processing();
+
+            // Record the gradient because custom-made ops don't go through the
+            // code-gen'd eager call path
+            var op_type = op.node_def.Op;
+
+            tf.Runner.RecordGradient(op_type, op.inputs._inputs, null, op.outputs);
+
+            graph.Exit();
+            return op.outputs;
+        }
+
+        public Layer GetOpLayer(TensorFlowOpLayerArgs args)
+            => new TensorFlowOpLayer(args);
+    }
+}
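For context, this class is what CreateKerasHistoryHelper (see the base_layer_utils.cs change below) now constructs directly: when a raw TensorFlow op is applied to symbolic Keras tensors, the op's NodeDef is wrapped in a TensorFlowOpLayer (its name prefixed with tf_op_layer_) so it can participate in the functional layer graph, with MakOp re-creating the op inside the graph and recording its gradient. A hypothetical trigger for that path might look like the following; keras.Input and tf.square are used loosely here and are not part of this commit:

    // A plain tf op applied to a symbolic Keras tensor. When the functional
    // model is assembled, this op gets wrapped in a TensorFlowOpLayer instead
    // of a regular Keras layer.
    var inputs = keras.Input(shape: 8);
    var squared = tf.square(inputs);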

src/TensorFlowNET.Keras/Losses/Huber.cs

Lines changed: 3 additions & 3 deletions
@@ -27,10 +27,10 @@ public override Tensor Apply(Tensor y_true = null, Tensor y_pred =null, bool fro
             Tensor error = math_ops.subtract(y_pred_cast, y_true_cast);
             Tensor abs_error = math_ops.abs(error);
             Tensor half = ops.convert_to_tensor(0.5, dtype: abs_error.dtype);
-            return gen_math_ops.mean(array_ops.where_v2(abs_error <= delta,
-                half * math_ops.pow(error, 2),
+            return gen_math_ops.mean(array_ops.where_v2(abs_error <= delta,
+                half * math_ops.pow(error, 2),
                 half * math_ops.pow(delta, 2) + delta * (abs_error - delta)),
-                axis : -1);
+                axis: -1);
         }
     }
 }
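For reference, the expression above (only reformatted here) is the standard Huber loss, applied element-wise and then averaged over the last axis:

    \[
      L_\delta(e) =
      \begin{cases}
        \tfrac{1}{2} e^2, & |e| \le \delta \\
        \tfrac{1}{2} \delta^2 + \delta (|e| - \delta), & |e| > \delta
      \end{cases}
      \qquad e = y_{\text{pred}} - y_{\text{true}}
    \]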

src/TensorFlowNET.Keras/Losses/LogCosh.cs

Lines changed: 1 addition & 3 deletions
@@ -19,10 +19,8 @@ public override Tensor Apply(Tensor y_true = null, Tensor y_pred =null, bool fro
             Tensor y_pred_dispatch = ops.convert_to_tensor(y_pred);
             Tensor y_true_cast = gen_math_ops.cast(y_true, y_pred_dispatch.dtype);
             Tensor x = y_pred_dispatch - y_true_cast;
-
-            return gen_math_ops.mean(x + gen_math_ops.softplus(-2.0 * x) - math_ops.cast(math_ops.log(tf.Variable(2.0)), x.dtype),axis: -1);
-
 
+            return gen_math_ops.mean(x + gen_math_ops.softplus(-2.0 * x) - math_ops.cast(math_ops.log(tf.Variable(2.0)), x.dtype), axis: -1);
         }
     }
 }
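The expression being tidied above computes the log-cosh loss through a numerically stable identity, which is why softplus shows up in the code:

    \[
      \log \cosh(x) = x + \operatorname{softplus}(-2x) - \log 2,
      \qquad \operatorname{softplus}(z) = \log\!\left(1 + e^{z}\right),
      \qquad x = y_{\text{pred}} - y_{\text{true}}
    \]

with the mean taken over the last axis.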
src/TensorFlowNET.Keras/Losses/MeanAbsolutePercentageError.cs

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@ public override Tensor Apply(Tensor y_true = null, Tensor y_pred =null, bool fro
             Tensor y_pred_dispatch = ops.convert_to_tensor(y_pred);
             Tensor y_true_cast = gen_math_ops.cast(y_true, y_pred_dispatch.dtype);
             Tensor diff = math_ops.abs(y_true_cast - y_pred_dispatch) / gen_math_ops.maximum(math_ops.abs(y_true_cast), gen_math_ops.cast(tf.constant(1e-7), y_pred_dispatch.dtype));
-            return gen_math_ops.cast(tf.constant(100), y_pred_dispatch.dtype) *gen_math_ops.mean(diff, axis: -1);
+            return gen_math_ops.cast(tf.constant(100), y_pred_dispatch.dtype) * gen_math_ops.mean(diff, axis: -1);
         }
     }
 }
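The line above is the usual mean absolute percentage error, with the denominator clamped to at least 1e-7 so that zero targets do not cause a division by zero:

    \[
      \mathrm{MAPE} = 100 \cdot \operatorname{mean}\!\left(
        \frac{\lvert y_{\text{true}} - y_{\text{pred}} \rvert}
             {\max\!\left(\lvert y_{\text{true}} \rvert,\ 10^{-7}\right)}
      \right)
    \]

with the mean taken over the last axis.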

src/TensorFlowNET.Keras/Losses/MeanSquaredError.cs

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ public override Tensor Apply(Tensor y_true = null, Tensor y_pred =null, bool fro
         {
             Tensor y_pred_dispatch = ops.convert_to_tensor(y_pred);
             Tensor y_true_cast = gen_math_ops.cast(y_true, y_pred_dispatch.dtype);
-            return gen_math_ops.mean(gen_math_ops.squared_difference(y_pred_dispatch, y_true_cast), axis: -1);
+            return gen_math_ops.mean(gen_math_ops.squared_difference(y_pred_dispatch, y_true_cast), axis: -1);
         }
     }
 }

src/TensorFlowNET.Keras/Tensorflow.Keras.csproj

Lines changed: 7 additions & 1 deletion
@@ -21,7 +21,9 @@
 * Support BatchNormalization layer.
 * Building keras model in subclass, functional and sequential api
 * Implemented backward_function.
-* Support model.load_weights.</PackageReleaseNotes>
+* Support model.load_weights.
+* Add Subtract layer
+* Support YOLOv3 model.</PackageReleaseNotes>
     <Description>Keras for .NET
 
 Keras is an API designed for human beings, not machines. Keras follows best practices for reducing cognitive load: it offers consistent &amp; simple APIs, it minimizes the number of user actions required for common use cases, and it provides clear &amp; actionable error messages.</Description>
@@ -64,4 +66,8 @@ Keras is an API designed for human beings, not machines. Keras follows best prac
     </None>
   </ItemGroup>
 
+  <ItemGroup>
+    <Folder Include="Engine\Interfaces\" />
+  </ItemGroup>
+
 </Project>

src/TensorFlowNET.Keras/Utils/base_layer_utils.cs

Lines changed: 4 additions & 16 deletions
@@ -21,6 +21,7 @@ limitations under the License.
 using System.Reflection;
 using Tensorflow.Keras.ArgsDefinition;
 using Tensorflow.Keras.Engine;
+using Tensorflow.Keras.Layers;
 using static Tensorflow.Binding;
 using static Tensorflow.KerasApi;
 
@@ -150,33 +151,20 @@ public static void CreateKerasHistoryHelper(Tensors tensors, List<Operation> pro
 
                     // recursively
                     CreateKerasHistoryHelper(layer_inputs, processed_ops, created_layers);
-                    var op_layer = GetLayer<ITensorFlowOpLayer>(new TensorFlowOpLayerArgs
+                    var opLayerArgs = new TensorFlowOpLayerArgs
                     {
                         NodeDef = op.node_def,
                         Constants = constants,
                         Name = op.name
-                    });
+                    };
+                    var op_layer = new TensorFlowOpLayer(opLayerArgs);
                     created_layers.Add(op_layer);
                     op_layer.SetConnectivityMetadata(layer_inputs, op.outputs);
                     processed_ops.Add(op);
                 }
             }
         }
 
-        static Layer GetLayer<T>(LayerArgs args)
-        {
-            Layer layer = default;
-            var assemble = Assembly.Load("TensorFlow.Keras.Layers");
-            foreach (var type in assemble.GetTypes().Where(x => x.GetInterface(typeof(T).Name) != null))
-            {
-                layer = (Layer)Activator.CreateInstance(type, new object[] { args });
-            }
-
-            if (layer == null)
-                throw new NotImplementedException($"Can't find implementation for type {args.GetType().Name}");
-            return layer;
-        }
-
         // recusive
         static bool uses_keras_history(Tensor op_input)
         {
