+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using Tensorflow.Layers;
+
+namespace Tensorflow.Keras.Layers
+{
+    public class BatchNormalization : Layer
+    {
+        private bool _USE_V2_BEHAVIOR = true;
+        private float momentum;
+        private float epsilon;
+        private bool center;
+        private bool scale;
+        private bool renorm;
+        private bool fused;
+        private bool _bessels_correction_test_only;
+        private int[] axis;
+        private string _data_format;
+        private IInitializer beta_initializer;
+        private IInitializer gamma_initializer;
+        private IInitializer moving_mean_initializer;
+        private IInitializer moving_variance_initializer;
+        // Variables created in build(): scale (gamma), offset (beta) and the moving mean statistic.
+        private RefVariable gamma;
+        private RefVariable beta;
+        private RefVariable moving_mean;
+
+        public BatchNormalization(int axis = -1,
+            float momentum = 0.99f,
+            float epsilon = 0.001f,
+            bool center = true,
+            bool scale = true,
+            IInitializer beta_initializer = null,
+            IInitializer gamma_initializer = null,
+            IInitializer moving_mean_initializer = null,
+            IInitializer moving_variance_initializer = null,
+            bool renorm = false,
+            float renorm_momentum = 0.99f,
+            bool trainable = true,
+            string name = null) : base(trainable: trainable,
+                name: name)
+        {
+            this.axis = new int[] { axis };
+            this.momentum = momentum;
+            this.epsilon = epsilon;
+            this.center = center;
+            this.scale = scale;
+            // Fall back to the Keras defaults when no initializers are supplied:
+            // zeros for beta / moving_mean, ones for gamma / moving_variance.
+            if (beta_initializer == null)
+                beta_initializer = tf.zeros_initializer;
+            if (gamma_initializer == null)
+                gamma_initializer = tf.ones_initializer;
+            if (moving_mean_initializer == null)
+                moving_mean_initializer = tf.zeros_initializer;
+            if (moving_variance_initializer == null)
+                moving_variance_initializer = tf.ones_initializer;
+            this.beta_initializer = beta_initializer;
+            this.gamma_initializer = gamma_initializer;
+            this.moving_mean_initializer = moving_mean_initializer;
+            this.moving_variance_initializer = moving_variance_initializer;
+            this.renorm = renorm; // renorm_momentum is accepted for API parity but not yet stored or used
+            this.fused = true;
+            this.supports_masking = true;
+            this._bessels_correction_test_only = true;
+        }
+
+        protected override void build(TensorShape input_shape)
+        {
+            // Resolve negative axis values (e.g. -1) against the rank of the input.
+            var ndims = input_shape.NDim;
+            foreach (var (idx, x) in Python.enumerate(axis))
+                if (x < 0)
+                    axis[idx] = ndims + x;
+
+            // The fused path only recognizes channels-last here: axis == 3 maps to NHWC.
+            if (fused)
+                if (Enumerable.SequenceEqual(axis, new int[] { 3 }))
+                    _data_format = "NHWC";
+
+            var param_dtype = _dtype == TF_DataType.DtInvalid ? TF_DataType.TF_FLOAT : _dtype;
+            var param_shape = new int[] { input_shape.Dimensions[axis[0]] };
+
+            // Only the scale=true / center=true paths are implemented; the else branches are placeholders.
+            if (scale)
+                gamma = add_weight("gamma",
+                    param_shape,
+                    dtype: param_dtype,
+                    initializer: gamma_initializer,
+                    trainable: true);
+            else
+                throw new NotImplementedException("add_weight gamma");
+
+            if (center)
+                beta = add_weight("beta",
+                    param_shape,
+                    dtype: param_dtype,
+                    initializer: beta_initializer,
+                    trainable: true);
+            else
+                throw new NotImplementedException("add_weight beta");
+
+            if (_scope != null)
+            {
+                // TODO: variable-scope handling not implemented yet.
+            }
+
+            // The moving mean is not trained by gradient descent; it is updated by the layer itself.
+            moving_mean = add_weight("moving_mean",
+                param_shape,
+                dtype: param_dtype,
+                initializer: moving_mean_initializer,
+                trainable: false);
+        }
+    }
+}
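
For context, a minimal usage sketch of the layer this commit adds (hypothetical driver code, not part of the diff; it assumes the layer is invoked through the base Layer infrastructure, which this commit does not yet exercise):

    // Normalize over the last (channel) axis of an NHWC input; when build() later runs for a
    // [batch, height, width, channels] shape, gamma, beta and moving_mean get shape [channels].
    var bn = new BatchNormalization(axis: -1, momentum: 0.99f, epsilon: 0.001f, center: true, scale: true);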