Skip to content
This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Commit 39114bb

Browse files
authored
Add default (identity) activations to the Dense layer initializers. (#26)
This change follows along with the defaulted values for Conv and other layers.
1 parent 9b3b609 commit 39114bb

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

Sources/DeepLearning/Layer.swift

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -211,7 +211,7 @@ public extension Dense where Scalar.RawSignificand: FixedWidthInteger {
211211
init<G: RandomNumberGenerator>(
212212
inputSize: Int,
213213
outputSize: Int,
214-
activation: @escaping Activation,
214+
activation: @escaping Activation = identity,
215215
generator: inout G
216216
) {
217217
self.init(weight: Tensor(glorotUniform: [Int32(inputSize), Int32(outputSize)],
@@ -220,7 +220,7 @@ public extension Dense where Scalar.RawSignificand: FixedWidthInteger {
220220
activation: activation)
221221
}
222222

223-
init(inputSize: Int, outputSize: Int, activation: @escaping Activation) {
223+
init(inputSize: Int, outputSize: Int, activation: @escaping Activation = identity) {
224224
self.init(inputSize: inputSize, outputSize: outputSize, activation: activation,
225225
generator: &PhiloxRandomNumberGenerator.global)
226226
}

0 commit comments

Comments (0)