Skip to content
This repository was archived by the owner on Jul 1, 2023. It is now read-only.

Commit 45c2bcd

Browse files
committed
Remove TF-31 workaround.
1 parent 0198076 commit 45c2bcd

File tree

1 file changed

+0
-5
lines changed

1 file changed

+0
-5
lines changed

Sources/DeepLearning/Layer.swift

Lines changed: 0 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -196,11 +196,6 @@ public struct Dense<Scalar: TensorFlowFloatingPoint>: Layer {
196196
public typealias Activation = @differentiable (Tensor<Scalar>) -> Tensor<Scalar>
197197
@noDerivative public let activation: Activation
198198

199-
// FIXME(SR-9716): Remove this once the bug is fixed or worked around.
200-
public var allKeyPaths: [PartialKeyPath<Dense>] {
201-
return [\Dense.weight, \Dense.bias]
202-
}
203-
204199
@differentiable(wrt: (self, input))
205200
public func applied(to input: Tensor<Scalar>, in _: Context) -> Tensor<Scalar> {
206201
return activation(matmul(input, weight) + bias)

0 commit comments

Comments (0)