1 parent 24bd64a · commit 2fb0e22
labml_nn/lora/__init__.py
@@ -79,7 +79,7 @@ def __init__(self, in_features: int, out_features: int, bias: bool,
         # Matrix $A \in \mathbb{R}^{r \times k}$
         self.lora_a = nn.Parameter(torch.empty((r, in_features)))
         # Matrix $B \in \mathbb{R}^{d \times r}$, we keep $A$ and $B$ transposed
-        self.lora_b = nn.Parameter(torch.empty((outfeatures, r)))
+        self.lora_b = nn.Parameter(torch.empty((out_features, r)))

         with torch.no_grad():
             # Initialize $A$ similar to a weight matrix in a normal linear layer
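For context, the hunk above sits inside the __init__ of the LoRA linear layer, where the typo `outfeatures` is corrected to `out_features`. Below is a minimal, hypothetical sketch of such a layer with the fix applied. Only the names visible in the diff (in_features, out_features, r, lora_a, lora_b) come from the commit; the frozen base weight, the alpha / r scaling, the specific init calls, and the forward pass are assumptions for illustration.

import math

import torch
import torch.nn as nn


class Linear(nn.Module):
    """Hypothetical sketch of a LoRA linear layer (not the library's exact code)."""

    def __init__(self, in_features: int, out_features: int, bias: bool,
                 r: int, alpha: float = 1.0):
        super().__init__()
        # Frozen pretrained weight and bias (assumed; in practice loaded from a checkpoint)
        self.weight = nn.Parameter(torch.empty((out_features, in_features)), requires_grad=False)
        self.bias = nn.Parameter(torch.empty(out_features), requires_grad=False) if bias else None

        # LoRA scaling factor (assumed)
        self.scaling = alpha / r
        # Matrix $A \in \mathbb{R}^{r \times k}$
        self.lora_a = nn.Parameter(torch.empty((r, in_features)))
        # Matrix $B \in \mathbb{R}^{d \times r}$; $A$ and $B$ are kept transposed
        self.lora_b = nn.Parameter(torch.empty((out_features, r)))

        with torch.no_grad():
            # Placeholder init for the frozen weight; a real model would load pretrained values
            nn.init.kaiming_uniform_(self.weight, a=math.sqrt(5))
            if self.bias is not None:
                nn.init.zeros_(self.bias)
            # Initialize $A$ like a weight matrix in a normal linear layer, $B$ to zeros,
            # so the low-rank update starts at zero
            nn.init.kaiming_uniform_(self.lora_a, a=math.sqrt(5))
            nn.init.zeros_(self.lora_b)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Frozen base projection plus the scaled low-rank update x A^T B^T
        result = nn.functional.linear(x, self.weight, bias=self.bias)
        return result + (x @ self.lora_a.T @ self.lora_b.T) * self.scaling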