Mirror of https://github.com/meta-llama/llama.git (synced 2026-01-15 08:22:55 -03:00)
update the code to use the module's __call__
@@ -403,10 +403,10 @@ class TransformerBlock(nn.Module):
             torch.Tensor: Output tensor after applying attention and feedforward layers.

         """
-        h = x + self.attention.forward(
+        h = x + self.attention(
             self.attention_norm(x), start_pos, freqs_cis, mask
         )
-        out = h + self.feed_forward.forward(self.ffn_norm(h))
+        out = h + self.feed_forward(self.ffn_norm(h))
         return out
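As background on why this change matters (not part of the commit itself): calling a module as `self.attention(...)` goes through `nn.Module.__call__`, which runs any registered forward pre-hooks and forward hooks around `forward()`, while calling `.forward()` directly bypasses that machinery. A minimal, self-contained sketch, assuming only stock PyTorch (the layer and hook names below are illustrative, not from the llama repo):

import torch
import torch.nn as nn

# A toy module standing in for self.attention / self.feed_forward.
layer = nn.Linear(4, 4)

def log_shape(module, inputs, output):
    # Forward hooks receive (module, inputs, output) after forward() runs.
    print(f"hook fired: output shape {tuple(output.shape)}")

layer.register_forward_hook(log_shape)

x = torch.randn(2, 4)
layer(x)          # invokes __call__: the hook fires and prints the shape
layer.forward(x)  # bypasses __call__: the hook never fires

Tools that rely on hooks (profilers, activation checkpointing, quantization and observability wrappers) only see modules invoked via `__call__`, which is why the commit swaps the direct `.forward(...)` calls for plain module calls.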