From 1e8375848d3a3ebaccab83fd670b880864cf9409 Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Thu, 21 Mar 2024 10:09:34 +0800
Subject: [PATCH] update the code to use the module's __call__

---
 llama/model.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/llama/model.py b/llama/model.py
index c78570f..562fcad 100755
--- a/llama/model.py
+++ b/llama/model.py
@@ -403,10 +403,10 @@ class TransformerBlock(nn.Module):
             torch.Tensor: Output tensor after applying attention and feedforward layers.
 
         """
-        h = x + self.attention.forward(
+        h = x + self.attention(
             self.attention_norm(x), start_pos, freqs_cis, mask
         )
-        out = h + self.feed_forward.forward(self.ffn_norm(h))
+        out = h + self.feed_forward(self.ffn_norm(h))
         return out
 
 
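
Note (not part of the patch): calling a module instance goes through nn.Module.__call__, which runs any registered forward pre-/post-hooks around forward(); invoking .forward() directly bypasses them, so tooling that relies on hooks (profiling, feature extraction, quantization observers) silently stops working. A minimal, self-contained sketch of the difference -- the Linear layer and hook below are illustrative stand-ins, not code from llama/model.py:

import torch
import torch.nn as nn

layer = nn.Linear(4, 4)
calls = []
# Record every time the module's forward hook fires.
layer.register_forward_hook(lambda mod, inp, out: calls.append("hook fired"))

x = torch.randn(1, 4)
layer(x)          # dispatches via nn.Module.__call__, so the hook runs
layer.forward(x)  # bypasses __call__, so the hook is skipped

print(calls)      # ['hook fired'] -- one entry despite two forward passes

With this patch applied, hooks registered on self.attention and self.feed_forward fire as expected during TransformerBlock.forward.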