Update llms/hf_llm/models.py

Co-authored-by: Awni Hannun <awni.hannun@gmail.com>
This commit was authored by Anchen on 2024-01-07 02:24:54 +11:00 and committed via GitHub.
parent 35dcab90ef
commit a30273f606

View File

@@ -83,9 +83,9 @@ class Attention(nn.Module):
     self.v_proj = nn.Linear(dim, n_kv_heads * head_dim, bias=False)
     self.o_proj = nn.Linear(n_heads * head_dim, dim, bias=False)
     rope_scale = (
-        1 / args.rope_scaling.get("factor", 1.0)
+        1 / args.rope_scaling["factor"]
         if args.rope_scaling is not None
-        and args.rope_scaling.get("type") == "linear"
+        and args.rope_scaling["type"] == "linear"
         else 1
     )
     self.rope = nn.RoPE(