Move position biases to attention module

Juarez Bochi
2023-12-15 11:30:17 -05:00
parent d0497ddc0b
commit 330f024d1c
2 changed files with 33 additions and 50 deletions


@@ -13,10 +13,6 @@ SHARED_REPLACEMENT_PATTERNS = [
     (".layer.1.layer_norm.", ".ln2."),
     (".layer.2.layer_norm.", ".ln3."),
     (".final_layer_norm.", ".ln."),
-    (
-        ".layers.0.layer.0.SelfAttention.relative_attention_bias.",
-        ".position_bias.relative_attention_bias."
-    ),
 ]
 ENCODER_REPLACEMENT_PATTERNS = [
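
For context, the removed pattern had been remapping the upstream checkpoint key `SelfAttention.relative_attention_bias` into a separate `position_bias` module; after this commit the bias table stays under the attention module, matching the commit title. The following is a minimal, framework-agnostic sketch (NumPy) of the idea: an attention module that owns its relative position bias table and adds it to the attention logits, T5-style. The class and method names, the simplified bucketing, and the random initialization are illustrative assumptions, not the repository's actual code.

# Hypothetical sketch: attention module owning its relative position bias.
import numpy as np

class SelfAttentionWithRelativeBias:
    def __init__(self, num_heads: int, num_buckets: int, seed: int = 0):
        self.num_heads = num_heads
        self.num_buckets = num_buckets
        # Learned table of shape (num_buckets, num_heads), kept inside the
        # attention module (cf. `SelfAttention.relative_attention_bias`).
        rng = np.random.default_rng(seed)
        self.relative_attention_bias = rng.normal(size=(num_buckets, num_heads))

    def _bucket(self, relative_position: np.ndarray) -> np.ndarray:
        # Simplified bucketing: clip relative distances into the table range
        # (real T5 uses log-spaced buckets; omitted here for brevity).
        return np.clip(relative_position + self.num_buckets // 2,
                       0, self.num_buckets - 1)

    def position_bias(self, q_len: int, k_len: int) -> np.ndarray:
        # relative_position[i, j] = j - i, bucketed, then looked up.
        ctx = np.arange(q_len)[:, None]
        mem = np.arange(k_len)[None, :]
        buckets = self._bucket(mem - ctx)                 # (q_len, k_len)
        bias = self.relative_attention_bias[buckets]      # (q_len, k_len, heads)
        return bias.transpose(2, 0, 1)                    # (heads, q_len, k_len)

    def __call__(self, scores: np.ndarray) -> np.ndarray:
        # scores: (heads, q_len, k_len) raw attention logits; the bias is
        # added before the softmax, which is where T5 injects position info.
        heads, q_len, k_len = scores.shape
        return scores + self.position_bias(q_len, k_len)

# Usage: bias the logits of a 2-head attention over a length-4 sequence.
attn = SelfAttentionWithRelativeBias(num_heads=2, num_buckets=8)
logits = np.zeros((2, 4, 4))
print(attn(logits).shape)  # (2, 4, 4)

Keeping the table inside the attention module means the checkpoint key needs no cross-module rename, which is why the shared replacement pattern above could be dropped.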