Commit

use LlamaConfig(head_dim)
Signed-off-by: Austin Liu <[email protected]>
austin362667 committed Jan 16, 2025
1 parent c2168a5 commit 460cfaf
Showing 1 changed file with 3 additions and 2 deletions.
test/transformers/test_rope.py (5 changes: 3 additions & 2 deletions)
@@ -2,6 +2,7 @@
 import torch
 
 from test.utils import supports_bfloat16
+from transformers.models.llama.configuration_llama import LlamaConfig
 from transformers.models.llama.modeling_llama import LlamaRotaryEmbedding
 from transformers.models.llama.modeling_llama import apply_rotary_pos_emb
 
@@ -57,7 +58,7 @@ def test_correctness(
     atol,
     rtol,
 ):
-    rotary_emb = LlamaRotaryEmbedding(head_dim, device=device)
+    rotary_emb = LlamaRotaryEmbedding(LlamaConfig(head_dim=head_dim), device=device)
 
     _tensor_q = torch.randn((bsz, seq_len, num_q_heads, head_dim), device=device).transpose(1, 2).to(dtype)
 
@@ -133,7 +134,7 @@ def test_functional_correctness(
     k1 = _k.clone().requires_grad_(True)
     k2 = _k.clone().requires_grad_(True)
 
-    rotary_emb = LlamaRotaryEmbedding(head_dim, device=device)
+    rotary_emb = LlamaRotaryEmbedding(LlamaConfig(head_dim=head_dim), device=device)
 
     pos_ids = torch.arange(seq_len, device=device, dtype=torch.long).unsqueeze(0)
     if expand_position_ids:
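For context, a minimal sketch of how the updated constructor call is used, assuming a recent transformers release in which LlamaRotaryEmbedding accepts a LlamaConfig (with head_dim) and returns (cos, sin); the head_dim, device, and tensor shapes below are illustrative, not taken from the test parametrization:

import torch
from transformers.models.llama.configuration_llama import LlamaConfig
from transformers.models.llama.modeling_llama import LlamaRotaryEmbedding, apply_rotary_pos_emb

head_dim = 64   # illustrative; the test parametrizes head_dim
device = "cpu"  # illustrative; the test runs on the parametrized device

# The config object now carries head_dim, so the rotary embedding derives its
# inverse frequencies from the config rather than from a bare integer argument.
rotary_emb = LlamaRotaryEmbedding(LlamaConfig(head_dim=head_dim), device=device)

# Typical usage: compute cos/sin from position ids, then rotate q and k.
bsz, num_heads, seq_len = 1, 2, 8
q = torch.randn(bsz, num_heads, seq_len, head_dim, device=device)
k = torch.randn(bsz, num_heads, seq_len, head_dim, device=device)
pos_ids = torch.arange(seq_len, device=device, dtype=torch.long).unsqueeze(0)
cos, sin = rotary_emb(q, pos_ids)
q_rot, k_rot = apply_rotary_pos_emb(q, k, cos, sin)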
