
Commit: Fix code format

yeonsily committed Feb 13, 2025
1 parent 0bb1c58 commit c660333
Showing 1 changed file with 4 additions and 2 deletions.
6 changes: 4 additions & 2 deletions vllm/model_executor/models/roberta.py
@@ -91,7 +91,8 @@ def forward(
             pos_list.append(position_ids[offset])
             token_list.append(input_ids[offset])
 
-        for index, (positions, tokens, seq_len) in enumerate(zip(pos_list, token_list, seq_lens)):
+        for index, (positions, tokens,
+                    seq_len) in enumerate(zip(pos_list, token_list, seq_lens)):
             # Verify assumption that incoming position are
             # always a sequence from 0 to N.
             expected_pos = torch.arange(positions.size()[0],
@@ -100,7 +101,8 @@ def forward(
             valid_input_mask = expected_pos < seq_len.to('cpu')
             expected_pos = expected_pos * valid_input_mask
             assert torch.equal(positions.to('cpu'), expected_pos)
-            position_ids[index] = create_position_ids_from_input_ids(tokens, self.padding_idx, seq_len)
+            position_ids[index] = create_position_ids_from_input_ids(
+                tokens, self.padding_idx, seq_len)
 
         # Position embeddings.
         position_embeddings = self.position_embeddings(position_ids)
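For context on the call being re-wrapped here: `create_position_ids_from_input_ids` follows the RoBERTa convention, where position ids for real tokens start at `padding_idx + 1` (RoBERTa uses `padding_idx = 1`) and padding positions keep `padding_idx`. The snippet below is a minimal stand-alone sketch of that convention, not the helper defined in vllm/model_executor/models/roberta.py; in particular, treating the third argument `seq_len` as the length of the valid prefix is an assumption about how it is used in this loop.

```python
import torch


def create_position_ids_from_input_ids(input_ids: torch.Tensor,
                                        padding_idx: int,
                                        seq_len: int) -> torch.Tensor:
    """Sketch of the RoBERTa position-id convention (assumed semantics)."""
    # 1 for real tokens, 0 for padding tokens.
    mask = (input_ids != padding_idx).int()
    # Assumption: positions at or beyond seq_len are treated as padding too.
    valid = (torch.arange(input_ids.size(0)) < seq_len).int()
    mask = mask * valid
    # Running count of real tokens gives 1, 2, 3, ...; shifting by padding_idx
    # puts the first real token at padding_idx + 1 and keeps padding at padding_idx.
    incremental_indices = torch.cumsum(mask, dim=0) * mask
    return incremental_indices.long() + padding_idx


if __name__ == "__main__":
    pad = 1  # RoBERTa padding_idx
    tokens = torch.tensor([0, 42, 37, 2, pad, pad])  # <s> ... </s> <pad> <pad>
    print(create_position_ids_from_input_ids(tokens, pad, seq_len=4))
    # tensor([2, 3, 4, 5, 1, 1])
```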
