[Misc] Keep only one implementation of the create_dummy_prompt function. (#4716)
AllenDou committed May 10, 2024
1 parent 208b71b commit e965d46
Showing 1 changed file with 4 additions and 32 deletions.
tests/test_sequence.py: 36 changes (4 additions & 32 deletions)
@@ -1,36 +1,8 @@
-import time
-from typing import Optional
-
 import pytest
 
-from vllm import SamplingParams
-from vllm.lora.request import LoRARequest
-from vllm.sequence import (SamplerOutput, Sequence, SequenceData,
-                           SequenceGroup, SequenceGroupOutput, SequenceOutput)
-
-
-def create_dummy_prompt(
-    request_id: str,
-    prompt_length: int,
-    block_size: Optional[int] = None,
-    lora_request: Optional[LoRARequest] = None,
-    use_beam_search: bool = False,
-    best_of: int = 1,
-) -> SequenceGroup:
-    if not block_size:
-        block_size = prompt_length
-
-    # Create dummy prompt sequence with tokens 0...block_size-1
-    # and prompt "0 ... block_size".
-    prompt_tokens = list(range(prompt_length))
-    prompt_str = " ".join([str(t) for t in prompt_tokens])
-    prompt = Sequence(int(request_id), prompt_str, prompt_tokens, block_size)
-    seq_group = SequenceGroup(
-        request_id, [prompt],
-        SamplingParams(use_beam_search=use_beam_search, best_of=best_of),
-        time.time(), lora_request)
-
-    return seq_group
+from tests.core.utils import create_dummy_prompt
+from vllm.sequence import (SamplerOutput, SequenceData, SequenceGroupOutput,
+                           SequenceOutput)
 
 
 @pytest.fixture
@@ -102,7 +74,7 @@ def test_sequence_data_prefill():
 
 
 def test_sequence_group_stage():
-    seq_group = create_dummy_prompt("1", 12)
+    _, seq_group = create_dummy_prompt("1", 12)
     assert seq_group.is_prefill() is True
    seq_group.update_num_computed_tokens(6)
     assert seq_group.is_prefill() is True
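
Note: the shared helper now imported from tests/core/utils is not part of this diff. Below is a minimal sketch of what that helper presumably looks like, assuming it mirrors the implementation removed above but returns both the Sequence and the SequenceGroup; that assumption is inferred from the updated call site, which now unpacks two values (_, seq_group). The exact signature and the Tuple return type are not confirmed by the diff.

import time
from typing import Optional, Tuple

from vllm import SamplingParams
from vllm.lora.request import LoRARequest
from vllm.sequence import Sequence, SequenceGroup


def create_dummy_prompt(
    request_id: str,
    prompt_length: int,
    block_size: Optional[int] = None,
    lora_request: Optional[LoRARequest] = None,
    use_beam_search: bool = False,
    best_of: int = 1,
) -> Tuple[Sequence, SequenceGroup]:
    # Default the block size to the prompt length, as the removed helper did.
    if not block_size:
        block_size = prompt_length

    # Dummy prompt with token ids 0..prompt_length-1 and the matching
    # whitespace-separated prompt string, e.g. "0 1 2 ...".
    prompt_tokens = list(range(prompt_length))
    prompt_str = " ".join([str(t) for t in prompt_tokens])
    prompt = Sequence(int(request_id), prompt_str, prompt_tokens, block_size)
    seq_group = SequenceGroup(
        request_id, [prompt],
        SamplingParams(use_beam_search=use_beam_search, best_of=best_of),
        time.time(), lora_request)

    # Returning the pair (rather than only the SequenceGroup) is what lets
    # callers that need just the group write: _, seq_group = create_dummy_prompt(...)
    return prompt, seq_group

With that shape, callers that only need the SequenceGroup simply discard the first element, as test_sequence_group_stage now does.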
