Skip to content

Commit 3225729

Browse files
authored
[CI/Build] Remove the flaky gpt-oss lora test (vllm-project#27966)
Signed-off-by: Jee Jee Li <pandaleefree@gmail.com>
1 parent ba464e6 commit 3225729

File tree

1 file changed

+0
-5
lines changed

1 file changed

+0
-5
lines changed

tests/lora/test_gptoss_tp.py

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,6 @@
3232
###Response:<|end|><|start|>assistant<|channel|>final<|message|>""" # noqa: E501
3333

3434
EXPECTED_LORA_OUTPUT = [
35-
"SELECT AVG(Working_Horses) FROM farm WHERE Total_Horses > 5000;",
3635
"SELECT AVG(Working_Horses) FROM farm WHERE Total_Horses > 5000;",
3736
"SELECT MAX(Cows) AS Max_Cows, MIN(Cows) AS Min_Cows FROM farm;",
3837
"SELECT MAX(Cows) AS Max_Cows, MIN(Cows) AS Min_Cows FROM farm;",
@@ -41,9 +40,6 @@
4140

4241
def generate_and_test(llm: vllm.LLM, lora_path: str, lora_id: int) -> None:
4342
prompts = [
44-
PROMPT_TEMPLATE.format(
45-
context="What is the average number of working horses of farms with more than 5000 total number of horses?" # noqa: E501
46-
), # noqa: E501
4743
PROMPT_TEMPLATE.format(
4844
context="Give the average number of working horses on farms with more than 5000 total horses." # noqa: E501
4945
), # noqa: E501
@@ -67,7 +63,6 @@ def generate_and_test(llm: vllm.LLM, lora_path: str, lora_id: int) -> None:
6763
generated_text = output.outputs[0].text.strip()
6864
generated_texts.append(generated_text)
6965
print(f"Prompt: {prompt!r}, Generated text: {generated_text!r}")
70-
7166
for i in range(len(EXPECTED_LORA_OUTPUT)):
7267
assert generated_texts[i].startswith(EXPECTED_LORA_OUTPUT[i])
7368

0 commit comments

Comments (0)