Skip to content
This repository was archived by the owner on Oct 11, 2024. It is now read-only.

Commit cea9f6b

Browse files
DarkLight1337 and Robert Shaw
authored and committed
[CI/Build] [3/3] Reorganize entrypoints tests (vllm-project#5966)
1 parent 445b0d3 commit cea9f6b

17 files changed

+20
-50
lines changed

.buildkite/test-pipeline.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -89,8 +89,8 @@ steps:
8989
mirror_hardwares: [amd]
9090

9191
commands:
92-
- pytest -v -s entrypoints -m llm
93-
- pytest -v -s entrypoints -m openai
92+
- pytest -v -s entrypoints/llm
93+
- pytest -v -s entrypoints/openai
9494

9595
- label: Examples Test
9696
working_dir: "/vllm-workspace/examples"

pyproject.toml

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,5 @@ skip_gitignore = true
6969
[tool.pytest.ini_options]
7070
markers = [
7171
"skip_global_cleanup",
72-
"llm: run tests for vLLM API only",
73-
"openai: run tests for OpenAI API only",
7472
"vlm: run tests for vision language models only",
7573
]

tests/entrypoints/llm/__init__.py

Whitespace-only changes.

tests/entrypoints/test_llm_encode.py renamed to tests/entrypoints/llm/test_encode.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
pytest.skip("TEST_ENTRYPOINTS=DISABLE, skipping entrypoints group",
1111
allow_module_level=True)
1212

13-
from ..conftest import cleanup
13+
from ...conftest import cleanup
1414

1515
MODEL_NAME = "intfloat/e5-mistral-7b-instruct"
1616

@@ -30,8 +30,6 @@
3030
[1000, 1003, 1001, 1002],
3131
]
3232

33-
pytestmark = pytest.mark.llm
34-
3533

3634
@pytest.fixture(scope="module")
3735
def llm():

tests/entrypoints/test_llm_generate.py renamed to tests/entrypoints/llm/test_generate.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
from tests.nm_utils.utils_skip import should_skip_test_group
77
from vllm import LLM, RequestOutput, SamplingParams
88

9-
from ..conftest import cleanup
9+
from ...conftest import cleanup
1010

1111
if should_skip_test_group(group_name="TEST_ENTRYPOINTS"):
1212
pytest.skip("TEST_ENTRYPOINTS=DISABLE, skipping entrypoints group",
@@ -28,8 +28,6 @@
2828
[0, 3, 1, 2],
2929
]
3030

31-
pytestmark = pytest.mark.llm
32-
3331

3432
@pytest.fixture(scope="module")
3533
def llm():

tests/entrypoints/test_llm_generate_multiple_loras.py renamed to tests/entrypoints/llm/test_generate_multiple_loras.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
from vllm import LLM
1010
from vllm.lora.request import LoRARequest
1111

12-
from ..conftest import cleanup
12+
from ...conftest import cleanup
1313

1414
if should_skip_test_group(group_name="TEST_ENTRYPOINTS"):
1515
pytest.skip("TEST_ENTRYPOINTS=DISABLE, skipping entrypoints group",
@@ -26,8 +26,6 @@
2626

2727
LORA_NAME = "typeof/zephyr-7b-beta-lora"
2828

29-
pytestmark = pytest.mark.llm
30-
3129

3230
@pytest.fixture(scope="module")
3331
def llm():

tests/entrypoints/openai/__init__.py

Whitespace-only changes.

tests/entrypoints/test_openai_chat.py renamed to tests/entrypoints/openai/test_chat.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
from huggingface_hub import snapshot_download
1515
from openai import BadRequestError
1616

17-
from ..utils import RemoteOpenAIServer
17+
from ...utils import RemoteOpenAIServer
1818

1919
# any model with a chat template should work here
2020
MODEL_NAME = "HuggingFaceH4/zephyr-7b-beta"
@@ -69,8 +69,6 @@
6969
"Swift", "Kotlin"
7070
]
7171

72-
pytestmark = pytest.mark.openai
73-
7472

7573
@pytest.fixture(scope="module")
7674
def zephyr_lora_files():

tests/entrypoints/test_openai_completion.py renamed to tests/entrypoints/openai/test_completion.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616

1717
from vllm.transformers_utils.tokenizer import get_tokenizer
1818

19-
from ..utils import RemoteOpenAIServer
19+
from ...utils import RemoteOpenAIServer
2020

2121
# any model with a chat template should work here
2222
MODEL_NAME = "HuggingFaceH4/zephyr-7b-beta"
@@ -71,8 +71,6 @@
7171
"Swift", "Kotlin"
7272
]
7373

74-
pytestmark = pytest.mark.openai
75-
7674

7775
@pytest.fixture(scope="module")
7876
def zephyr_lora_files():

tests/entrypoints/test_openai_embedding.py renamed to tests/entrypoints/openai/test_embedding.py

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2,21 +2,15 @@
22
import pytest
33
import ray
44

5-
<<<<<<< HEAD
65
from tests.nm_utils.utils_skip import should_skip_test_group
7-
8-
from ..utils import VLLM_PATH, RemoteOpenAIServer
9-
=======
10-
from ..utils import RemoteOpenAIServer
11-
>>>>>>> dd793d1d ([Hardware][AMD][CI/Build][Doc] Upgrade to ROCm 6.1, Dockerfile improvements, test fixes (#5422))
6+
from ...utils import RemoteOpenAIServer
127

138
if should_skip_test_group(group_name="TEST_ENTRYPOINTS"):
149
pytest.skip("TEST_ENTRYPOINTS=DISABLE, skipping entrypoints group",
1510
allow_module_level=True)
1611

17-
EMBEDDING_MODEL_NAME = "intfloat/e5-mistral-7b-instruct"
1812

19-
pytestmark = pytest.mark.openai
13+
EMBEDDING_MODEL_NAME = "intfloat/e5-mistral-7b-instruct"
2014

2115

2216
@pytest.fixture(scope="module")

0 commit comments

Comments
 (0)