Skip to content

Commit 9580816

Browse files
committed
+ tests
1 parent 4f48162 commit 9580816

File tree

8 files changed

+180
-173
lines changed

8 files changed

+180
-173
lines changed

tests/python_tests/samples/conftest.py

Lines changed: 34 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626
# - "name": the model's name or path
2727
# - "convert_args": a list of arguments for the conversion command
2828
MODELS: Dict[str, Dict[str, Any]] = {
29-
"TinyLlama-1.1B-Chat-v1.0": {
29+
"TinyLlama-1.1B-Chat-v1.0": {
3030
"name": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
3131
"convert_args": ['--weight-format', 'fp16']
3232
},
@@ -46,7 +46,7 @@
4646
"SmolLM2-360M": {
4747
"name": "HuggingFaceTB/SmolLM2-360M",
4848
"convert_args": ['--trust-remote-code']
49-
},
49+
},
5050
"WhisperTiny": {
5151
"name": "openai/whisper-tiny",
5252
"convert_args": ['--trust-remote-code', '--weight-format', 'fp16']
@@ -84,11 +84,11 @@
8484
"LCM_Dreamshaper_v7-int8-ov": {
8585
"name": "OpenVINO/LCM_Dreamshaper_v7-int8-ov",
8686
"convert_args": []
87-
},
87+
},
8888
"llava-1.5-7b-hf": {
8989
"name": "llava-hf/llava-1.5-7b-hf",
9090
"convert_args": ['--trust-remote-code', '--weight-format', 'fp16']
91-
},
91+
},
9292
"llava-v1.6-mistral-7b-hf": {
9393
"name": "llava-hf/llava-v1.6-mistral-7b-hf",
9494
"convert_args": ['--trust-remote-code', '--weight-format', 'fp16']
@@ -129,6 +129,10 @@
129129
"name": "katuni4ka/tiny-random-llava",
130130
"convert_args": ["--trust-remote-code", "--task", "image-text-to-text"]
131131
},
132+
"tiny-random-qwen2vl": {
133+
"name": "katuni4ka/tiny-random-qwen2vl",
134+
"convert_args": ["--trust-remote-code", "--task", "image-text-to-text"]
135+
},
132136
"bge-small-en-v1.5": {
133137
"name": "BAAI/bge-small-en-v1.5",
134138
"convert_args": ["--trust-remote-code"]
@@ -164,7 +168,8 @@
164168
"cat.png": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/diffusers/cat.png",
165169
"cat": "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/d5fbbd1a-d484-415c-88cb-9986625b7b11",
166170
"3283_1447_000.tar.gz": "https://huggingface.co/datasets/facebook/multilingual_librispeech/resolve/main/data/mls_polish/train/audio/3283_1447_000.tar.gz",
167-
"cmu_us_awb_arctic-wav-arctic_a0001.bin": "https://huggingface.co/datasets/Xenova/cmu-arctic-xvectors-extracted/resolve/main/cmu_us_awb_arctic-wav-arctic_a0001.bin"
171+
"cmu_us_awb_arctic-wav-arctic_a0001.bin": "https://huggingface.co/datasets/Xenova/cmu-arctic-xvectors-extracted/resolve/main/cmu_us_awb_arctic-wav-arctic_a0001.bin",
172+
"video0.mp4": "https://storage.openvinotoolkit.org/repositories/openvino_notebooks/data/data/video/Coco%20Walking%20in%20Berkeley.mp4"
168173
}
169174

170175
SAMPLES_PY_DIR = Path(
@@ -182,23 +187,24 @@
182187
)
183188
)
184189

190+
185191
@pytest.fixture(scope="session", autouse=True)
186192
def setup_and_teardown(request, tmp_path_factory):
187193
"""Fixture to set up and tear down the temporary directories."""
188-
189-
ov_cache = get_ov_cache_dir(tmp_path_factory.mktemp("ov_cache"))
194+
195+
ov_cache = get_ov_cache_dir(tmp_path_factory.mktemp("ov_cache"))
190196
downloaded_models_dir = get_ov_cache_downloaded_models_dir()
191197
converted_models_dir = get_ov_cache_converted_models_dir()
192198
test_data = ov_cache / "test_data"
193-
199+
194200
logger.info(f"Creating directories: {downloaded_models_dir}, {converted_models_dir}, and {test_data}")
195201
test_data.mkdir(parents=True, exist_ok=True)
196-
202+
197203
request.config.cache.set("OV_CACHE", str(ov_cache))
198204
request.config.cache.set("TEST_DATA", str(test_data))
199-
205+
200206
yield
201-
207+
202208
if os.environ.get("CLEANUP_CACHE", "false").lower() != "false":
203209
if os.path.exists(ov_cache):
204210
logger.info(f"Removing temporary directory: {ov_cache}")
@@ -213,9 +219,9 @@ def download_gguf_model(model: Dict[str, Any], model_path: str) -> None:
213219
model_name = model["name"]
214220
model_gguf_filename = model["gguf_filename"]
215221
dest_dir = Path(model_path)
216-
222+
217223
manager = AtomicDownloadManager(dest_dir)
218-
224+
219225
def download_to_temp(temp_path: Path) -> None:
220226
command = ["huggingface-cli", "download", model_name, model_gguf_filename, "--local-dir", str(temp_path)]
221227
logger.info(f"Downloading command: {' '.join(command)}")
@@ -325,26 +331,27 @@ def download_to_temp(temp_path: Path) -> None:
325331
command = ["huggingface-cli", "download", model_name, "--local-dir", str(temp_path)]
326332
logger.info(f"Downloading command: {' '.join(command)}")
327333
retry_request(lambda: subprocess.run(command, check=True, capture_output=True, text=True, env=sub_env))
328-
334+
329335
manager.execute(download_to_temp)
330-
336+
331337
yield str(model_path)
332-
338+
333339
if os.environ.get("CLEANUP_CACHE", "false").lower() == "true":
334340
if model_cache.exists():
335341
logger.info(f"Removing downloaded model: {model_cache}")
336342
shutil.rmtree(model_cache)
337343

344+
338345
@pytest.fixture(scope="session")
339346
def download_test_content(request):
340347
"""Download the test content from the given URL and return the file path or extracted folder."""
341-
348+
342349
test_data = request.config.cache.get("TEST_DATA", None)
343-
350+
344351
file_name = request.param
345352
file_url = TEST_FILES[file_name]
346353
file_path = os.path.join(test_data, file_name)
347-
354+
348355
if not os.path.exists(file_path):
349356
logger.info(f"Downloading test content from {file_url} to {file_path}...")
350357
os.makedirs(os.path.dirname(file_path), exist_ok=True)
@@ -384,9 +391,9 @@ def download_test_content(request):
384391
@pytest.fixture(scope="session")
385392
def generate_test_content(request):
386393
"""Generate an image of lines and return the file path."""
387-
394+
388395
test_data = request.config.cache.get("TEST_DATA", None)
389-
396+
390397
file_name = request.param
391398
file_path = os.path.join(test_data, file_name)
392399
if not os.path.exists(file_path):
@@ -412,24 +419,24 @@ def generate_test_content(request):
412419
@pytest.fixture(scope="session")
413420
def generate_image_generation_jsonl(request):
414421
"""Generate a JSONL file for image generation prompts."""
415-
422+
416423
test_data = request.config.cache.get("TEST_DATA", None)
417424
file_name, json_entries = request.param
418425
file_path = os.path.join(test_data, file_name)
419-
426+
420427
if not os.path.exists(file_path):
421428
os.makedirs(os.path.dirname(file_path), exist_ok=True)
422-
429+
423430
with open(file_path, "w", encoding="utf-8") as f:
424431
for entry in json_entries:
425432
f.write(json.dumps(entry) + "\n")
426-
433+
427434
logger.info(f"Generated image generation JSONL file at {file_path}")
428435
else:
429436
logger.info(f"Image generation JSONL file already exists at {file_path}")
430-
437+
431438
yield file_path
432-
439+
433440
# Cleanup the JSONL file after tests
434441
if os.environ.get("CLEANUP_CACHE", "false").lower() == "true":
435442
if os.path.exists(file_path):

0 commit comments

Comments
 (0)