# - "name": the model's name or path
# - "convert_args": a list of arguments for the conversion command
MODELS = {
    "TinyLlama-1.1B-Chat-v1.0": {
        "name": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
        "convert_args": ['--weight-format', 'fp16']
    },
    # ...
    "SmolLM2-360M": {
        "name": "HuggingFaceTB/SmolLM2-360M",
        "convert_args": ['--trust-remote-code']
    },
    "WhisperTiny": {
        "name": "openai/whisper-tiny",
        "convert_args": ['--trust-remote-code', '--weight-format', 'fp16']
    },
    # ...
    "LCM_Dreamshaper_v7-int8-ov": {
        "name": "OpenVINO/LCM_Dreamshaper_v7-int8-ov",
        "convert_args": []
    },
    "llava-1.5-7b-hf": {
        "name": "llava-hf/llava-1.5-7b-hf",
        "convert_args": ['--trust-remote-code', '--weight-format', 'fp16']
    },
    "llava-v1.6-mistral-7b-hf": {
        "name": "llava-hf/llava-v1.6-mistral-7b-hf",
        "convert_args": ['--trust-remote-code', '--weight-format', 'fp16']
    },
    # ...
}

TEST_FILES = {
    # ...
    "3283_1447_000.tar.gz": "https://huggingface.co/datasets/facebook/multilingual_librispeech/resolve/main/data/mls_polish/train/audio/3283_1447_000.tar.gz",
    "cmu_us_awb_arctic-wav-arctic_a0001.bin": "https://huggingface.co/datasets/Xenova/cmu-arctic-xvectors-extracted/resolve/main/cmu_us_awb_arctic-wav-arctic_a0001.bin"
}

TEST_VIDEOS = {
    "video/v0.pm4": "https://storage.openvinotoolkit.org/repositories/openvino_notebooks/data/data/video/Coco%20Walking%20in%20Berkeley.mp4"
}
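
# Illustrative sketch only (not part of the original file): the "name" and
# "convert_args" fields of a MODELS entry are meant to be forwarded to the
# model conversion command. The optimum-cli based export below is an
# assumption; the actual conversion helper is not shown in this excerpt.
def _example_convert_command(model_id: str, output_dir: str) -> list:
    model = MODELS[model_id]
    return ["optimum-cli", "export", "openvino",
            "--model", model["name"], *model["convert_args"], output_dir]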
SAMPLES_PY_DIR = Path(os.environ.get("SAMPLES_PY_DIR", os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../samples/python"))))
SAMPLES_CPP_DIR = Path(os.environ.get("SAMPLES_CPP_DIR", os.getcwd()))
SAMPLES_C_DIR = os.environ.get("SAMPLES_C_DIR", os.getcwd())
SAMPLES_JS_DIR = Path(os.environ.get("SAMPLES_JS_DIR", os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../samples/js"))))
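
# Illustrative sketch only (not part of the original file): the SAMPLES_*_DIR
# roots above are used to locate the sample scripts under test. The sample
# path below is hypothetical.
def _example_run_python_sample(model_path: str) -> None:
    import subprocess
    import sys
    sample = SAMPLES_PY_DIR / "text_generation" / "greedy_causal_lm.py"
    subprocess.run([sys.executable, str(sample), model_path], check=True)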

@pytest.fixture(scope="session", autouse=True)
def setup_and_teardown(request, tmp_path_factory):
    """Fixture to set up and tear down the temporary directories."""

    ov_cache = get_ov_cache_dir(tmp_path_factory.mktemp("ov_cache"))
    models_dir = os.path.join(ov_cache, "test_models")
    test_data = os.path.join(ov_cache, "test_data")

    logger.info(f"Creating directories: {models_dir} and {test_data}")
    os.makedirs(models_dir, exist_ok=True)
    os.makedirs(test_data, exist_ok=True)

    request.config.cache.set("OV_CACHE", str(ov_cache))
    request.config.cache.set("MODELS_DIR", str(models_dir))
    request.config.cache.set("TEST_DATA", str(test_data))
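    # Note (illustration only, not part of the original file): the keys set
    # above are how the other session fixtures receive the temporary paths,
    # e.g. request.config.cache.get("TEST_DATA", None) further down.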

    yield

    if os.environ.get("CLEANUP_CACHE", "false").lower() != "false":
        if os.path.exists(ov_cache):
            logger.info(f"Removing temporary directory: {ov_cache}")
@@ -269,9 +273,9 @@ def download_model(request):
    command = ["huggingface-cli", "download", model_name, "--local-dir", model_path]
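    # Example (illustration only, not part of the original file): for the
    # "TinyLlama-1.1B-Chat-v1.0" entry above this expands to roughly
    #   huggingface-cli download TinyLlama/TinyLlama-1.1B-Chat-v1.0 --local-dir <model_path>
    # where the exact local directory layout is an assumption.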
    logger.info(f"Downloading command: {' '.join(command)}")
    retry_request(lambda: subprocess.run(command, check=True, capture_output=True, text=True, env=sub_env))

    yield model_path

    # Cleanup the model after tests
    if os.environ.get("CLEANUP_CACHE", "false").lower() == "true":
        if os.path.exists(model_cache):
@@ -281,13 +285,13 @@ def download_model(request):
@pytest.fixture(scope="session")
def download_test_content(request):
    """Download the test content from the given URL and return the file path or extracted folder."""

    test_data = request.config.cache.get("TEST_DATA", None)

    file_name = request.param
    file_url = TEST_FILES[file_name]
    file_path = os.path.join(test_data, file_name)
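    # Example (illustration only, not part of the original file): tests select
    # the asset through indirect parametrization, so request.param above is a
    # TEST_FILES key, e.g.
    #   @pytest.mark.parametrize("download_test_content",
    #                            ["cmu_us_awb_arctic-wav-arctic_a0001.bin"],
    #                            indirect=True)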

    if not os.path.exists(file_path):
        logger.info(f"Downloading test content from {file_url} to {file_path}...")
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
@@ -327,9 +331,9 @@ def download_test_content(request):
@pytest.fixture(scope="session")
def generate_test_content(request):
    """Generate an image of lines and return the file path."""

    test_data = request.config.cache.get("TEST_DATA", None)

    file_name = request.param
    file_path = os.path.join(test_data, file_name)
    if not os.path.exists(file_path):
@@ -355,24 +359,24 @@ def generate_test_content(request):
@pytest.fixture(scope="session")
def generate_image_generation_jsonl(request):
    """Generate a JSONL file for image generation prompts."""

    test_data = request.config.cache.get("TEST_DATA", None)
    file_name, json_entries = request.param
    file_path = os.path.join(test_data, file_name)
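    # Example (illustration only, not part of the original file): request.param
    # is a (file_name, json_entries) tuple supplied via indirect
    # parametrization; the entry schema is hypothetical and depends on the
    # consuming sample, e.g.
    #   ("image_generation.jsonl", [{"prompt": "a drawing of a small house"}])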

    if not os.path.exists(file_path):
        os.makedirs(os.path.dirname(file_path), exist_ok=True)

        with open(file_path, "w", encoding="utf-8") as f:
            for entry in json_entries:
                f.write(json.dumps(entry) + "\n")

        logger.info(f"Generated image generation JSONL file at {file_path}")
    else:
        logger.info(f"Image generation JSONL file already exists at {file_path}")

    yield file_path

    # Cleanup the JSONL file after tests
    if os.environ.get("CLEANUP_CACHE", "false").lower() == "true":
        if os.path.exists(file_path):
@@ -387,3 +391,21 @@ def run_gc_after_test():
    """
    yield
    gc.collect()

@pytest.fixture(scope="session")
def download_test_video():
    """Download the test video to a temporary directory and yield the file path."""
    video_v0_url = TEST_VIDEOS["video/v0.pm4"]
    response = requests.get(video_v0_url, stream=True)
    response.raise_for_status()

    temp_dir = tempfile.mkdtemp()
    video_path = os.path.join(temp_dir, "v0.mp4")
    with open(video_path, 'wb') as f:
        for chunk in response.iter_content(chunk_size=8192):
            f.write(chunk)
    yield video_path

    # Cleanup the downloaded video and its temporary directory after tests
    if os.path.exists(video_path):
        os.remove(video_path)
    os.rmdir(temp_dir)
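
# Illustrative usage sketch (not part of the original file): a test consuming
# the download_test_video fixture defined above; the assertions are
# hypothetical and only show how the yielded path would be used.
def _example_test_video_path(download_test_video):
    assert os.path.exists(download_test_video)
    assert download_test_video.endswith(".mp4")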