From 77e7d162964a19f1ca8189e402325b813397386f Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 28 Jul 2025 09:18:42 +0200 Subject: [PATCH 1/8] Add: Based ffmpeg average end to end latency test --- tests/validation/mtl_engine/ffmpeg_app.py | 245 ++++++++++++++++++ tests/validation/mtl_engine/media_files.py | 59 +++++ .../ffmpeg/test_ffmpeg_end_to_end_latency.py | 87 +++++++ 3 files changed, 391 insertions(+) create mode 100755 tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py diff --git a/tests/validation/mtl_engine/ffmpeg_app.py b/tests/validation/mtl_engine/ffmpeg_app.py index f2aa05d7d..f4f498725 100755 --- a/tests/validation/mtl_engine/ffmpeg_app.py +++ b/tests/validation/mtl_engine/ffmpeg_app.py @@ -739,6 +739,54 @@ def check_output_rgb24(rx_output: str, number_of_sessions: int): return ok_cnt == number_of_sessions +def check_output_video_mp4(output_file: str, video_size: str, host, build: str): + # Check output file size + try: + stat_proc = run(f"stat -c '%s' {output_file}", host=host) + if stat_proc.return_code == 0: + output_file_size = int(stat_proc.stdout_text.strip()) + logger.info(f"Output file size: {output_file_size} bytes for {output_file}") + log_to_file( + f"Output file size: {output_file_size} bytes for {output_file}", + host, + build, + ) + else: + logger.info(f"Could not get output file size for {output_file}") + log_to_file(f"Could not get output file size for {output_file}", host, build) + return False + except Exception as e: + logger.info(f"Error checking output file size: {e}") + log_to_file(f"Error checking output file size: {e}", host, build) + return False + + # Use ffprobe to check for a video stream and resolution + ffprobe_proc = run( + f"ffprobe -v error -show_streams {output_file}", host=host + ) + + codec_name_match = re.search(r"codec_name=([^\n]+)", ffprobe_proc.stdout_text) + width_match = re.search(r"width=(\d+)", ffprobe_proc.stdout_text) + height_match = re.search(r"height=(\d+)", ffprobe_proc.stdout_text) + + if codec_name_match and width_match and height_match: + codec_name = codec_name_match.group(1) + width = width_match.group(1) + height = height_match.group(1) + result = f"{width}x{height}" == video_size + logger.info( + f"MP4 check result: {result} (codec: {codec_name}, size: {width}x{height})" + ) + log_to_file( + f"MP4 check result: {result} (codec: {codec_name}, size: {width}x{height})", + host, + build, + ) + return result + else: + logger.info("MP4 check failed") + log_to_file("MP4 check failed", host, build) + return False def create_empty_output_files( output_format: str, number_of_files: int = 1, host=None, build: str = "" @@ -989,3 +1037,200 @@ def decode_video_format_to_st20p(video_format: str) -> tuple: else: log_fail(f"Invalid video format: {video_format}") return None + +def check_latency_from_script(script_path, recv_file, latency_jpg, expected_latency, host): + """ + Runs the latency measurement script and checks if the measured latency is within expectation. + Returns True if passed, False if failed. 
+ """ + logger.info("Installing all dependencies for script...") + #run("python3 -m pip install opencv-python matplotlib pytesseract", host=host, enable_sudo=True) + + logger.info("Checking the end-to-end latency...") + script_cmd = f"python3 {script_path} {recv_file} {latency_jpg}" + result = run(script_cmd, host=host, enable_sudo=True) + stdout = result.stdout_text + if isinstance(stdout, list): + stdout = "\n".join(stdout) + logger.info(f"Latency script output:\n{stdout}") + + passed = False + match = re.search(r"Average End-to-End Latency:\s*([\d.]+)\s*ms", stdout) + if match: + avg_latency_ms = float(match.group(1)) + logger.info(f"Extracted average latency: {avg_latency_ms} ms") + if avg_latency_ms <= expected_latency: + logger.info(f"Test passed: average latency {avg_latency_ms} ms is within expected {expected_latency} ms") + passed = True + else: + log_fail(f"Test failed: average latency {avg_latency_ms} ms exceeds expected {expected_latency} ms") + passed = False + else: + log_fail("Could not extract average latency from script output.") + passed = False + + if not passed: + log_fail("test failed") + return passed + +def cleanup_output_files(cleanup_pattern): + """ + Removes all files matching the given glob pattern. + Logs actions and errors. + """ + import glob + import os + import logging + + output_files = glob.glob(cleanup_pattern) + if not output_files: + logging.info(f"No output files found for cleanup with pattern: {cleanup_pattern}") + for output_file in output_files: + try: + if os.path.exists(output_file): + os.remove(output_file) + logging.info(f"Removed output file: {output_file}") + else: + logging.info(f"Output file already removed or does not exist: {output_file}") + except Exception as file_exc: + logging.warning(f"Could not remove output file {output_file}: {file_exc}") + +def execute_test_latency_single_or_dual( + test_time: int, + build: str, + hosts, + type_: str, + video_format: str, + pg_format: str, + video_url: str, + output_format: str, + multiple_sessions: bool = False, + tx_is_ffmpeg: bool = True, + capture_cfg=None, + dual: bool = False, +): + """ + Runs latency test using either single host or dual host setup. + If dual=True, RX and TX run on separate hosts. 
+ """ + init_test_logging() + + if dual: + rx_host = list(hosts.values())[0] + tx_host = list(hosts.values())[1] + rx_nic_port_list = rx_host.vfs + tx_nic_port_list = tx_host.vfs + else: + rx_host = tx_host = list(hosts.values())[0] + rx_nic_port_list = tx_nic_port_list = rx_host.vfs + + video_size, fps = decode_video_format_16_9(video_format) + + # Drawtext filter strings for timestamp overlays + drawtext_rx = ( + "drawtext=fontsize=40:" + "text='Rx timestamp %{localtime\\\\:%H\\\\\\\\\\:%M\\\\\\\\\\:%S\\\\\\\\\\:%3N}':" + "x=10:y=70:fontcolor=white:box=1:boxcolor=black:boxborderw=10" + ) + drawtext_tx = ( + "drawtext=fontsize=40:" + "text='Tx timestamp %{localtime\\\\:%H\\\\\\\\\\:%M\\\\\\\\\\:%S\\\\\\\\\\:%3N}':" + "x=10:y=10:fontcolor=white:box=1:boxcolor=black:boxborderw=10" + ) + + rx_vf = f' -vf "{drawtext_rx}" ' + tx_vf = f' -vf "{drawtext_tx}" ' + + output_files = create_empty_output_files(output_format, 1, rx_host, build) + rx_output_opts = "" + rx_input_flag = "-" + + # Output options for ffmpeg RX depending on format + if output_format == "yuv": + rx_output_opts = f" -f rawvideo -pix_fmt yuv422p10le -video_size {video_size}" + elif output_format == "mp4": + rx_output_opts = f" -vcodec mpeg4 -qscale:v 3 " + + # RX command with drawtext filter + rx_cmd = ( + f"ffmpeg -p_port {rx_nic_port_list[0]} -p_sip {ip_dict['rx_interfaces']} " + f"-p_rx_ip {ip_dict['rx_sessions']} -udp_port 20000 -payload_type 112 " + f"-fps {fps} -pix_fmt yuv422p10le -video_size {video_size} " + f"-f mtl_st20p -i {rx_input_flag}" + f"{rx_vf}" + f"{rx_output_opts} " + f"{output_files[0]} -y" + ) + + # TX command with drawtext filter and readrate + tx_fps_filter = "" + readrate = f" -readrate {(fps/25)/2} " # Reduce readrate by half to simulate sending from partially empty buffers + tx_cmd = ( + f"ffmpeg -video_size {video_size} -f rawvideo{readrate} -pix_fmt yuv422p10le " + f"-i {video_url} {tx_vf}{tx_fps_filter} -p_port {tx_nic_port_list[1]} " + f"-p_sip {ip_dict['tx_interfaces']} -p_tx_ip {ip_dict['tx_sessions']} " + f"-udp_port 20000 -payload_type 112 -f mtl_st20p -" + ) + + logger.info(f"RX Command: {rx_cmd}") + logger.info(f"TX Command: {tx_cmd}") + log_to_file(f"RX Command: {rx_cmd}", rx_host, build) + log_to_file(f"TX Command: {tx_cmd}", tx_host, build) + + # Start RX pipeline + rx_proc = run( + rx_cmd, + cwd=build, + timeout=test_time + 60, + testcmd=True, + host=rx_host, + background=True, + enable_sudo=True, + ) + + # Start TX pipeline + tx_proc = run( + tx_cmd, + cwd=build, + timeout=test_time + 60, + testcmd=True, + host=tx_host, + background=True, + enable_sudo=True, + ) + + try: + # ... run test ... 
+ logger.info(f"Running test for {test_time} seconds...") + time.sleep(test_time) + finally: + # Ensure processes are terminated and waited on + for proc in [tx_proc, rx_proc]: + if proc: + try: + proc.terminate() + proc.wait(timeout=5) + except Exception: + try: + proc.kill() + proc.wait(timeout=5) + except Exception: + pass + + # Validate output file + passed = False + match output_format: + case "yuv": + passed = check_output_video_yuv(output_files[0], rx_host, build, video_url) + case "h264": + passed = check_output_video_h264( + output_files[0], video_size, rx_host, build, video_url + ) + case "mp4": + passed = check_output_video_mp4( + output_files[0], video_size, rx_host, build + ) + + if not passed: + log_fail("test failed") + return passed diff --git a/tests/validation/mtl_engine/media_files.py b/tests/validation/mtl_engine/media_files.py index f07b30cc4..e3b8289a5 100755 --- a/tests/validation/mtl_engine/media_files.py +++ b/tests/validation/mtl_engine/media_files.py @@ -566,3 +566,62 @@ "fps": "25", }, ) + +yuv_files_end_to_end = dict( + i720p25={ + "filename": "HDR_BBC_v4_008_Penguin1_1280x720_10bit_25Hz_P422_180frames.yuv", + "file_format": "YUV422RFC4175PG2BE10", + "format": "YUV_422_10bit", + "width": 1280, + "height": 720, + "fps": "25", + }, + i720p60={ + "filename": "Netflix_Crosswalk_1280x720_10bit_60Hz_P422_yuv422p10be_To_yuv422YCBCR10be.yuv", + "file_format": "YUV422RFC4175PG2BE10", + "format": "YUV_422_10bit", + "width": 1280, + "height": 720, + "fps": "60", + }, + i1080p25={ + "filename": "HDR_BBC_v4_008_Penguin1_1920x1080_10bit_25Hz_P422_180frames.yuv", + "file_format": "YUV422PLANAR10LE", + "format": "YUV_422_10bit", + "fps": "25", + "width": 1920, + "height": 1080, + }, + i1080p60={ + "filename": "Netflix_Crosswalk_1920x1080_10bit_60Hz_P422_yuv422p10be_To_yuv422YCBCR10be.yuv", + "file_format": "YUV422RFC4175PG2BE10", + "format": "YUV_422_10bit", + "width": 1920, + "height": 1080, + "fps": "60", + }, + i2160p119={ + "filename": "Netflix_Crosswalk_3840x2160_10bit_60Hz_P422_To_yuv422p10be_To_yuv422YCBCR10be.yuv", + "file_format": "YUV422RFC4175PG2BE10", + "format": "YUV_422_10bit", + "width": 3840, + "height": 2160, + "fps": "11988/100", + }, + i2160p30={ + "filename": "Plalaedit_Pedestrian_10bit_3840x2160_30Hz_P420_To_yuv422p10be_To_yuv422YCBCR10be.yuv", + "file_format": "YUV422RFC4175PG2BE10", + "format": "YUV_422_10bit", + "width": 3840, + "height": 2160, + "fps": "30", + }, + i2160p60={ + "filename": "Netflix_Crosswalk_3840x2160_10bit_60Hz_P422_To_yuv422p10be_To_yuv422YCBCR10be.yuv", + "file_format": "YUV422RFC4175PG2BE10", + "format": "YUV_422_10bit", + "width": 3840, + "height": 2160, + "fps": "60", + }, +) diff --git a/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py b/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py new file mode 100755 index 000000000..adf59c537 --- /dev/null +++ b/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py @@ -0,0 +1,87 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright(c) 2024-2025 Intel Corporation + +import os +import pytest +import logging +import subprocess +import glob +import re +import time +from mtl_engine import ffmpeg_app +from mtl_engine.media_files import yuv_files_end_to_end +from mtl_engine.ffmpeg_app import check_latency_from_script +from mtl_engine.ffmpeg_app import cleanup_output_files + + +# Parametrize test for different video formats and time multipliers +@pytest.mark.parametrize( + "video_format,test_time_multiplier,expected_latency", + [ + 
("i1080p25", 1, 1620), + ("i1080p60", 1, 1660), + ("i720p25", 1, 1620), + ("i720p60", 1, 1620), + ("i2160p119", 1, 1670), + ("i2160p30", 1, 1660), + ("i2160p60", 1, 1660), + ], +) +# Parametrize test for different output formats +@pytest.mark.parametrize("output_format", ["mp4"]) +def test_ffmpeg_end_to_end_latency( + hosts, + test_time, + build, + media, + nic_port_list, + video_format, + test_time_multiplier, + output_format, + expected_latency, +): + # Select the first host from the hosts dictionary + host = list(hosts.values()) + + + # Cleanup before test + cleanup_pattern = f"{build}/tests/test_ffmpeg_end_to_end_latency_*_out_0.{output_format}" + cleanup_output_files(cleanup_pattern) + time.sleep(1) + + # Get the video file info for the selected video format + video_file = yuv_files_end_to_end[video_format] + + # Run the main FFmpeg test (TX and RX pipelines) + ffmpeg_app.execute_test_latency_single_or_dual( + test_time=test_time * test_time_multiplier, + build=build, + hosts=hosts, + type_="frame", + video_format=video_format, + pg_format=video_file["format"], + video_url=os.path.join(media, video_file["filename"]), + output_format=output_format, + dual=False, + ) + + # Find the output file generated by the test + pattern = f"{build}/tests/test_ffmpeg_end_to_end_latency_*_out_0.{output_format}" + recv_files = glob.glob(pattern) + if not recv_files: + raise FileNotFoundError(f"No output file found matching: {pattern}") + # Select the most recently modified output file + recv_file = max(recv_files, key=os.path.getmtime) + + # Path for the latency measurement output image + latency_jpg = f"{build}/tests/ffmpeg_latency_{video_format}_latency.jpg" + script_path = f"{build}/tests/tools/latency_measurement/text_detection.py" + + # Run the latency measurement script and check if latency is within expected bounds + check_latency_from_script(script_path, recv_file, latency_jpg, expected_latency, host=list(hosts.values())[0]) + + # Cleanup after test + cleanup_pattern = f"{build}/tests/test_ffmpeg_end_to_end_latency_*_out_0.{output_format}" + cleanup_output_files(cleanup_pattern) + + time.sleep(5) From 99a4f733cc691e62abf855542cdedd1ef83fc30b Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 28 Jul 2025 09:42:54 +0200 Subject: [PATCH 2/8] Fix linting errors --- tests/validation/mtl_engine/ffmpeg_app.py | 4 ++-- .../single/ffmpeg/test_ffmpeg_end_to_end_latency.py | 9 +-------- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/tests/validation/mtl_engine/ffmpeg_app.py b/tests/validation/mtl_engine/ffmpeg_app.py index f4f498725..e6457cb6e 100755 --- a/tests/validation/mtl_engine/ffmpeg_app.py +++ b/tests/validation/mtl_engine/ffmpeg_app.py @@ -1138,8 +1138,8 @@ def execute_test_latency_single_or_dual( "x=10:y=10:fontcolor=white:box=1:boxcolor=black:boxborderw=10" ) - rx_vf = f' -vf "{drawtext_rx}" ' - tx_vf = f' -vf "{drawtext_tx}" ' + rx_vf = f' -vf "{drawtext_rx}"' + tx_vf = f' -vf "{drawtext_tx}"' output_files = create_empty_output_files(output_format, 1, rx_host, build) rx_output_opts = "" diff --git a/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py b/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py index adf59c537..fe1a4da51 100755 --- a/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py +++ b/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py @@ -3,15 +3,11 @@ import os import pytest -import logging -import subprocess import glob -import re import time from mtl_engine import ffmpeg_app +from 
mtl_engine.ffmpeg_app import check_latency_from_script, cleanup_output_files from mtl_engine.media_files import yuv_files_end_to_end -from mtl_engine.ffmpeg_app import check_latency_from_script -from mtl_engine.ffmpeg_app import cleanup_output_files # Parametrize test for different video formats and time multipliers @@ -40,9 +36,6 @@ def test_ffmpeg_end_to_end_latency( output_format, expected_latency, ): - # Select the first host from the hosts dictionary - host = list(hosts.values()) - # Cleanup before test cleanup_pattern = f"{build}/tests/test_ffmpeg_end_to_end_latency_*_out_0.{output_format}" From 44d9b2f00d79a18821c6d58dbf613a885dd0ccad Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 28 Jul 2025 10:13:52 +0200 Subject: [PATCH 3/8] lint --- tests/validation/mtl_engine/ffmpeg_app.py | 50 ++++++++++++------- .../ffmpeg/test_ffmpeg_end_to_end_latency.py | 23 ++++++--- 2 files changed, 48 insertions(+), 25 deletions(-) diff --git a/tests/validation/mtl_engine/ffmpeg_app.py b/tests/validation/mtl_engine/ffmpeg_app.py index e6457cb6e..dee050fcd 100755 --- a/tests/validation/mtl_engine/ffmpeg_app.py +++ b/tests/validation/mtl_engine/ffmpeg_app.py @@ -739,6 +739,7 @@ def check_output_rgb24(rx_output: str, number_of_sessions: int): return ok_cnt == number_of_sessions + def check_output_video_mp4(output_file: str, video_size: str, host, build: str): # Check output file size try: @@ -753,7 +754,9 @@ def check_output_video_mp4(output_file: str, video_size: str, host, build: str): ) else: logger.info(f"Could not get output file size for {output_file}") - log_to_file(f"Could not get output file size for {output_file}", host, build) + log_to_file( + f"Could not get output file size for {output_file}", host, build + ) return False except Exception as e: logger.info(f"Error checking output file size: {e}") @@ -761,9 +764,7 @@ def check_output_video_mp4(output_file: str, video_size: str, host, build: str): return False # Use ffprobe to check for a video stream and resolution - ffprobe_proc = run( - f"ffprobe -v error -show_streams {output_file}", host=host - ) + ffprobe_proc = run(f"ffprobe -v error -show_streams {output_file}", host=host) codec_name_match = re.search(r"codec_name=([^\n]+)", ffprobe_proc.stdout_text) width_match = re.search(r"width=(\d+)", ffprobe_proc.stdout_text) @@ -788,6 +789,7 @@ def check_output_video_mp4(output_file: str, video_size: str, host, build: str): log_to_file("MP4 check failed", host, build) return False + def create_empty_output_files( output_format: str, number_of_files: int = 1, host=None, build: str = "" ) -> list: @@ -1038,14 +1040,16 @@ def decode_video_format_to_st20p(video_format: str) -> tuple: log_fail(f"Invalid video format: {video_format}") return None -def check_latency_from_script(script_path, recv_file, latency_jpg, expected_latency, host): - """ - Runs the latency measurement script and checks if the measured latency is within expectation. - Returns True if passed, False if failed. - """ + +def check_latency_from_script( + script_path, recv_file, latency_jpg, expected_latency, host +): + # Runs the latency measurement script and checks if the measured latency is within expectation. + # Returns True if passed, False if failed. 
+ logger.info("Installing all dependencies for script...") - #run("python3 -m pip install opencv-python matplotlib pytesseract", host=host, enable_sudo=True) - + # run("python3 -m pip install opencv-python matplotlib pytesseract", host=host, enable_sudo=True) + logger.info("Checking the end-to-end latency...") script_cmd = f"python3 {script_path} {recv_file} {latency_jpg}" result = run(script_cmd, host=host, enable_sudo=True) @@ -1060,10 +1064,14 @@ def check_latency_from_script(script_path, recv_file, latency_jpg, expected_late avg_latency_ms = float(match.group(1)) logger.info(f"Extracted average latency: {avg_latency_ms} ms") if avg_latency_ms <= expected_latency: - logger.info(f"Test passed: average latency {avg_latency_ms} ms is within expected {expected_latency} ms") + logger.info( + f"Test passed: average latency {avg_latency_ms} ms is within expected {expected_latency} ms" + ) passed = True else: - log_fail(f"Test failed: average latency {avg_latency_ms} ms exceeds expected {expected_latency} ms") + log_fail( + f"Test failed: average latency {avg_latency_ms} ms exceeds expected {expected_latency} ms" + ) passed = False else: log_fail("Could not extract average latency from script output.") @@ -1073,6 +1081,7 @@ def check_latency_from_script(script_path, recv_file, latency_jpg, expected_late log_fail("test failed") return passed + def cleanup_output_files(cleanup_pattern): """ Removes all files matching the given glob pattern. @@ -1084,17 +1093,22 @@ def cleanup_output_files(cleanup_pattern): output_files = glob.glob(cleanup_pattern) if not output_files: - logging.info(f"No output files found for cleanup with pattern: {cleanup_pattern}") + logging.info( + f"No output files found for cleanup with pattern: {cleanup_pattern}" + ) for output_file in output_files: try: if os.path.exists(output_file): os.remove(output_file) logging.info(f"Removed output file: {output_file}") else: - logging.info(f"Output file already removed or does not exist: {output_file}") + logging.info( + f"Output file already removed or does not exist: {output_file}" + ) except Exception as file_exc: logging.warning(f"Could not remove output file {output_file}: {file_exc}") + def execute_test_latency_single_or_dual( test_time: int, build: str, @@ -1164,7 +1178,7 @@ def execute_test_latency_single_or_dual( # TX command with drawtext filter and readrate tx_fps_filter = "" - readrate = f" -readrate {(fps/25)/2} " # Reduce readrate by half to simulate sending from partially empty buffers + readrate = f" -readrate {(fps/25)/2} " # Reduce readrate by half to simulate sending from partially empty buffers tx_cmd = ( f"ffmpeg -video_size {video_size} -f rawvideo{readrate} -pix_fmt yuv422p10le " f"-i {video_url} {tx_vf}{tx_fps_filter} -p_port {tx_nic_port_list[1]} " @@ -1227,9 +1241,7 @@ def execute_test_latency_single_or_dual( output_files[0], video_size, rx_host, build, video_url ) case "mp4": - passed = check_output_video_mp4( - output_files[0], video_size, rx_host, build - ) + passed = check_output_video_mp4(output_files[0], video_size, rx_host, build) if not passed: log_fail("test failed") diff --git a/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py b/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py index fe1a4da51..a0d4ed531 100755 --- a/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py +++ b/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py @@ -1,10 +1,11 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright(c) 2024-2025 Intel 
Corporation -import os -import pytest import glob +import os import time + +import pytest from mtl_engine import ffmpeg_app from mtl_engine.ffmpeg_app import check_latency_from_script, cleanup_output_files from mtl_engine.media_files import yuv_files_end_to_end @@ -38,7 +39,9 @@ def test_ffmpeg_end_to_end_latency( ): # Cleanup before test - cleanup_pattern = f"{build}/tests/test_ffmpeg_end_to_end_latency_*_out_0.{output_format}" + cleanup_pattern = ( + f"{build}/tests/test_ffmpeg_end_to_end_latency_*_out_0.{output_format}" + ) cleanup_output_files(cleanup_pattern) time.sleep(1) @@ -69,12 +72,20 @@ def test_ffmpeg_end_to_end_latency( # Path for the latency measurement output image latency_jpg = f"{build}/tests/ffmpeg_latency_{video_format}_latency.jpg" script_path = f"{build}/tests/tools/latency_measurement/text_detection.py" - + # Run the latency measurement script and check if latency is within expected bounds - check_latency_from_script(script_path, recv_file, latency_jpg, expected_latency, host=list(hosts.values())[0]) + check_latency_from_script( + script_path, + recv_file, + latency_jpg, + expected_latency, + host=list(hosts.values())[0], + ) # Cleanup after test - cleanup_pattern = f"{build}/tests/test_ffmpeg_end_to_end_latency_*_out_0.{output_format}" + cleanup_pattern = ( + f"{build}/tests/test_ffmpeg_end_to_end_latency_*_out_0.{output_format}" + ) cleanup_output_files(cleanup_pattern) time.sleep(5) From 59a60490025a0775244c54dfa75f35783379784b Mon Sep 17 00:00:00 2001 From: Your Name Date: Mon, 28 Jul 2025 10:21:30 +0200 Subject: [PATCH 4/8] linter --- tests/validation/mtl_engine/ffmpeg_app.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/validation/mtl_engine/ffmpeg_app.py b/tests/validation/mtl_engine/ffmpeg_app.py index dee050fcd..66afdf397 100755 --- a/tests/validation/mtl_engine/ffmpeg_app.py +++ b/tests/validation/mtl_engine/ffmpeg_app.py @@ -1088,8 +1088,8 @@ def cleanup_output_files(cleanup_pattern): Logs actions and errors. """ import glob - import os import logging + import os output_files = glob.glob(cleanup_pattern) if not output_files: @@ -1163,7 +1163,7 @@ def execute_test_latency_single_or_dual( if output_format == "yuv": rx_output_opts = f" -f rawvideo -pix_fmt yuv422p10le -video_size {video_size}" elif output_format == "mp4": - rx_output_opts = f" -vcodec mpeg4 -qscale:v 3 " + rx_output_opts = " -vcodec mpeg4 -qscale:v 3 " # RX command with drawtext filter rx_cmd = ( From 01fe433c3452ccbae47c4c1e48eba268596ceac4 Mon Sep 17 00:00:00 2001 From: Your Name Date: Tue, 5 Aug 2025 12:25:08 +0200 Subject: [PATCH 5/8] Move cleanup output files to conftest --- tests/validation/conftest.py | 29 +++++++++++++++++++ tests/validation/mtl_engine/ffmpeg_app.py | 27 ----------------- .../ffmpeg/test_ffmpeg_end_to_end_latency.py | 7 ++--- 3 files changed, 32 insertions(+), 31 deletions(-) diff --git a/tests/validation/conftest.py b/tests/validation/conftest.py index 3b0761bcc..ef10bfdab 100755 --- a/tests/validation/conftest.py +++ b/tests/validation/conftest.py @@ -2,6 +2,7 @@ # # Copyright 2024-2025 Intel Corporation # # Media Communications Mesh import datetime +import glob import logging import os import shutil @@ -256,3 +257,31 @@ def log_case(request, caplog: pytest.LogCaptureFixture): ) clear_result_note() + + +@pytest.fixture +def cleanup_output_files(): + """ + Fixture to remove all files matching a given glob pattern. 
+ Usage: cleanup_output_files_fixture(pattern) + """ + + def _cleanup_output(cleanup_pattern): + output_files = glob.glob(cleanup_pattern) + if not output_files: + logging.info( + f"No output files found for cleanup with pattern: {cleanup_pattern}" + ) + for output_file in output_files: + try: + if os.path.exists(output_file): + os.remove(output_file) + logging.info(f"Removed output file: {output_file}") + else: + logging.info( + f"Output file already removed or does not exist: {output_file}" + ) + except Exception as file_exc: + logging.warning(f"Could not remove output file {output_file}: {file_exc}") + + return _cleanup_output \ No newline at end of file diff --git a/tests/validation/mtl_engine/ffmpeg_app.py b/tests/validation/mtl_engine/ffmpeg_app.py index 66afdf397..2c1692cce 100755 --- a/tests/validation/mtl_engine/ffmpeg_app.py +++ b/tests/validation/mtl_engine/ffmpeg_app.py @@ -1082,33 +1082,6 @@ def check_latency_from_script( return passed -def cleanup_output_files(cleanup_pattern): - """ - Removes all files matching the given glob pattern. - Logs actions and errors. - """ - import glob - import logging - import os - - output_files = glob.glob(cleanup_pattern) - if not output_files: - logging.info( - f"No output files found for cleanup with pattern: {cleanup_pattern}" - ) - for output_file in output_files: - try: - if os.path.exists(output_file): - os.remove(output_file) - logging.info(f"Removed output file: {output_file}") - else: - logging.info( - f"Output file already removed or does not exist: {output_file}" - ) - except Exception as file_exc: - logging.warning(f"Could not remove output file {output_file}: {file_exc}") - - def execute_test_latency_single_or_dual( test_time: int, build: str, diff --git a/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py b/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py index a0d4ed531..6da9d88a4 100755 --- a/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py +++ b/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py @@ -7,6 +7,7 @@ import pytest from mtl_engine import ffmpeg_app +from mtl_engine.const import LOG_FOLDER from mtl_engine.ffmpeg_app import check_latency_from_script, cleanup_output_files from mtl_engine.media_files import yuv_files_end_to_end @@ -36,6 +37,7 @@ def test_ffmpeg_end_to_end_latency( test_time_multiplier, output_format, expected_latency, + cleanup_output_files, ): # Cleanup before test @@ -70,7 +72,7 @@ def test_ffmpeg_end_to_end_latency( recv_file = max(recv_files, key=os.path.getmtime) # Path for the latency measurement output image - latency_jpg = f"{build}/tests/ffmpeg_latency_{video_format}_latency.jpg" + latency_jpg = f"{LOG_FOLDER}/ffmpeg_latency_{video_format}_latency.jpg" script_path = f"{build}/tests/tools/latency_measurement/text_detection.py" # Run the latency measurement script and check if latency is within expected bounds @@ -83,9 +85,6 @@ def test_ffmpeg_end_to_end_latency( ) # Cleanup after test - cleanup_pattern = ( - f"{build}/tests/test_ffmpeg_end_to_end_latency_*_out_0.{output_format}" - ) cleanup_output_files(cleanup_pattern) time.sleep(5) From c4ca1f3787c0946a6cb6a116626761ed319ac7ed Mon Sep 17 00:00:00 2001 From: Your Name Date: Tue, 5 Aug 2025 12:26:32 +0200 Subject: [PATCH 6/8] added blank line at the end --- tests/validation/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/validation/conftest.py b/tests/validation/conftest.py index ef10bfdab..dd39daeb9 100755 --- 
a/tests/validation/conftest.py +++ b/tests/validation/conftest.py @@ -284,4 +284,4 @@ def _cleanup_output(cleanup_pattern): except Exception as file_exc: logging.warning(f"Could not remove output file {output_file}: {file_exc}") - return _cleanup_output \ No newline at end of file + return _cleanup_output From 66e6ddba114e5a2cd2910f2db8a39e1f10657f37 Mon Sep 17 00:00:00 2001 From: Your Name Date: Tue, 5 Aug 2025 12:32:39 +0200 Subject: [PATCH 7/8] Removed unused mtl_engine.ffmpeg_app.cleanup_output_files --- .../tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py b/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py index 6da9d88a4..9b769e3d7 100755 --- a/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py +++ b/tests/validation/tests/single/ffmpeg/test_ffmpeg_end_to_end_latency.py @@ -8,7 +8,7 @@ import pytest from mtl_engine import ffmpeg_app from mtl_engine.const import LOG_FOLDER -from mtl_engine.ffmpeg_app import check_latency_from_script, cleanup_output_files +from mtl_engine.ffmpeg_app import check_latency_from_script from mtl_engine.media_files import yuv_files_end_to_end From 6efa663979b22436eb89f2c855528a347939cce7 Mon Sep 17 00:00:00 2001 From: Your Name Date: Tue, 5 Aug 2025 12:36:51 +0200 Subject: [PATCH 8/8] reformatted --- tests/validation/conftest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/validation/conftest.py b/tests/validation/conftest.py index dd39daeb9..d3ca7fb20 100755 --- a/tests/validation/conftest.py +++ b/tests/validation/conftest.py @@ -282,6 +282,8 @@ def _cleanup_output(cleanup_pattern): f"Output file already removed or does not exist: {output_file}" ) except Exception as file_exc: - logging.warning(f"Could not remove output file {output_file}: {file_exc}") + logging.warning( + f"Could not remove output file {output_file}: {file_exc}" + ) return _cleanup_output
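
For context on the pass/fail logic the series relies on: `check_latency_from_script` (added in patch 1, reformatted in patch 3) scrapes the measurement script's stdout for an `Average End-to-End Latency: <value> ms` line and compares it to the per-format threshold from the test's parametrization. Below is a minimal standalone sketch of just that parsing step, using the same regex and comparison that appear in the patches; the sample stdout string and the helper name `parse_and_check_latency` are illustrative only and are not part of the series.

```python
import re


def parse_and_check_latency(stdout: str, expected_latency_ms: float) -> bool:
    """Return True when the reported average latency is within the expected bound."""
    # Same pattern the patch uses to extract the average latency from the script output.
    match = re.search(r"Average End-to-End Latency:\s*([\d.]+)\s*ms", stdout)
    if not match:
        # The patch treats a missing latency line as a failure (log_fail).
        return False
    avg_latency_ms = float(match.group(1))
    return avg_latency_ms <= expected_latency_ms


# Illustrative only: fabricated stdout and the i1080p25 threshold (1620 ms) from the test table.
print(parse_and_check_latency("Average End-to-End Latency: 1543.2 ms", 1620))  # True
```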