From 7b92fb60731540f0a934e28e5f92ecfe857aacae Mon Sep 17 00:00:00 2001 From: Artur Date: Fri, 10 Oct 2025 21:08:12 +0300 Subject: [PATCH] [autotest] docker execution --- .gitignore | 7 +- PyScripts/extra.py | 17 +++ PyScripts/gstreamerAutotest.py | 184 ++++++++++++++++++++++----------- PyScripts/latencyParse.py | 30 ++++-- PyScripts/qa.py | 5 + 5 files changed, 172 insertions(+), 71 deletions(-) create mode 100644 PyScripts/extra.py diff --git a/.gitignore b/.gitignore index 192c459..c92c6b2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,7 @@ *.log -container/Drivers/* \ No newline at end of file +container/Drivers/* +__pycache__ +*.yuv +*.mp4 +*.csv +*log*txt \ No newline at end of file diff --git a/PyScripts/extra.py b/PyScripts/extra.py new file mode 100644 index 0000000..2a1bd86 --- /dev/null +++ b/PyScripts/extra.py @@ -0,0 +1,17 @@ +from functools import wraps + +def log_args_decorator(func): + """ + A decorator that logs the arguments passed to a function. + """ + @wraps(func) + def wrapper(*args, **kwargs): + arg_names = func.__code__.co_varnames[:func.__code__.co_argcount] + pos_args = dict(zip(arg_names, args)) + all_args = {**pos_args, **kwargs} + + print(f"Calling function '{func.__name__}' with arguments: {all_args}") + result = func(*args, **kwargs) + print(f"Function '{func.__name__}' returned: {result}") + return result + return wrapper \ No newline at end of file diff --git a/PyScripts/gstreamerAutotest.py b/PyScripts/gstreamerAutotest.py index f7b3564..bc82f27 100644 --- a/PyScripts/gstreamerAutotest.py +++ b/PyScripts/gstreamerAutotest.py @@ -2,17 +2,19 @@ from itertools import product import qa from latencyParse import getLatencyTable +import os, stat, subprocess +import pandas as pd +from extra import log_args_decorator options = { "x264enc": { "bitrate": ["10000", "20000", "5000"], "speed-preset": ["ultrafast", "fast", "medium"], "tune": ["zerolatency"], - "slices-threads": ["true", "false"], + "sliced-threads": ["true", "false"], "b-adapt": ["true", "false"], "rc-lookahead": ["40", "0"], "ref": ["3", "0"] - }, "nvh264enc": { "bitrate": ["10000", "20000", "5000"], @@ -20,43 +22,70 @@ options = { "rc-lookahead": ["0"], "rc-mode": ["2", "0", "5"], "zerolatency": ["true", "false"], - + }, + "nvv4l2h264enc": { + "bitrate": ["10000000", "20000000", "5000000"], + "profile": ["0", "1", "2"], + "preset-id": ["1", "2", "3"], + "control-rate": ["1", "2"], + "tuning-info-id": ["4", "2", "3"] } - # , - # "nvv4l2h264enc": { - # "bitrate": ["10000000", "20000000", "5000000"], - # "profile": ["0", "1", "2"], - # "preset-id": ["1", "2", "3"], - # "control-id": ["1", "2"], - # "tuning-info-id": ["4", "2"] - # } } -videos = [""] +videos = { + "base-daVinci": "./base-daVinci-stereo-left-10.yuv" +} testsource = "videotestsrc pattern=smpte" -videosrc = ["filesrc location=", "! qtdemux ! h264parse ! avdec_h264"] + +videosrc = { + "raw":["filesrc location=", " ! rawvideoparse "], + "h264": ["filesrc location=", " ! 
decodebin"] + } psnr_check = { "x264enc": "-pixel_format yuv420p -color_range pc", "nvh264enc": "-pixel_format nv12 -color_range tv", - "nvv4l2h264enc": "-pixel_format yuv420p -color_range tv" + "nvv4l2h264enc": "-pixel_format nv12 -color_range tv" } +with_docker = [ "nvv4l2h264enc" ] + formats = { "x264enc": "I420", "nvh264enc": "NV12", - "nvv4l2h264enc": "I420" + "nvv4l2h264enc": "NV12" } profiles = ["baseline", "main"] +encoder_prefix = { + "nvv4l2h264enc": " nvvideoconvert !", + "nvh264enc": "", + "x264enc": "" +} + video_info = { - "video1":"-video_size 1920x1080 -framerate 23.98" + "video1":"-video_size 1920x1080 -framerate 23.98", + "sample-surgery":"-video_size 1280x720 -framerate 29.97", + "base-daVinci": "-video_size 1280x720 -framerate 59.94" +} +gst_video_info = { + "video1":"format=I420,height=1080,width=1920,framerate=24000/1001", + "base-daVinci": "format=2 height=720 width=1280 colorimetry=bt601 framerate=60000/1001" } latency_filename = "latency-traces-autotest.log" +# Step-by-step: +# 1. Generate all combinations for each encoder +# 2. For each combination, create a GStreamer pipeline string +# 3. Start each pipeline with latency tracing enabled +# 3.1 Monitor CPU, GPU and memory usage during each pipeline run (nah, later, maybe) +# 4. Start latency parsing script after each pipeline and store results in a pandas dataframe: +# - two key columns: encoder name, parameters string +# 5. Run PSNR check after each pipeline and add results in the dataframe +# 6. Save dataframe to CSV file class Pipeline: def __init__(self): self.pipeline = "gst-launch-1.0 -e " @@ -72,33 +101,34 @@ class Pipeline: return self def add_source(self, source): - self.pipeline += source + " ! videoconvert ! " + self.pipeline += source + " ! clocksync sync-to-first=true ! videoconvert ! " return self def __add_tee(self, encoder): self.pipeline += "capsfilter caps=video/x-raw,format=" + formats[encoder] + " ! " - self.pipeline += "tee name=t t. ! queue ! filesink location=\"base-autotest.yuv\" " + self.pipeline += "tee name=t t. ! queue max-size-time=5000000000 max-size-bytes=100485760 max-size-buffers=1000 ! filesink location=\"base-autotest.yuv\" " def add_encoder(self, encoder, params): self.__add_tee(encoder) self.options += " ".join(params) + " " - self.pipeline += "t. ! queue ! " + self.pipeline += "t. ! queue max-size-time=5000000000 max-size-bytes=100485760 max-size-buffers=1000 ! " + self.pipeline += encoder_prefix[encoder] self.pipeline += encoder + " " self.pipeline += " ".join(params) + " " return self def add_profile(self, profile): - self.pipeline += "capsfilter caps=\"video/x-h264,profile=" + profile + "\" ! " + self.pipeline += "! capsfilter caps=\"video/x-h264,profile=" + profile + "\" ! " self.options += "profile=" + profile + " " return self def to_file(self, filename): - self.pipeline += "h264parse ! mp4mux ! filesink location=\"" + filename + "\"" + self.pipeline += "h264parse ! mpegtsmux ! filesink location=\"" + filename + "\"" return self -def makeVideoSrc(idx): - return videosrc[0] + videos[idx] + videosrc[1] +def makeVideoSrc(videoName): + return videosrc["raw"][0] + videos[videoName] + videosrc["raw"][1] + gst_video_info[videoName] def generateEncoderStrings(): @@ -133,45 +163,85 @@ def generate_combinations(config_dict): return combinations -# Step-by-step: -# 1. Generate all combinations for each encoder -# 2. For each combination, create a GStreamer pipeline string -# 3. 
Start each pipeline with latency tracing enabled -# 3.1 Monitor CPU, GPU and memory usage during each pipeline run (nah, later, maybe) -# 4. Start latency parsing script after each pipeline and store results in a pandas dataframe: -# - two key columns: encoder name, parameters string -# 5. Run PSNR check after each pipeline and add results in the dataframe -# 6. Save dataframe to CSV file -import pandas as pd - qualityDataframe = pd.DataFrame() latencyDataframe = pd.DataFrame() +dockerRunString = "sudo -S docker container exec deepstream-gst bash" -def run_pipeline(pipeline): - import subprocess - print("Running pipeline:") - print(pipeline) - with open("pipeline-log.txt", "w") as f: - proc = subprocess.run(pipeline, shell=True, stdout=f, stderr=subprocess.STDOUT, text=True) - print(f"Pipeline finished with return code: {proc.returncode}") - with open("pipeline-log.txt", "r") as f: +def execPermissions(scriptFile = "to_exec.sh"): + current_permissions = os.stat(scriptFile).st_mode + new_permissions = current_permissions | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH + os.chmod(scriptFile, new_permissions) + +def writeToExecFile(contents, file): + with open(file, "w") as f: + f.write(str(contents)) + execPermissions(file) + +def is_docker(func): + def wrapper(pipeline): + script_name = "to_exec.sh" + for encoder in with_docker: + if encoder in pipeline: + writeToExecFile(pipeline, script_name) + pipeline = dockerRunString + f" {script_name}" + + func(pipeline) + return wrapper + +def is_sudo(pipeline): + if pipeline.startswith("sudo"): + return True + return False + +def passwordAuth(proc): + password = os.getenv("UAUTH") + if password is not None: + proc.communicate(password) + +def printLog(file): + with open(file, "r") as f: out = f.read() - print(out) + print(out) + +@is_docker +@log_args_decorator +def run_pipeline(pipeline): + logfile = "pipeline-log.txt" + with open(logfile, "w") as f: + proc = subprocess.Popen(pipeline, shell=True, + stdin=subprocess.PIPE, stdout=f, + stderr=subprocess.STDOUT, text=True) + if is_sudo(pipeline): + passwordAuth(proc) + code = proc.wait() + printLog(logfile) if proc.returncode != 0: raise Exception("Pipeline failed, see log for details") +def time_trace(func): + def wrapper(): + import time + start_time = time.time() + func() + end_time = time.time() + elapsed_time = end_time - start_time + print(f"Total execution time: {elapsed_time} seconds") + return wrapper + +@time_trace def run_autotest(): - global qualityDataframe, latencyDataframe encoders = generateEncoderStrings() for encoder, combinations in encoders.items(): + qualityDataframe = pd.DataFrame() + latencyDataframe = pd.DataFrame() for params in combinations: for profile in profiles: - for idx in range(len(videos)): - filename = "autotest-" + encoder + "-" + profile + "-test-" + str(idx) + ".mp4" + for videoName, videoPath in videos.items(): + filename = "autotest-" + encoder + "-" + profile + "-test-" + videoName + ".mp4" pipeline = Pipeline() pipeline = ( pipeline.add_tracing() - .add_source(makeVideoSrc(idx)) + .add_source(makeVideoSrc(videoName)) .add_encoder(encoder, params.split(" ")) .add_profile(profile) .to_file(filename) @@ -183,18 +253,18 @@ def run_autotest(): print(f"Error occurred: {e}") continue psnr_metrics, ssim_metrics = qa.run_quality_check( - videos[idx], + videoPath, filename, - video_info[videos[idx]] + " " + psnr_check[encoder] + video_info[videoName] + " " + psnr_check[encoder] ) dfPsnr = qa.parse_quality_report(psnr_metrics, ssim_metrics) print("-----") dfLatency = 
getLatencyTable(latency_filename) columnsQ = pd.MultiIndex.from_tuples( - [(encoder, profile, params, col) for col in dfPsnr.columns] + [(encoder, profile, videoName, params, col) for col in dfPsnr.columns] ) columnsLatency = pd.MultiIndex.from_tuples( - [(encoder, profile, params, col) for col in dfLatency.columns] + [(encoder, profile, videoName, params, col) for col in dfLatency.columns] ) dfPsnr.columns = columnsQ dfLatency.columns = columnsLatency @@ -204,13 +274,7 @@ def run_autotest(): print("Current results:") print(dfPsnr) print(dfLatency) + qualityDataframe.to_csv(f"qualityResults{encoder}.csv") + latencyDataframe.to_csv(f"latencyDataframe{encoder}.csv") -def run_timetracer(): - import time - start_time = time.time() - run_autotest() - end_time = time.time() - elapsed_time = end_time - start_time - print(f"Total execution time: {elapsed_time} seconds") - -run_timetracer() \ No newline at end of file +run_autotest() \ No newline at end of file diff --git a/PyScripts/latencyParse.py b/PyScripts/latencyParse.py index 77aeb13..ab74639 100644 --- a/PyScripts/latencyParse.py +++ b/PyScripts/latencyParse.py @@ -4,16 +4,20 @@ import numpy as np # Idea is next: # on set of experiments we are calculating all latency information -> each element avg, std, max numbers, total is not calculated, because it requires # additional parsing for parallel branches (from tee) -# Ideally we would write data to table +# Ideally we would write data to table idxCache = dict() + def findWord(words, wordToSearch): global idxCache if wordToSearch in idxCache: for idx in idxCache[wordToSearch]: - if words[idx].startswith(wordToSearch): + if idx < len(words) and words[idx].startswith(wordToSearch): return words[idx] + else: + if idx >= len(words): + print(f"ERROR: trying to access index={idx} while: {words}") for word in words: if word.startswith(wordToSearch): idx = words.index(word) @@ -24,9 +28,11 @@ def findWord(words, wordToSearch): return "" # taken with love from GStreamerLatencyPlotter implementation + + def readAndParse(filename): result = dict() - + global idxCache with open(filename, "r") as latencyFile: lines = latencyFile.readlines() for line in lines: @@ -35,12 +41,12 @@ def readAndParse(filename): words = line.split() if not words[len(words) - 1].startswith("ts="): continue - + def findAndRemove(wordToSearch): res = findWord(words, wordToSearch) res = res[res.find(")") + 1:len(res) - 1] return res - + name = findWord(words, "element=(string)") if name == "": name = findWord(words, "src-element=(string)") @@ -49,12 +55,15 @@ def readAndParse(filename): src = findAndRemove("src=(string)") name = name[name.find(")") + 1:len(name) - 1] if name not in result: - result[name] = {"latency":[], "ts":[]} + result[name] = {"latency": [], "ts": []} timeWord = findAndRemove("time=(guint64)") tsWord = findAndRemove("ts=(guint64)") - result[name]["latency"].append(int(timeWord)/1e6) # time=(guint64)=14 - result[name]["ts"].append(int(tsWord)/1e9) # ts=(guint64)=12 + result[name]["latency"].append( + int(timeWord)/1e6) # time=(guint64)=14 + result[name]["ts"].append(int(tsWord)/1e9) # ts=(guint64)=12 + # drop cache for future runs + idxCache = dict() return result @@ -78,9 +87,10 @@ def getLatencyTable(filename): dt_max_latency[column] = dt df_dt_max = pd.Series(dt_max_latency) - resultDf = pd.concat([df_dt_max, max_latency, avg_latency, median_latency, std_latency], axis=1) + resultDf = pd.concat( + [df_dt_max, max_latency, avg_latency, median_latency, std_latency], axis=1) resultDf.columns = ['dTmax', 'max', 
'avg', 'median', 'std']
     print(resultDf)
 
     return resultDf
 
-getLatencyTable("latency_traces-x264enc-kpop-test-10.log")
\ No newline at end of file
+# getLatencyTable("latency_traces-x264enc-kpop-test-10.log")
diff --git a/PyScripts/qa.py b/PyScripts/qa.py
index e686a4c..3f1e901 100644
--- a/PyScripts/qa.py
+++ b/PyScripts/qa.py
@@ -4,6 +4,8 @@ import pandas as pd
 
 def run_psnr_check(original, encoded, video_info):
     out = ""
+    # NOTE: the ffmpeg command is built as one f-string and split; fragile, but simple
+    # "-f rawvideo {video_info}" describes only the raw reference input
     options = f"-f rawvideo {video_info} -i {original} -i {encoded} -filter_complex psnr -f null /dev/null"
     with open("ffmpeg-log.txt", "w") as f:
         proc = subprocess.run(["ffmpeg", *options.split()], stdout=f, stderr=subprocess.STDOUT, text=True)
@@ -13,6 +15,9 @@ def run_psnr_check(original, encoded, video_info):
     return out
 
 def run_ssim_check(original, encoded, video_info):
+    # NOTE: the ffmpeg command is built as one f-string and split; fragile, but simple
+    # "-f rawvideo {video_info}" describes only the raw reference input
+    # the h264-encoded file is self-describing, so it needs no extra format information
     options = f"-f rawvideo {video_info} -i {original} -i {encoded} -filter_complex ssim -f null /dev/null"
     with open("ffmpeg-log.txt", "w") as f:
         proc = subprocess.run(["ffmpeg", *options.split()], stdout=f, stderr=subprocess.STDOUT, text=True)
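
Note on the parameter sweep: the bodies of generateEncoderStrings() and generate_combinations() fall outside the hunks above, so the snippet below is only a sketch of the assumed behaviour, namely a Cartesian product over the option lists (hence the "from itertools import product" context line at the top of gstreamerAutotest.py) rendered as the space-separated key=value strings that add_encoder() later splits. The helper name is hypothetical.

    from itertools import product

    # Hypothetical helper mirroring what generate_combinations() appears to produce:
    # one "key=value key=value ..." string per element of the Cartesian product.
    def sketch_generate_combinations(config_dict):
        keys = list(config_dict.keys())
        return [
            " ".join(f"{k}={v}" for k, v in zip(keys, values))
            for values in product(*(config_dict[k] for k in keys))
        ]

    # Example with a slice of the x264enc options from the patch:
    # sketch_generate_combinations({"tune": ["zerolatency"], "ref": ["3", "0"]})
    # -> ['tune=zerolatency ref=3', 'tune=zerolatency ref=0']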
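
Note on the result files: run_autotest() now dumps qualityResults<encoder>.csv and latencyDataframe<encoder>.csv per encoder, and both dataframes carry five column levels (encoder, profile, video name, parameter string, metric). A minimal sketch of reading one pair back with pandas; the x264enc filenames are just one instance of the f-string pattern above.

    import pandas as pd

    # Five header rows reconstruct the MultiIndex columns written by to_csv().
    quality = pd.read_csv("qualityResultsx264enc.csv", header=[0, 1, 2, 3, 4], index_col=0)
    latency = pd.read_csv("latencyDataframex264enc.csv", header=[0, 1, 2, 3, 4], index_col=0)

    # Top-level selection drills down encoder -> profile -> video -> params -> metric,
    # e.g. quality["x264enc"]["baseline"]
    print(quality.columns.nlevels)   # -> 5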