From 92c785a29a9948ebbe414e0bb4e3dc12898f980a Mon Sep 17 00:00:00 2001 From: Artur Mukhamadiev Date: Sun, 12 Oct 2025 17:34:51 +0300 Subject: [PATCH] [autotest] added sudo access check --- autotestConfig.py | 19 ++++--- extra.py | 40 +++++++++++++- gstreamerAutotest.py | 121 ++++++++++++++++++++++++++++--------------- requirements.txt | 2 + 4 files changed, 131 insertions(+), 51 deletions(-) mode change 100644 => 100755 gstreamerAutotest.py diff --git a/autotestConfig.py b/autotestConfig.py index 890f23d..3a36cc9 100644 --- a/autotestConfig.py +++ b/autotestConfig.py @@ -1,26 +1,27 @@ import yaml + class AutotestConfig: def __init__(self, path='autotest-conf.yaml'): with open(path, 'r') as file: self.data = yaml.safe_load(file) - + @property def options(self): return self.data["options"] - - @property + + @property def videos(self): return self.data["videos"] - + @property def testsource(self): return self.data["testsource"] - + @property def videosrc(self): return self.data["videosrc"] - + @property def psnr_check(self): return self.data.get("psnr_check", {}) @@ -59,4 +60,8 @@ class AutotestConfig: @property def docker_run_string(self): - return self.data["docker_run_string"] \ No newline at end of file + return self.data["docker_run_string"] + + @property + def results_dir(self): + return self.data.get("results_dir", "results/") diff --git a/extra.py b/extra.py index 2a1bd86..35e043f 100644 --- a/extra.py +++ b/extra.py @@ -1,5 +1,6 @@ from functools import wraps + def log_args_decorator(func): """ A decorator that logs the arguments passed to a function. 
@@ -14,4 +15,41 @@ def log_args_decorator(func): result = func(*args, **kwargs) print(f"Function '{func.__name__}' returned: {result}") return result - return wrapper \ No newline at end of file + return wrapper + + +def sudo_check(file): + import pipelineExec as pe + import subprocess + import logging + import emoji + import os + + uauth_pass = os.getenv("UAUTH") + if uauth_pass is None: + logging.fatal(emoji.emojize(f""" + :warning: Please, create .env file with UAUTH variable. Before running {file}. + UAUTH variable should match your password for sudo access to the docker. + example: + {os.getcwd()}$ cat .env + UAUTH=123 + """) + ) + raise Exception(emoji.emojize( + ":cross_mark: Password isn't set properly")) + else: + logging.debug(emoji.emojize(":warning: UAUTH is set (value not logged) :warning:")) + + proc = subprocess.Popen('sudo -S ls', shell=True, + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, text=True) + pe.passwordAuth(proc) + proc.wait() + if proc.returncode != 0: + logging.fatal(emoji.emojize(f""" + :cross_mark: Password isn't correct in UAUTH + error={proc.returncode} + """)) + raise Exception(emoji.emojize(":cross_mark: Password isn't correct")) + + logging.info(emoji.emojize(":check_mark_button: Sudo access verified")) diff --git a/gstreamerAutotest.py b/gstreamerAutotest.py old mode 100644 new mode 100755 index 3e67040..0a12516 --- a/gstreamerAutotest.py +++ b/gstreamerAutotest.py @@ -2,16 +2,23 @@ from itertools import product import qa from latencyParse import getLatencyTable -import os, stat, subprocess +import os +import stat +import subprocess import pandas as pd -from extra import log_args_decorator +from extra import log_args_decorator, sudo_check from autotestConfig import AutotestConfig +from dotenv import load_dotenv +import emoji import logging # Configure logging to show informational messages -logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
+logging.basicConfig(level=logging.DEBUG, + format='%(asctime)s - %(levelname)s - %(message)s') config = None + + def get_config(): global config if config is None: @@ -27,6 +34,8 @@ def get_config(): # - two key columns: encoder name, parameters string # 5. Run PSNR check after each pipeline and add results in the dataframe # 6. Save dataframe to CSV file + + class Pipeline: def __init__(self): self.pipeline = "gst-launch-1.0 -e " @@ -47,23 +56,24 @@ class Pipeline: def __add_tee(self, encoder): pass - #self.pipeline += "tee name=t t. ! queue max-size-time=5000000000 max-size-bytes=100485760 max-size-buffers=1000 ! filesink location=\"base-autotest.yuv\" " + # self.pipeline += "tee name=t t. ! queue max-size-time=5000000000 max-size-bytes=100485760 max-size-buffers=1000 ! filesink location=\"base-autotest.yuv\" " def add_encoder(self, encoder, params): self.pipeline += get_config().videoconvert[encoder] + " ! " - self.pipeline += "capsfilter caps=video/x-raw,format=" + get_config().formats[encoder] + " ! " - #self.__add_tee(encoder) + self.pipeline += "capsfilter caps=video/x-raw,format=" + \ + get_config().formats[encoder] + " ! " + # self.__add_tee(encoder) self.options += " ".join(params) + " " - #self.pipeline += "t. ! queue max-size-time=5000000000 max-size-bytes=100485760 max-size-buffers=1000 ! " + # self.pipeline += "t. ! queue max-size-time=5000000000 max-size-bytes=100485760 max-size-buffers=1000 ! " self.pipeline += encoder + " " self.pipeline += " ".join(params) + " " return self - + def add_profile(self, profile): self.pipeline += "! capsfilter caps=\"video/x-h264,profile=" + profile + "\" ! " self.options += "profile=" + profile + " " return self - + def to_file(self, filename): self.pipeline += "h264parse ! mpegtsmux ! 
filesink location=\"" + filename + "\"" return self @@ -71,11 +81,11 @@ class Pipeline: def makeVideoSrc(videoName): return ( - get_config().videosrc["raw"][0] + - get_config().videos[videoName] + - get_config().videosrc["raw"][1] + + get_config().videosrc["raw"][0] + + get_config().videos[videoName] + + get_config().videosrc["raw"][1] + get_config().gst_video_info[videoName] - ) + ) def generateEncoderStrings(): @@ -110,19 +120,19 @@ def generate_combinations(config_dict): return combinations -qualityDataframe = pd.DataFrame() -latencyDataframe = pd.DataFrame() -def execPermissions(scriptFile = "to_exec.sh"): +def execPermissions(scriptFile="to_exec.sh"): current_permissions = os.stat(scriptFile).st_mode new_permissions = current_permissions | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH os.chmod(scriptFile, new_permissions) + def writeToExecFile(contents, file): with open(file, "w") as f: f.write(str(contents)) execPermissions(file) + def is_docker(func): def wrapper(pipeline): script_name = "to_exec.sh" @@ -130,39 +140,45 @@ def is_docker(func): if encoder in pipeline: writeToExecFile(pipeline, script_name) pipeline = get_config().docker_run_string + f" {script_name}" - + func(pipeline) return wrapper + def is_sudo(pipeline): if pipeline.startswith("sudo"): - return True + return True return False + def passwordAuth(proc): password = os.getenv("UAUTH") if password is not None: proc.communicate(password) + def printLog(file): with open(file, "r") as f: out = f.read() print(out) + @is_docker @log_args_decorator def run_pipeline(pipeline): logfile = "pipeline-log.txt" with open(logfile, "w") as f: - proc = subprocess.Popen(pipeline, shell=True, - stdin=subprocess.PIPE, stdout=f, - stderr=subprocess.STDOUT, text=True) + proc = subprocess.Popen(pipeline, shell=True, + stdin=subprocess.PIPE, stdout=f, + stderr=subprocess.STDOUT, text=True) if is_sudo(pipeline): - passwordAuth(proc) - code = proc.wait() + passwordAuth(proc) + code = proc.wait() printLog(logfile) if 
proc.returncode != 0: - raise Exception("Pipeline failed, see log for details") + raise Exception(emoji.emojize( + ":cross_mark: Pipeline failed, see log for details")) + def time_trace(func): def wrapper(): @@ -171,11 +187,16 @@ def time_trace(func): func() end_time = time.time() elapsed_time = end_time - start_time - print(f"Total execution time: {elapsed_time} seconds") + print(emoji.emojize( + f":alarm_clock: Total execution time: {elapsed_time} seconds")) return wrapper + @time_trace def run_autotest(): + qualityDataframe = pd.DataFrame() + latencyDataframe = pd.DataFrame() + encoders = generateEncoderStrings() for encoder, combinations in encoders.items(): qualityDataframe = pd.DataFrame() @@ -184,45 +205,59 @@ def run_autotest(): for profile in get_config().profiles: for videoName, videoPath in get_config().videos.items(): for _ in range(get_config().repeats): - filename = "autotest-" + encoder + "-" + profile + "-test-" + videoName + ".mp4" + filename = "autotest-" + encoder + "-" + \ + profile + "-test-" + videoName + ".mp4" pipeline = Pipeline() - pipeline = ( + pipeline = ( pipeline.add_tracing() .add_source(makeVideoSrc(videoName)) .add_encoder(encoder, params.split(" ")) .add_profile(profile) .to_file(filename) ) - print(pipeline.pipeline) + logging.debug(pipeline.pipeline) try: run_pipeline(pipeline.pipeline) except Exception as e: - print(f"Error occurred: {e}") + logging.error(emoji.emojize( + f":cross_mark: Error occurred: {e}")) continue psnr_metrics, ssim_metrics = qa.run_quality_check( - videoPath, - filename, - get_config().video_info[videoName] + " " + get_config().psnr_check[encoder] - ) - dfPsnr = qa.parse_quality_report(psnr_metrics, ssim_metrics) + videoPath, + filename, + get_config().video_info[videoName] + + " " + get_config().psnr_check[encoder] + ) + dfPsnr = qa.parse_quality_report( + psnr_metrics, ssim_metrics) print("-----") - dfLatency = getLatencyTable(get_config().latency_filename) + dfLatency = getLatencyTable( + 
get_config().latency_filename) columnsQ = pd.MultiIndex.from_tuples( - [(encoder, profile, videoName, params, col) for col in dfPsnr.columns] + [(encoder, profile, videoName, params, col) + for col in dfPsnr.columns] ) columnsLatency = pd.MultiIndex.from_tuples( - [(encoder, profile, videoName, params, col) for col in dfLatency.columns] + [(encoder, profile, videoName, params, col) + for col in dfLatency.columns] ) dfPsnr.columns = columnsQ dfLatency.columns = columnsLatency - qualityDataframe = pd.concat([qualityDataframe, dfPsnr], axis=1) - latencyDataframe = pd.concat([latencyDataframe, dfLatency], axis=1) - print("=====") + qualityDataframe = pd.concat( + [qualityDataframe, dfPsnr], axis=1) + latencyDataframe = pd.concat( + [latencyDataframe, dfLatency], axis=1) + print("="*50) print("Current results:") print(dfPsnr) print(dfLatency) - qualityDataframe.to_csv(f"qualityResults{encoder}.csv") - latencyDataframe.to_csv(f"latencyDataframe{encoder}.csv") + qualityDataframe.to_csv( + get_config().results_dir + f"qualityResults{encoder}.csv") + latencyDataframe.to_csv( + get_config().results_dir + f"latencyDataframe{encoder}.csv") + if __name__ == "__main__": - run_autotest() \ No newline at end of file + load_dotenv() + sudo_check(__file__) + run_autotest() diff --git a/requirements.txt b/requirements.txt index 0b770d5..3ef2559 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,6 @@ contourpy==1.3.3 cycler==0.12.1 +emoji==2.15.0 fonttools==4.60.1 kiwisolver==1.4.9 matplotlib==3.10.7 @@ -9,6 +10,7 @@ pandas==2.3.3 pillow==11.3.0 pyparsing==3.2.5 python-dateutil==2.9.0.post0 +python-dotenv==1.1.1 pytz==2025.2 PyYAML==6.0.3 six==1.17.0