[logging] fully migrated console output to the logging module, writing to gstreamerAutotest.log

This commit is contained in:
Artur Mukhamadiev 2025-10-12 18:28:19 +03:00
parent c10ee32918
commit 900aca9bd5
5 changed files with 41 additions and 32 deletions

View File

@ -1,20 +1,20 @@
repeats: 3
options:
# x264enc:
# bitrate: [ "10000", "20000", "5000" ]
# speed-preset: [ "ultrafast", "fast", "medium" ]
# tune: [ "zerolatency" ]
# sliced-threads: [ "true", "false" ]
# b-adapt: [ "true", "false" ]
# rc-lookahead: [ "40", "0" ]
# ref: [ "3", "0" ]
# nvh264enc:
# bitrate: [ "10000", "20000", "5000" ]
# preset: [ "4", "5", "1" ]
# rc-lookahead: [ "0" ]
# rc-mode: [ "2", "0", "5" ]
# zerolatency: [ "true", "false" ]
x264enc:
bitrate: [ "10000", "20000", "5000" ]
speed-preset: [ "ultrafast", "fast", "medium" ]
tune: [ "zerolatency" ]
sliced-threads: [ "true", "false" ]
b-adapt: [ "true", "false" ]
rc-lookahead: [ "40", "0" ]
ref: [ "3", "0" ]
nvh264enc:
bitrate: [ "10000", "20000", "5000" ]
preset: [ "4", "5", "1" ]
rc-lookahead: [ "0" ]
rc-mode: [ "2", "0", "5" ]
zerolatency: [ "true", "false" ]
nvv4l2h264enc:
bitrate: [ "10000000", "20000000", "5000000" ]
profile: [ "0", "1", "2" ]

View File

@ -1,4 +1,5 @@
from functools import wraps
import logging
def log_args_decorator(func):
@ -11,9 +12,10 @@ def log_args_decorator(func):
pos_args = dict(zip(arg_names, args))
all_args = {**pos_args, **kwargs}
print(f"Calling function '{func.__name__}' with arguments: {all_args}")
logging.debug(
f"Calling function '{func.__name__}' with arguments: {all_args}")
result = func(*args, **kwargs)
print(f"Function '{func.__name__}' returned: {result}")
logging.info(f"Function '{func.__name__}' returned: {result}")
return result
return wrapper

View File

@ -1,5 +1,6 @@
#!/usr/bin/python
from itertools import product
from tqdm import tqdm
import qa
from latencyParse import getLatencyTable
import os
@ -15,10 +16,12 @@ import logging
# Configure logging to capture debug-and-above messages to a per-script log file
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s - %(levelname)s - %(message)s')
format='%(asctime)s - %(levelname)s - %(message)s',
filename=f"{os.path.basename(__file__).split('.')[0]}.log")
config = None
def parse_args():
parser = argparse.ArgumentParser(prog=__file__)
parser.add_argument('-c', '--config',
@ -27,6 +30,7 @@ def parse_args():
help="Path to autotest configuration yaml file")
return parser.parse_args()
def get_config():
global config
if config is None:
@ -168,7 +172,7 @@ def passwordAuth(proc):
def printLog(file):
with open(file, "r") as f:
out = f.read()
print(out)
logging.debug(f"\n{out}")
@is_docker
@ -195,7 +199,7 @@ def time_trace(func):
func()
end_time = time.time()
elapsed_time = end_time - start_time
print(emoji.emojize(
logging.info(emoji.emojize(
f":alarm_clock: Total execution time: {elapsed_time} seconds"))
return wrapper
@ -204,15 +208,14 @@ def time_trace(func):
def run_autotest():
qualityDataframe = pd.DataFrame()
latencyDataframe = pd.DataFrame()
encoders = generateEncoderStrings()
for encoder, combinations in encoders.items():
for encoder, combinations in tqdm(encoders.items(), desc="Encoder loop"):
qualityDataframe = pd.DataFrame()
latencyDataframe = pd.DataFrame()
for params in combinations:
for params in tqdm(combinations, desc="params combination loop", leave=False):
for profile in get_config().profiles:
for videoName, videoPath in get_config().videos.items():
for _ in range(get_config().repeats):
for videoName, videoPath in tqdm(get_config().videos.items(), desc="videos loop", leave=False):
for _ in tqdm(range(get_config().repeats), desc="repeat loop", leave=False):
filename = "autotest-" + encoder + "-" + \
profile + "-test-" + videoName + ".mp4"
pipeline = Pipeline()
@ -238,7 +241,7 @@ def run_autotest():
)
dfPsnr = qa.parse_quality_report(
psnr_metrics, ssim_metrics)
print("-----")
logging.info("-----")
dfLatency = getLatencyTable(
get_config().latency_filename)
columnsQ = pd.MultiIndex.from_tuples(
@ -255,10 +258,10 @@ def run_autotest():
[qualityDataframe, dfPsnr], axis=1)
latencyDataframe = pd.concat(
[latencyDataframe, dfLatency], axis=1)
print("="*50)
print("Current results:")
print(dfPsnr)
print(dfLatency)
logging.info("="*50)
logging.info("Current results:")
logging.info(f"\n{dfPsnr}")
logging.info(f"\n{dfLatency}")
qualityDataframe.to_csv(
get_config().results_dir + f"qualityResults{encoder}.csv")
latencyDataframe.to_csv(

View File

@ -1,6 +1,7 @@
#!/usr/bin/python3
import pandas as pd
import numpy as np
import logging
# The idea is as follows:
# over a set of experiments we compute all latency information -> avg, std, and max for each element; the total is not computed, because it would require
# additional parsing of parallel branches (from tee)
@ -17,7 +18,8 @@ def findWord(words, wordToSearch):
return words[idx]
else:
if idx >= len(words):
print(f"ERROR: trying to access index={idx} while: {words}")
logging.error(
f"ERROR: trying to access index={idx} while: {words}")
for word in words:
if word.startswith(wordToSearch):
idx = words.index(word)
@ -70,7 +72,7 @@ def readAndParse(filename):
def getLatencyTable(filename):
parsed = readAndParse(filename)
df = pd.DataFrame(parsed)
print(df)
logging.debug(f"\n{df}")
latency_row = df.loc['latency']
ts_list = df.loc['ts']
@ -90,8 +92,9 @@ def getLatencyTable(filename):
resultDf = pd.concat(
[df_dt_max, max_latency, avg_latency, median_latency, std_latency], axis=1)
resultDf.columns = ['dTmax', 'max', 'avg', 'median', 'std']
print(resultDf)
logging.debug(f"\n{resultDf}")
return resultDf
if __name__ == "__main__":
getLatencyTable("latency_traces-x264enc-kpop-test-10.log")

View File

@ -14,4 +14,5 @@ python-dotenv==1.1.1
pytz==2025.2
PyYAML==6.0.3
six==1.17.0
tqdm==4.67.1
tzdata==2025.2