#!/usr/bin/python3
|
|
|
|
import sys,os,functools,re
|
|
|
|
|
|
|
|
# Maps the server binary name recorded in info.txt to the human-readable
# scheduling strategy name used in the output CSV. Both the "-single" and
# the plain variant of each binary map to the same strategy.
convert = {
    name: strat
    for base, strat in [
        ("dcall-lightning-server", "donar"),
        ("dcall-dup2-server", "torfone"),
        ("dcall-simple-server", "simple"),
    ]
    for name in (base + "-single", base)
}
|
|
|
|
|
|
|
|
def extract_info(inf, s):
    """Parse an experiment's info.txt and record its mode and strategy.

    Reads the file at path `inf`, extracts the experiment mode from the
    `identifier=jan_dcall_<mode>` field and the scheduling strategy from the
    `server= <binary> <num> ...` field (binary name translated through
    `convert`), storing both in s['current']. When a trailing
    `tick_tock=<0|1>` argument is present, the strategy name is suffixed
    with "-alternate" (1) or "-double-send" (otherwise).

    Returns True on success, False on a parse or read error (logged to
    stdout). A KeyError from an unknown server binary is caught by the
    broad except and reported as a read error, as in the original.
    """
    try:
        with open(inf) as f:
            # f.read() replaces the original ''.join(f.readlines()) idiom.
            full = f.read()

        w = re.search(r'identifier=jan_dcall_(\w+)', full)
        if not w:
            return False
        s['current']['mode'] = w.group(1)

        x = re.search(r'server= (\S+) \d+ ((\S+) (\d+))?', full)
        if not x:
            print("parse error for", inf)
            return False

        s['current']['strat'] = convert[x.group(1)]
        # Group 3 holds the first optional trailing server argument
        # (e.g. "tick_tock=1"); it is None when the optional part is absent.
        if x.group(3) is not None:
            y = re.search(r'tick_tock=(\d)', x.group(3))
            if y:
                s['current']['strat'] += "-alternate" if y.group(1) == '1' else "-double-send"
        return True
    except Exception as e:
        print("read error", inf, e)
        return False
|
|
|
|
|
|
|
|
def categorize(f, s):
    """Placeholder categorization hook; accepts every folder.

    Kept in the pipeline so extract() can chain it; always returns True.
    """
    return True
|
|
|
|
|
|
|
|
def extract_dcall(f, s):
    """Count successful vs failed audio buffer reads in a dcall client log.

    Scans the gstreamer client log at path `f` line by line. The first
    whitespace-separated token of each line is a timestamp; only lines whose
    minute component (second ':'-separated field) is in [2, 12] are counted.
    Increments s['current']['success_buf'] for "Using buffer of size 160"
    lines and s['current']['failed_buf'] for "Using NULL buffer" lines.

    Returns True on success, False on a read error (logged to stdout).
    """
    s['current']['success_buf'] = 0
    s['current']['failed_buf'] = 0
    try:
        # Use a distinct handle name instead of the original
        # `with open(f) as f`, which shadowed the path argument.
        with open(f) as log:
            for line in log:
                time = line.split()[0]
                minutes = int(time.split(':')[1])
                # Skip the first and last minutes of the run
                # (presumably call setup/teardown — TODO confirm).
                if minutes <= 1 or minutes >= 13:
                    continue
                if 'Using buffer of size 160' in line:
                    s['current']['success_buf'] += 1
                elif 'Using NULL buffer' in line:
                    s['current']['failed_buf'] += 1
        #print(s['current'])
        return True
    except Exception as e:
        # BUG FIX: the original printed the undefined name `inf` here,
        # raising NameError whenever the log could not be read.
        print("read error", f, e)
        return False
|
|
|
|
|
2020-02-05 08:12:21 +00:00
|
|
|
def aggregate_failed_pct(s):
    """Fold the current run's buffer counts into the per-strategy aggregates.

    Keyed by (strategy, mode), increments:
      - s['failed_1pct'][k] when fewer than 99% of the expected buffers arrived,
      - s['failed_5pct'][k] when fewer than 95% did,
      - s['run_per_strat'][k] unconditionally (total runs per key).

    Always returns True so it can be chained with `and` in extract_folder.
    """
    # Expected buffer count for a full run. NOTE(review): 16500 matches one
    # buffer per 40 ms over the 11 counted minutes (660 s / 0.04) — confirm.
    expected = 16500

    k = (s['current']['strat'], s['current']['mode'])
    success = s['current']['success_buf']

    # setdefault replaces the original `if k not in d: d[k] = 0` pattern.
    s['failed_1pct'].setdefault(k, 0)
    if success < expected * 0.99:
        s['failed_1pct'][k] += 1

    s['failed_5pct'].setdefault(k, 0)
    if success < expected * 0.95:
        s['failed_5pct'][k] += 1

    s['run_per_strat'][k] = s['run_per_strat'].setdefault(k, 0) + 1

    return True
|
|
|
|
|
2020-02-04 18:30:49 +00:00
|
|
|
def extract_folder(f, s):
    """Run the full extraction pipeline on one experiment folder `f`.

    Guard-clause form of the original short-circuiting `and` chain: each
    step runs only if the previous one succeeded. Returns True when all
    three steps succeed, False otherwise.
    """
    if not extract_info(f + '/info.txt', s):
        return False
    if not extract_dcall(f + '/log/client-dcall-gstreamer.log', s):
        return False
    return aggregate_failed_pct(s)
|
2020-02-04 18:30:49 +00:00
|
|
|
|
|
|
|
def extract(p, s):
    """Walk every experiment folder under directory `p` and aggregate results.

    For each entry of `p`, resets s['current'] to a fresh dict holding the
    folder name as 'identifier', then runs extract_folder and categorize on
    it, printing an error message when either fails. Progress is shown as a
    percentage on a single self-updating line.
    """
    # len() replaces the original functools.reduce counting lambda and
    # listing the directory once avoids a second os.listdir call.
    folders = os.listdir(p)
    item_count = len(folders)

    print("extracting...")
    for counter, folder in enumerate(folders, start=1):
        s['current'] = {'identifier': folder}
        # Explicit if/else replaces the original `True and ... or print(...)`
        # trick; categorize only runs when extract_folder succeeded.
        ok = extract_folder(p + '/' + folder, s) and categorize(folder, s)
        if not ok:
            print(f"An error occured with {folder}")
        progress = round(counter / item_count * 100)
        print(f"{progress}%", end="\r")
    print("done")
|
|
|
|
|
2020-02-05 08:37:33 +00:00
|
|
|
def output_res(s):
    """Write the aggregated drop-rate results to dcall_drop.csv (in cwd).

    Emits one row per (strategy, mode) key and per threshold; `ratio` is the
    share of that key's runs that dropped more than 1% / 5% of buffers.
    """
    with open('dcall_drop.csv', 'w') as f:
        f.write("mode,strat,threshold,ratio\n")
        for thr_name, thr_val in [('failed_1pct', 0.01), ('failed_5pct', 0.05)]:
            for key, val in s[thr_name].items():
                # BUG FIX: keys are built as (strat, mode) in
                # aggregate_failed_pct, but the original unpacked them as
                # `mode, strat = key`, swapping the two CSV columns.
                strat, mode = key
                ratio = val / s['run_per_strat'][key]
                f.write(f"{mode},{strat},{thr_val},{ratio}\n")
|
|
|
|
|
|
|
|
def main():
    """Entry point: aggregate all runs under sys.argv[1] into dcall_drop.csv."""
    state = {'failed_1pct': {}, 'failed_5pct': {}, 'run_per_strat': {}}
    extract(sys.argv[1], state)
    output_res(state)


# Guard the script entry so importing this module has no side effects.
if __name__ == "__main__":
    main()
|
2020-02-04 18:30:49 +00:00
|
|
|
|