File: graph_latency.py

package info (click to toggle)
thunderbird 1%3A52.8.0-1~deb8u1
  • links: PTS, VCS
  • area: main
  • in suites: jessie
  • size: 1,710,120 kB
  • sloc: cpp: 5,081,109; ansic: 2,051,982; python: 458,727; java: 241,615; xml: 193,367; asm: 178,649; sh: 81,881; makefile: 24,703; perl: 16,874; objc: 4,389; yacc: 1,816; ada: 1,697; lex: 1,257; pascal: 1,251; cs: 879; exp: 499; php: 436; lisp: 258; awk: 152; sed: 51; ruby: 47; csh: 27
file content (104 lines) | stat: -rw-r--r-- 2,623 bytes parent folder | download | duplicates (14)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
#!/usr/bin/env python
# graph_latency.py - graph media latency
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# needs matplotlib (sudo aptitude install python-matplotlib)

import matplotlib.pyplot as plt
from matplotlib import rc
import sys
from pprint import pprint
import re


# FIX!  needs to be sum of a single mediastreamtrack and any output overhead for it
# So there is one sum per MST
def compute_sum(data):
    '''
    Compute, for each record, the sum of the most recent latency seen per
    stream. This expects the output of parse_data.

    data: iterable of field lists/tuples where index 0 is the stream id,
          index 2 the timestamp and index 3 the latency.
    Returns a pair of parallel lists: ([timestamps], [summed latencies]).
    '''
    last_values = {}
    out = ([], [])

    for record in data:
        # Remember the latest latency per stream; the sum at this point in
        # time therefore combines the most recent value of every stream.
        # (The original also pre-seeded the key with 0 and then immediately
        # overwrote it, and printed the dict each iteration — both removed.)
        last_values[record[0]] = float(record[3])
        out[0].append(record[2])
        out[1].append(sum(last_values.values()))
    return out


def clean_data(raw_data):
    '''
    Remove the PR_LOG cruft at the beginning of each line and return a list
    of the payload strings.

    raw_data: iterable of log lines.
    Returns: for each non-comment line, the field at index 1 after
             splitting on ": " (i.e. the text following the first ": ").
    Lines containing a '#' are treated as comments and skipped.
    '''
    # NOTE(review): the original body mixed tabs and spaces here, which is a
    # syntax error under Python 3 and `python -tt`; normalized to spaces.
    out = []
    for line in raw_data:
        if re.match(r'(.*)#(.*)', line):
            # Comment/header line — drop it.
            continue
        out.append(line.split(": ")[1])
    return out

# returns a list of tuples
def parse_data(raw_lines):
    '''
    Split each cleaned line on ',' and return the per-line field lists.

    raw_lines: iterable of comma-separated payload strings.
    Returns a list of lists of field strings (one inner list per line).
    '''
    return [entry.split(',') for entry in raw_lines]

if len(sys.argv) == 3:
    name = sys.argv[1]
    channels = int(sys.argv[2])
else:
    # Wrong argument count: print usage and stop. The original fell
    # through (and was missing the space in the message), then crashed
    # later with an IndexError/NameError.
    print(sys.argv[0] + " latency_log channels")
    sys.exit(1)

try:
    # Context manager closes the file once the lines are read; the
    # original leaked the handle and its bare except continued running
    # with 'f' unbound.
    with open(name) as f:
        raw_lines = f.readlines()
except IOError:
    print("cannot open " + name)
    sys.exit(1)

lines = clean_data(raw_lines)
data = parse_data(lines)

# Maps a per-track label to a pair of parallel lists: ([times], [latencies]).
final_data = {}

for tupl in data:
    name = tupl[0]
    # Fields are strings, so compare against '0'. The original compared
    # against the int 0, which is never equal to a string and made the
    # check always true.
    if tupl[1] != '0':
        name = name + tupl[1]
    if name not in final_data:
        final_data[name] = ([], [])
    # sanity-check values: drop obviously bogus latencies (>= 10 s).
    if float(tupl[3]) < 10 * 1000:
        final_data[name][0].append(float(tupl[2]))
        final_data[name][1].append(float(tupl[3]))

#overall = compute_sum(data)
#final_data["overall"] = overall

pprint(final_data)

fig = plt.figure()
for key in final_data:
    plt.plot(final_data[key][0], final_data[key][1], label=key)

plt.legend()
plt.suptitle("Latency in ms (y-axis) against time in ms (x-axis).")

size = fig.get_size_inches()
# make it gigantic so we can see things. sometimes, if the graph is too big,
# this errors. reduce the factor so it stays under 2**15.
fig.set_size_inches((size[0] * 10, size[1] * 2))
fig.savefig(sys.argv[1][:-4] + ".pdf")