File: logstat.awk

Package: db5.3 5.3.28+dfsg2-1

# $Id$
#
# Output accumulated log record count/size statistics.
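#
# The input is expected to be Berkeley DB db_printlog output, where each
# record's header line begins with a "[file][offset]record_type:" prefix.
# A usage sketch, assuming a database environment in DB_HOME:
#
#	db_printlog -h DB_HOME | awk -f logstat.awk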
BEGIN {
	l_file = 0;	# log file number of the previously seen record
	l_offset = 0;	# byte offset of the previously seen record
}

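# For each record header, turn the "[file][offset]type:" prefix into
# whitespace-separated fields: a[1] = log file number, a[2] = byte offset
# within that file, a[3] = record type name.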
/^\[/{
	gsub("[][:	 ]", " ", $1)
	split($1, a)

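	# If this record is in the same log file as the previous one, the
	# bytes between the two offsets are credited to this record's type.
	# The first record seen in each file can't be sized this way, so it
	# is only counted in s[] for the estimate made in END.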
	if (a[1] == l_file) {
		l[a[3]] += a[2] - l_offset
		++n[a[3]]
	} else
		++s[a[3]]

	l_file = a[1]
	l_offset = a[2]
}

END {
	# We can't figure out the size of the first record in each log file,
	# so use the average of the other records we found as an estimate.
	for (i in s)
		if (s[i] != 0 && n[i] != 0) {
			l[i] += s[i] * (l[i]/n[i])
			n[i] += s[i]
			delete s[i]
		}
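	# Report per-type byte totals and averages; any type whose every
	# occurrence was the first record of a log file remains unsized.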
	for (i in l)
		printf "%s: %d (n: %d, avg: %.2f)\n", i, l[i], n[i], l[i]/n[i]
	for (i in s)
		printf "%s: unknown (n: %d, unknown)\n", i, s[i]
}