File: logstat.awk

package info (click to toggle)
mysql-dfsg-5.0 5.0.32-7etch12
  • links: PTS
  • area: main
  • in suites: etch
  • size: 89,332 kB
  • ctags: 94,781
  • sloc: cpp: 436,297; ansic: 409,141; sh: 40,574; tcl: 30,484; perl: 27,872; yacc: 8,236; makefile: 5,532; java: 4,610; xml: 3,914; pascal: 3,462; sql: 2,673; awk: 1,338; asm: 1,061; sed: 772
file content (36 lines) | stat: -rw-r--r-- 717 bytes parent folder | download | duplicates (12)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
# $Id: logstat.awk,v 1.1 2002/05/10 15:19:13 bostic Exp $
#
# Output accumulated log record count/size statistics.
# Seed the previous-record trackers consulted by the /^\[/ rule below.
# Awk would default uninitialized variables to 0/"" anyway; initializing
# them explicitly documents that they are shared state across rules.
BEGIN {
	l_offset = 0;	# byte offset of the most recently seen log record
	l_file = 0;	# log file number of the most recently seen log record
}

# Process one log-record line.  $1 is expected to carry a bracketed LSN,
# "[file][offset]", with the record type following; record sizes are
# derived from the distance between consecutive offsets within one file.
/^\[/{
	# Turn the "[file][offset]:" punctuation (brackets, colon, tab,
	# space) into spaces so split() yields a[1] = log file number,
	# a[2] = byte offset, a[3] = record type tag.
	gsub("[][:	 ]", " ", $1)
	split($1, a)

	if (a[1] == l_file) {
		# Same log file as the previous record: the offset delta is the
		# byte span since the previous record.  NOTE(review): that delta
		# is really the PREVIOUS record's size, yet it is credited to the
		# CURRENT record's type a[3] — presumably an accepted upstream
		# approximation (the END block's averaging comment matches this
		# scheme); confirm before "fixing".
		l[a[3]] += a[2] - l_offset
		++n[a[3]]
	} else
		# First record seen in a new log file: its size cannot be
		# computed from an offset delta, so only count it (in s) for the
		# average-based estimate applied in END.
		++s[a[3]]

	# Remember this record's position for the next line's delta.
	l_file = a[1]
	l_offset = a[2]
}

END {
	# We can't figure out the size of the first record in each log file,
	# use the average for other records we found as an estimate.
	# (Deleting s[i] while iterating is safe in awk for the current
	# element; types that never produced a measurable record — n[i] == 0 —
	# stay in s and are reported as "unknown" below.)
	for (i in s)
		if (s[i] != 0 && n[i] != 0) {
			# Fold s[i] estimated records of average size l[i]/n[i]
			# into the measured totals.
			l[i] += s[i] * (l[i]/n[i])
			n[i] += s[i]
			delete s[i]
		}
	# Per record type: total bytes, record count, average record size.
	# n[i] is always >= 1 when l[i] exists, so the division is safe.
	for (i in l)
		printf "%s: %d (n: %d, avg: %.2f)\n", i, l[i], n[i], l[i]/n[i]
	# Types for which no size could ever be measured.
	for (i in s)
		printf "%s: unknown (n: %d, unknown)\n", i, s[i]
}