File: trytond-stat

Package: tryton-server 7.0.40-1

#!/usr/bin/env python3
# PYTHON_ARGCOMPLETE_OK
# This file is part of Tryton.  The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
import curses
import datetime as dt
import math
import os
import sys
from collections import defaultdict

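# argcomplete is optional; shell completion is simply skipped when it is missing.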
try:
    import argcomplete
except ImportError:
    argcomplete = None

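# When run from a source checkout, prefer the sibling trytond package over an
# installed one.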
DIR = os.path.abspath(os.path.normpath(os.path.join(__file__,
    '..', '..', 'trytond')))
if os.path.isdir(DIR):
    sys.path.insert(0, os.path.dirname(DIR))

import trytond.commandline as commandline
from trytond.config import config

parser = commandline.get_parser_stat()
if argcomplete:
    argcomplete.autocomplete(parser)
options = parser.parse_args()
config.update_etc(options.configfile)

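# Imported only once the configuration file has been read.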
import trytond.status as status


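# Run under curses.wrapper(): keep one entry per reporting worker process and
# redraw the two pads each time status.listen() delivers a notification.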
def main(stdscr):
    global reverse
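    # Non-blocking input: stdscr.getch() returns -1 when no key has been pressed.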
    stdscr.nodelay(1)
    reverse = True
    processes = {}
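    # Both pads start at 1x1 and are resized to fit their content on every refresh.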
    status_pad = curses.newpad(1, 1)
    cache_pad = curses.newpad(1, 1)

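    # Rebuild both pads from the current worker reports: the top pad lists the
    # running requests, the bottom pad aggregates the cache counters.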
    def refresh_status():
        now = dt.datetime.now()
        height, width = stdscr.getmaxyx()

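        # Keep only the workers whose last report is still fresh (its expiry
        # time is in the future).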
        def expired(process):
            return process['expire'] > now

        def format_status(since, id_, request):
            since = str(dt.timedelta(seconds=int(since)))
            pid, node = id_.split('@', 1)
            if len(node) > 12:
                node = node[:5] + '…' + node[-6:]
            return f"{pid:>5} {node:<12} {since:>18} {request}"
        status_pad.clear()
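        # Flatten every worker's running requests and sort them on their
        # 'since' value.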
        status = [format_status(*i) for i in sorted(
                ((msg['since'], p['id'], msg['request'])
                    for p in filter(expired, processes.values())
                    for msg in p['status']),
                reverse=reverse)]
        prow = min(len(status) + 1, height // 2)
        pcol = max(max(map(len, status), default=0), width)
        status_pad.resize(len(status) + 1, pcol + 1)
        for i, line in enumerate(status, 1):
            status_pad.addnstr(i, 0, line.ljust(pcol), pcol)
        status_pad.addnstr(
            0, 0, "{pid:>5} {node:^12} {since:>18} {request} ({n})".format(
                pid="pid",
                node="node",
                since="TIME" + ('↑' if reverse else '↓'),
                request="request",
                n=len(status),
                ).upper().ljust(pcol), pcol, curses.A_REVERSE)
        status_pad.noutrefresh(0, 0, 0, 0, prow, width - 1)

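        # Cache section: sum the hit/miss counters reported by every live worker.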
        def ratio(cache):
            if cache['hit'] or cache['miss']:
                return cache['hit'] / (cache['hit'] + cache['miss'])
            return 0

        def format_cache(name, hit, miss, ratio, size):
            return f"{hit:{size}d} {miss:{size}d} {ratio * 100:6.2f} {name}"

        cache_pad.clear()
        cache_stats = defaultdict(lambda: defaultdict(lambda: 0))
        for p in filter(expired, processes.values()):
            for cache in p['caches']:
                stats = cache_stats[cache['name']]
                stats['name'] = cache['name']
                stats['hit'] += cache['hit']
                stats['miss'] += cache['miss']
        for cache in cache_stats.values():
            cache['ratio'] = ratio(cache)
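        # Column width: wide enough for the largest hit + miss total, and at
        # least 4 so the headers fit.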
        try:
            size = math.ceil(math.log10(
                    max(s['hit'] + s['miss'] for s in cache_stats.values())))
        except ValueError:
            size = 1
        size = max(size, 4)
        caches = [format_cache(size=size, **cache) for cache in sorted(
                cache_stats.values(), key=lambda c: (c['ratio'], c['miss']),
                reverse=reverse)]
        crow = max(len(caches) + 1, (height - prow))
        ccol = max(max(map(len, caches), default=0), width)
        cache_pad.resize(crow + 1, ccol + 1)
        for i, line in enumerate(caches, 1):
            cache_pad.addnstr(i, 0, line.ljust(ccol), ccol)
        cache_pad.addstr(
            0, 0,
            "{hit:>{size}} {miss:>{size}} {ratio:>6} {name} ({n})".format(
                size=size,
                hit="hit",
                miss="miss",
                ratio="% " + ('↑' if reverse else '↓'),
                name="name",
                n=len(caches),
                ).upper().ljust(ccol), curses.A_REVERSE)
        cache_pad.noutrefresh(0, 0, prow, 0, height - 1, width - 1)

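    # Redraw the screen and handle one-key commands: 'q' quits, 'r' toggles the
    # sort order and redraws immediately.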
    def refresh():
        global reverse
        refresh_status()
        stdscr.refresh()

        key = stdscr.getch()
        if key == ord('q'):
            sys.exit()
        elif key == ord('r'):
            reverse = not reverse
            refresh()

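    # Callback passed to status.listen(): each notification replaces the sending
    # worker's entry and is kept as live for the next 10 seconds.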
    def update(data=None):
        if data:
            pid = data['id']
            data['expire'] = dt.datetime.now() + dt.timedelta(seconds=10)
            processes[pid] = data
        refresh()
    refresh()
    return status.listen(config.get('database', 'path'), update)


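# main() returns whatever status.listen() returned; a falsy result means the
# status channel is not available on this platform.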
if not curses.wrapper(main):
    sys.stderr.write("status not supported on this platform\n")