#
# This file is part of pysnmp-apps software.
#
# Copyright (c) 2005-2016, Ilya Etingof <ilya@glas.net>
# License: http://pysnmp.sf.net/license.html
#
import sys
from pysnmp_apps.cli import spark
# AST
class ConfigToken:
    # Abstract grammar token
    def __init__(self, type, attr=None):
        self.type = type
        self.attr = attr

    # Rich comparisons delegate to the token type, so a token compares
    # equal to the plain string naming its type (e.g. 'string')
    def __eq__(self, other): return self.type == other
    def __ne__(self, other): return self.type != other
    def __lt__(self, other): return self.type < other
    def __le__(self, other): return self.type <= other
    def __gt__(self, other): return self.type > other
    def __ge__(self, other): return self.type >= other

    def __repr__(self): return self.attr or self.type

    def __str__(self):
        if self.attr is None:
            return '%s' % self.type
        else:
            return '%s(%s)' % (self.type, self.attr)

class ConfigNode:
    # AST node class -- N-ary tree
    def __init__(self, type, attr=None):
        self.type, self.attr = type, attr
        self._kids = []

    def __getitem__(self, i):
        return self._kids[i]

    def __len__(self):
        return len(self._kids)

    # Child nodes are attached by slice assignment: Python 2 routes a
    # simple slice through __setslice__, Python 3 through __setitem__
    if sys.version_info[0] < 3:
        def __setslice__(self, low, high, seq):
            self._kids[low:high] = seq
    else:
        def __setitem__(self, idx, seq):
            self._kids[idx] = seq

    # Like ConfigToken, nodes compare equal to the string naming their type
    def __eq__(self, other): return self.type == other
    def __ne__(self, other): return self.type != other
    def __lt__(self, other): return self.type < other
    def __le__(self, other): return self.type <= other
    def __gt__(self, other): return self.type > other
    def __ge__(self, other): return self.type >= other

    def __str__(self):
        if self.attr is None:
            return self.type
        else:
            return '%s(%s)' % (self.type, self.attr)

# Scanner
class __ScannerTemplate(spark.GenericScanner):
    def tokenize(self, input):
        # Collect the ConfigToken objects produced by the t_* rules
        self.rv = []
        spark.GenericScanner.tokenize(self, input)
        return self.rv

class __FirstLevelScanner(__ScannerTemplate):
    def t_string(self, s):
        r' [!#\$%&\'\(\)\*\+,\.//0-9<=>\?@A-Z\\\^_`a-z\{\|\}~][!#\$%&\'\(\)\*\+,\-\.//0-9<=>\?@A-Z\\\^_`a-z\{\|\}~]* '
        self.rv.append(ConfigToken('string', s))

class __SecondLevelScanner(__FirstLevelScanner):
    # Note: despite its name, the 'semicolon' rule matches a colon,
    # and 'lparen'/'rparen' match square brackets
    def t_semicolon(self, s):
        r' : '
        self.rv.append(ConfigToken('semicolon'))

    def t_lparen(self, s):
        r' \[ '
        self.rv.append(ConfigToken('lparen'))

    def t_rparen(self, s):
        r' \] '
        self.rv.append(ConfigToken('rparen'))

    def t_quote(self, s):
        r' \" '
        self.rv.append(ConfigToken('quote'))

    def t_whitespace(self, s):
        r' \s+ '
        self.rv.append(ConfigToken('whitespace'))

ScannerTemplate = __SecondLevelScanner

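# Example (illustrative only, input string chosen here): feeding a typical
# command-line fragment through the combined scanner yields one ConfigToken
# per lexeme, e.g.
#
#     ScannerTemplate().tokenize('IF-MIB::ifIndex')
#
# returns tokens of types string('IF-MIB'), semicolon, semicolon,
# string('ifIndex').
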
# Parser
class ParserTemplate(spark.GenericASTBuilder):
    initialSymbol = None

    def __init__(self, startSymbol=None):
        if startSymbol is None:
            startSymbol = self.initialSymbol
        spark.GenericASTBuilder.__init__(self, ConfigNode, startSymbol)

    def terminal(self, token):
        # Reduce to homogeneous AST.
        return ConfigNode(token.type, token.attr)

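# Example (hypothetical class, rule and symbol names): concrete CLI modules
# derive from ParserTemplate and declare their grammar SPARK-style, in the
# docstrings of p_* methods, e.g.
#
#     class PairParser(ParserTemplate):
#         initialSymbol = 'Pair'
#
#         def p_pair(self, args):
#             '''
#             Pair ::= string semicolon string
#             '''
#
# PairParser().parse(tokens) would then reduce a token stream into a
# homogeneous ConfigNode tree via terminal() above.
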
# Generator
class GeneratorTemplate(spark.GenericASTTraversal):
    def __init__(self): pass  # Skip superclass constructor

    def typestring(self, node):
        return node.type

    def preorder(self, client, node):
        # Pre-order traversal threading a caller-supplied "client" object
        # through the n_<type> / n_<type>_exit hooks of subclasses
        try:
            name = 'n_' + self.typestring(node)
            if hasattr(self, name):
                func = getattr(self, name)
                func(client, node)
            else:
                self.default(client, node)
        except spark.GenericASTTraversalPruningException:
            return client

        for kid in node:
            self.preorder(client, kid)

        name = name + '_exit'
        if hasattr(self, name):
            func = getattr(self, name)
            func(client, node)

        return client

    def default(self, client, node): pass
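
if __name__ == '__main__':
    # Illustrative sketch only (not part of the original module): exercise
    # the scanner and a minimal GeneratorTemplate subclass on a hand-built
    # AST.  The input string and the n_string hook are arbitrary examples.
    tokens = ScannerTemplate().tokenize('community:public')
    print(tokens)  # three tokens: string, semicolon, string

    tree = ConfigNode('root')
    tree[0:2] = [ConfigNode('string', 'community'),
                 ConfigNode('string', 'public')]

    class DemoGenerator(GeneratorTemplate):
        # Collect the attribute of every 'string' node into the client list
        def n_string(self, client, node):
            client.append(node.attr)

    collected = []
    DemoGenerator().preorder(collected, tree)
    print(collected)  # ['community', 'public']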