#!/usr/bin/python
"""
Inguma Penetration Testing Toolkit
Copyright (c) 2006, 2007 Joxean Koret, joxeankoret [at] yahoo.es
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; version 2
of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
# Fuzzing data

# Canned string payloads substituted into tokens: format-string specifiers,
# NUL truncation, host names, UNC/device paths and traversal sequences.
# Fixes vs. the original tuple: a missing comma after "127.0.0.1" silently
# concatenated it with "\\\\" into one bogus payload; "127.0.0.l" was a
# lowercase-L typo for the loopback address; "%n" was listed twice.
strings = ("A", "%s", "%n", "%x", "%d", "A\x00",
           "localhost", "127.0.0.1",
           "\\\\", "C:\\", "\\\\?\\PIPE", "/.", "../", "\\\\.\\TEST")

# Boundary integers (powers of two, signed 32-bit extremes, small negatives).
numbers = (-2, -1, 0, 1, 2, 3, 4, 6, 8, 16, 24, 32, 64, 128, 256, 512,
           1024, 2048, 4096, 8192, 16384, 2147483647, 4294967294)

# Repetition counts used to inflate string payloads.
sizes = (1, 4, 100, 512, 1024, 2048, 4096, 8192, 10000, 16384, 32000, 64000)

# Characters that split a packet into tokens; each becomes a token itself.
separators = [" ", ".", "/", "&", "=", "?", ":", "\r", "\n", "\x00", "@", "-",
              "_", "*", "\\", "(", ")", "[", "]", "!", "|",
              "#", "$", "%", "<", ">", ";"]

# Separator tokens that are never fuzzed (protocol framing characters).
ignorechars = [" ", "<", ">", '"', "\r", "\n", "?", "(", ")"]

TOKEN_TYPE_TOKEN = 0
TOKEN_TYPE_INJECT = 1
# NOTE(review): same value as TOKEN_TYPE_TOKEN — looks like a typo; confirm
# against the code that consumes these constants.
TOKEN_TYPE_APPEND = 0


def tokenizePacket(pkt):
    """
    Split a packet string into a list of tokens.

    Every separator character becomes a single-character token of its own;
    any maximal run of non-separator characters becomes one token. The
    concatenation of the returned tokens reproduces pkt exactly.

    pkt -- the packet/command string to tokenize
    Returns a list of string tokens (empty list for an empty packet).
    """
    ret = []
    tmp = ""
    for x in pkt:
        if x in separators:
            # Flush any accumulated word before emitting the separator.
            if tmp != "":
                ret.append(tmp)
            ret.append(x)
            tmp = ""
        else:
            tmp += x
    # Flush a trailing word that was not terminated by a separator.
    if tmp != "":
        ret.append(tmp)
    return ret
def token2str(tkn):
    """
    Join a token list back into a single string.

    Tokens may be non-string values (the fuzzer substitutes integers into
    token slots), so each element is passed through str() before joining.

    tkn -- iterable of tokens (strings or arbitrary values)
    Returns the concatenation of str(token) for every token, in order.
    """
    return "".join(str(tok) for tok in tkn)
def fuzzCallback(func, cmd, idx, var=0):
    """
    Generic driver for writing fuzzers.

    Tokenizes cmd and, for every token position starting at idx, substitutes
    in turn: each boundary number; each canned string payload repeated by
    each size; and each single byte repeated by each size. For every
    mutation it calls func(mutated_packet_string, token_index).

    func -- callback invoked as func(packet_string, token_index)
    cmd  -- the packet/command string to mutate
    idx  -- first token index to fuzz (converted with int())
    var  -- skip mutations whose per-token counter is below this value,
            allowing an interrupted run to be resumed
    Returns the number of mutations actually delivered to func.
    """
    mtokens = tokenizePacket(cmd)
    sent = 0
    for i in range(int(idx), len(mtokens)):
        # Re-tokenize so every position starts from the pristine packet.
        tokens = tokenizePacket(cmd)
        if tokens[i] in separators:
            # Separator tokens that carry protocol framing are never fuzzed.
            if tokens[i] in ignorechars:
                continue
        x = 0  # per-token mutation counter, compared against var for resume
        # Pass 1: boundary integers.
        for num in numbers:
            x += 1
            if x < var:
                continue
            print("Fuzzing var %d:%d" % (i, x))
            tmp = list(tokens)  # copy: don't clobber the template list
            tmp[i] = num
            func(token2str(tmp), i)
            sent += 1
        # Pass 2: canned strings and single bytes, inflated by each size.
        for size in sizes:
            for stmt in strings:
                x += 1
                if x < var:
                    continue
                print("Fuzzing var %d:%d:%d" % (i, x, size))
                tmp = list(tokens)
                tmp[i] = stmt * size
                func(token2str(tmp), i)
                sent += 1
            # NOTE(review): range(0, 255) excludes byte 0xFF — possibly an
            # off-by-one in the original; preserved as-is pending confirmation.
            for char in range(0, 255):
                x += 1
                if x < var:
                    continue
                print("Fuzzing var %d:%d:%d" % (i, x, size))
                tmp = list(tokens)
                tmp[i] = chr(char) * size
                func(token2str(tmp), i)
                sent += 1
    return sent