#!/usr/bin/env python
"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import cookielib
import re
import socket
import sys
import urllib
import urllib2
import ConfigParser

from operator import itemgetter
TIMEOUT = 10
CONFIG_FILE = 'sqlharvest.cfg'
TABLES_FILE = 'tables.txt'
USER_AGENT = 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; AskTB5.3)'
SEARCH_URL = 'http://www.google.com/m?source=mobileproducts&dc=gorganic'
MAX_FILE_SIZE = 2 * 1024 * 1024  # skip any result (.sql) file larger than 2 MB
QUERY = 'CREATE TABLE ext:sql'
REGEX_URLS = r';u=([^"]+?)&q='
REGEX_RESULT = r'(?i)CREATE TABLE\s*(/\*.*\*/)?\s*(IF NOT EXISTS)?\s*(?P<result>[^\(;]+)'
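
# A hedged sketch of what REGEX_RESULT captures (example statements, not real crawl data):
#   "CREATE TABLE `users` (id INT)"               -> group("result") == "`users` "
#   "CREATE TABLE IF NOT EXISTS [dbo].[Log] (x)"  -> group("result") == "[dbo].[Log] "
# the leftover quoting/bracketing is stripped later, inside main()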

def main():
    tables = dict()
    cookies = cookielib.CookieJar()
    cookie_processor = urllib2.HTTPCookieProcessor(cookies)
    opener = urllib2.build_opener(cookie_processor)
    opener.addheaders = [("User-Agent", USER_AGENT)]

    conn = opener.open(SEARCH_URL)
    page = conn.read()  # set initial cookie values

    config = ConfigParser.ConfigParser()
    config.read(CONFIG_FILE)

    if not config.has_section("options"):
        config.add_section("options")

    if not config.has_option("options", "index"):
        config.set("options", "index", "0")

    i = int(config.get("options", "index"))
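
    # sketch of CONFIG_FILE after a run (standard ConfigParser layout; "42" is an assumed value):
    #   [options]
    #   index = 42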
    try:
        with open(TABLES_FILE, 'r') as f:
            for line in f:
                if len(line) > 0 and ',' in line:
                    temp = line.split(',')
                    tables[temp[0]] = int(temp[1])
    except:
        pass
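
    # the loop above restores "<table name>,<hit count>" pairs saved by a previous
    # run (e.g. "users,13"); a missing or malformed tables.txt is simply ignored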

    socket.setdefaulttimeout(TIMEOUT)

    files, old_files = None, None

    try:
        while True:
            abort = False
            old_files = files
            files = []

            try:
                conn = opener.open("%s&q=%s&start=%d&sa=N" % (SEARCH_URL, QUERY.replace(' ', '+'), i * 10))
                page = conn.read()

                for match in re.finditer(REGEX_URLS, page):
                    files.append(urllib.unquote(match.group(1)))

                    if len(files) >= 10:
                        break

                abort = (files == old_files)
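                # an unchanged URL list means Google served the same results again,
                # i.e. there are no further pages, so the harvest stops below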

            except KeyboardInterrupt:
                raise
            except Exception, msg:
                print msg

            if abort:
                break

            sys.stdout.write("\n---------------\n")
            sys.stdout.write("Result page #%d\n" % (i + 1))
            sys.stdout.write("---------------\n")

            for sqlfile in files:
                print sqlfile

                try:
                    req = urllib2.Request(sqlfile)
                    response = urllib2.urlopen(req)

                    if "Content-Length" in response.headers:
                        if int(response.headers.get("Content-Length")) > MAX_FILE_SIZE:
                            continue

                    page = response.read()
                    found = False
                    counter = 0

                    for match in re.finditer(REGEX_RESULT, page):
                        counter += 1
                        table = match.group("result").strip().strip("`\"'").replace('"."', ".").replace("].[", ".").strip('[]')
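                        # normalization examples (assumed inputs): '"db"."users"' -> 'db.users',
                        # '[dbo].[Log]' -> 'dbo.Log', '`users`' -> 'users'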

                        if table and not any(_ in table for _ in ('>', '<', '--', ' ')):
                            found = True
                            sys.stdout.write('*')

                            if table in tables:
                                tables[table] += 1
                            else:
                                tables[table] = 1

                    if found:
                        sys.stdout.write("\n")

                except KeyboardInterrupt:
                    raise
                except Exception, msg:
                    print msg

            else:
                i += 1
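                # for-else: "i" advances to the next result page only after the
                # inner loop over files completed without a break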

    except KeyboardInterrupt:
        pass

    finally:
        with open(TABLES_FILE, 'w+') as f:
            tables = sorted(tables.items(), key=itemgetter(1), reverse=True)

            for table, count in tables:
                f.write("%s,%d\n" % (table, count))

        config.set("options", "index", str(i + 1))

        with open(CONFIG_FILE, 'w+') as f:
            config.write(f)

if __name__ == "__main__":
    main()
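
# Usage (Python 2 only -- cookielib, urllib2 and ConfigParser were renamed in Python 3):
#   $ python sqlharvest.py
# One '*' is printed per harvested CREATE TABLE match; Ctrl-C saves progress and exits.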