diff --git a/gather.py b/gather.py
index 180b7d4..210ccbd 100755
--- a/gather.py
+++ b/gather.py
@@ -164,17 +164,26 @@ def baseInfo(url):
     except: pass
 
 
+    req = urllib2.Request(url)
+    req.add_header('Proxy-Connection', 'Keep-Alive')
+    req.add_header('Accept', '*/*')
+    req.add_header('User-Agent', ' Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0')
+    req.add_header('Command', 'stop-debug')
     req.get_method = lambda: 'DEBUG'
     try:
         response = urllib2.urlopen(req, timeout = 15)
-        print '* Support Debug Method'
-        #print response.read()
+        if response.read().find(r'OK') != -1:
+            print '* Support Debug Method'
     except Exception, e:
         pass
 #        if hasattr(e, 'code'):
 #            if not (e.code == 501 or e.code == 405 or e.code == 403):
 #                print 'DEBUG: ', e
 
+    req = urllib2.Request(url)
+    req.add_header('Proxy-Connection', 'Keep-Alive')
+    req.add_header('Accept', '*/*')
+    req.add_header('User-Agent', ' Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0')
     req.get_method = lambda: 'TRACE'
     try:
         response = urllib2.urlopen(req, timeout = 15)
@@ -247,7 +256,7 @@ def querySiteFile(url):
         queryWeight(urlP.hostname)
     if options & 32:
         print '\n============================== site file ==============================\n'
-        querySiteFile(url)
+        querySiteFile(url[:url.find('/', 8)])
     if options & 8:
         print '\n============================== nmap ==============================\n'
         sys.stdout.flush()
diff --git a/httpdebug.py b/httpdebug.py
new file mode 100755
index 0000000..04ee535
--- /dev/null
+++ b/httpdebug.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+#
+# filename: httptrace.py
+# written by 老妖@wooyun
+# date: 2014-06-06
+#
+###############################################################################
+
+import sys, os, getopt, types
+import urllib2
+import cookielib
+from lxml import etree
+import sys,socket
+import json
+import urlparse
+import re
+
+
+def trace(url):
+    req = urllib2.Request(url)
+    req.add_header('Proxy-Connection', 'Keep-Alive')
+    req.add_header('Accept', '*/*')
+    req.add_header('User-Agent', ' Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0')
+    req.get_method = lambda: 'DEBUG'
+    req.add_header('Command', 'stop-debug')
+    try:
+        response = urllib2.urlopen(req, timeout = 15)
+        html = response.read()
+        print response.info()
+        print html
+    except Exception, e:
+        print e
+
+if __name__ == '__main__':
+    import locale
+    reload(sys)
+    sys.setdefaultencoding(locale.getpreferredencoding())
+
+    cookieJar = cookielib.CookieJar()
+    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookieJar))
+    urllib2.install_opener(opener)
+
+    opts, args = getopt.getopt(sys.argv[1:], "")
+    for op, vaule in opts:
+        pass
+
+    url = args[0]
+    if url[:7] != 'http://' and url[:8] != 'https://':
+        url = 'http://' + url
+
+    trace(url)
+
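
For reference, the DEBUG check added to baseInfo() above is the usual probe for ASP.NET remote debugging: send a DEBUG request carrying a 'Command: stop-debug' header and treat an 'OK' response body as a positive. A minimal standalone sketch of the same probe, written against Python 3's http.client (the target host and path below are placeholders, not values from the patch):

    # Hedged sketch of the DEBUG probe used in the patch, redone for Python 3.
    # TARGET_HOST and PATH are hypothetical placeholders.
    import http.client

    def supports_debug(host, path='/', timeout=15):
        conn = http.client.HTTPConnection(host, timeout=timeout)
        try:
            # A server with ASP.NET remote debugging enabled answers a DEBUG
            # request that carries 'Command: stop-debug' with an 'OK' body.
            conn.request('DEBUG', path, headers={'Command': 'stop-debug', 'Accept': '*/*'})
            resp = conn.getresponse()
            body = resp.read().decode('utf-8', 'replace')
            return resp.status == 200 and 'OK' in body
        except (OSError, http.client.HTTPException):
            return False
        finally:
            conn.close()

    TARGET_HOST = 'example.com'   # placeholder target
    PATH = '/'
    if supports_debug(TARGET_HOST, PATH):
        print('* Support Debug Method')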