Commit
detecting http debug verb
Brock committed Nov 21, 2014
1 parent 23af4af commit 415b1aa
Showing 2 changed files with 66 additions and 3 deletions.
15 changes: 12 additions & 3 deletions gather.py
@@ -164,17 +164,26 @@ def baseInfo(url):
    except:
        pass

    req = urllib2.Request(url)
    req.add_header('Proxy-Connection', 'Keep-Alive')
    req.add_header('Accept', '*/*')
    req.add_header('User-Agent', ' Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0')
    req.add_header('Command', 'stop-debug')
    req.get_method = lambda: 'DEBUG'
    try:
        response = urllib2.urlopen(req, timeout = 15)
        print '* Support Debug Method'
        #print response.read()
        if response.read().find(r'OK') != -1:
            print '* Support Debug Method'
    except Exception, e:
        pass
        # if hasattr(e, 'code'):
        #     if not (e.code == 501 or e.code == 405 or e.code == 403):
        #         print 'DEBUG: ', e

    req = urllib2.Request(url)
    req.add_header('Proxy-Connection', 'Keep-Alive')
    req.add_header('Accept', '*/*')
    req.add_header('User-Agent', ' Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0')
    req.get_method = lambda: 'TRACE'
    try:
        response = urllib2.urlopen(req, timeout = 15)
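The hunk above is the new check in baseInfo(): it sends an HTTP DEBUG request carrying a Command: stop-debug header, a verb that IIS/ASP.NET only honors when the application is deployed with debugging enabled, and now treats an "OK" in the response body (rather than any successful response) as confirmation. For reference, a minimal raw-socket sketch of the same probe; Python 2 to match the repo, with probe_debug_verb and target.example as illustrative names that are not part of the commit:

import socket

def probe_debug_verb(host, port=80, path='/'):
    # Same request shape as the urllib2 code above, spelled out at the wire level.
    request = ('DEBUG %s HTTP/1.1\r\n'
               'Host: %s\r\n'
               'Command: stop-debug\r\n'
               'Accept: */*\r\n'
               'Connection: close\r\n'
               '\r\n' % (path, host))
    s = socket.create_connection((host, port), timeout=15)
    try:
        s.sendall(request)
        reply = s.recv(4096)
    finally:
        s.close()
    # Look at the body only, so "200 OK" in the status line does not count.
    body = reply.split('\r\n\r\n', 1)[-1]
    # With debugging enabled the body is typically just "OK"; a rejected verb
    # usually comes back as 403/405/501 or an authentication challenge.
    return 'OK' in body

if __name__ == '__main__':
    print probe_debug_verb('target.example')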
@@ -247,7 +256,7 @@ def querySiteFile(url):
    queryWeight(urlP.hostname)
if options & 32:
    print '\n============================== site file ==============================\n'
    querySiteFile(url)
    querySiteFile(url[:url.find('/', 8)])
if options & 8:
    print '\n============================== nmap ==============================\n'
    sys.stdout.flush()
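The second hunk stops handing the full URL to querySiteFile() and instead trims it to scheme://host first: url.find('/', 8) locates the first slash after the "http://" or "https://" prefix. A quick illustration of that slice; site_root and the example URLs are made up for the illustration:

def site_root(url):
    # The first '/' at or after index 8 is the one that ends the host part.
    return url[:url.find('/', 8)]

print site_root('http://example.com/admin/login.php')   # http://example.com
print site_root('https://example.com/a/b?c=1')           # https://example.com

# Caveat: on a URL with no path at all, find() returns -1 and the slice drops
# the last character, so callers are expected to pass a URL that still has a path.
print site_root('http://example.com')                     # http://example.co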
54 changes: 54 additions & 0 deletions httpdebug.py
@@ -0,0 +1,54 @@
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

#
# filename: httpdebug.py
# written by 老妖@wooyun
# date: 2014-06-06
#
###############################################################################

import sys, os, getopt, types
import urllib2
import cookielib
from lxml import etree
import sys,socket
import json
import urlparse
import re


def trace(url):
    req = urllib2.Request(url)
    req.add_header('Proxy-Connection', 'Keep-Alive')
    req.add_header('Accept', '*/*')
    req.add_header('User-Agent', ' Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0')
    req.get_method = lambda: 'DEBUG'
    req.add_header('Command', 'stop-debug')
    try:
        response = urllib2.urlopen(req, timeout = 15)
        html = response.read()
        print response.info()
        print html
    except Exception, e:
        print e

if __name__ == '__main__':
    import locale
    reload(sys)
    sys.setdefaultencoding(locale.getpreferredencoding())

    cookieJar = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookieJar))
    urllib2.install_opener(opener)

    opts, args = getopt.getopt(sys.argv[1:], "")
    for op, value in opts:
        pass

    url = args[0]
    if url[:7] != 'http://' and url[:8] != 'https://':
        url = 'http://' + url

    trace(url)
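As committed, httpdebug.py takes one host or URL on the command line, prepends http:// when no scheme is given, and prints the DEBUG response headers and body (or the exception). A hedged sketch of running the same check over a list of hosts, assuming a one-host-per-line targets.txt next to the script; neither that file nor this wrapper is part of the commit:

import httpdebug

with open('targets.txt') as f:
    for line in f:
        url = line.strip()
        if not url:
            continue
        if url[:7] != 'http://' and url[:8] != 'https://':
            url = 'http://' + url
        print '===', url, '==='
        httpdebug.trace(url)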
