-
Notifications
You must be signed in to change notification settings - Fork 73
/
Copy pathwebtech.py
96 lines (74 loc) · 2.79 KB
/
webtech.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# webtech.py
#
# Copyright 2019 bingo <bingo@hacklab>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
#
from concurrent.futures import ThreadPoolExecutor as executor # pip install futures
from Wappalyzer import Wappalyzer, WebPage # pip install python-Wappalyzer
import urllib3, argparse
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) # disable Python SSL warnings !
green = "\033[32m"
blue = "\033[34m"
red = "\033[31m"
bold = "\033[1m"
end = "\033[0m"
wappalyzer = Wappalyzer.latest()
print(blue+"""
__ __ _ _____ _
\ \ / /__| |_|_ _|__ ___| |__
\ \ /\ / / _ \ '_ \| |/ _ \/ __| '_ \
\ V V / __/ |_) | | __/ (__| | | |
\_/\_/ \___|_.__/|_|\___|\___|_| |_| @bing0o
Web Technologies Detector
"""+end)
def check(out, ig, url):
    """Fingerprint the technologies behind *url*; print and optionally save them.

    Parameters:
        out: path of the output file, or None / the string 'None' when no
             output file was requested (the CLI stringifies argparse values,
             so 'None' is the "not set" sentinel).
        ig:  error-suppression flag; True or the string 'True' means errors
             are silently ignored instead of printed.
        url: target to scan; an 'http://' scheme is prepended when missing.
    """
    if not url.startswith('http'):
        url = 'http://' + url
    try:
        webpage = WebPage.new_from_url(url)
        tech = wappalyzer.analyze(webpage)
        print("[+] " + str(url) + " | " + green + bold + " - ".join(tech) + end)
        # Accept both a real None and the stringified 'None' the CLI passes.
        if out and str(out) != 'None':
            # 'with' closes the file automatically — no explicit close needed
            # (the original called f.close() on an already-closed handle).
            with open(out, 'a') as f:
                f.write(url + " | " + " - ".join(tech) + "\n")
    except Exception as e:
        # Accept bool True as well as the stringified 'True' the CLI passes.
        if str(ig) != 'True':
            print(red+"Error: " + end + "[ " + bold + str(url) + end + " ] > " + str(e))
# ---------------------------------------------------------------------------
# CLI entry point: parse arguments, read the target list, fan out to threads.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--wordlist", help="Domains List File", type=str, required=True)
# default=10 replaces the old "compare the stringified value to 'None'" dance.
parser.add_argument("-t", "--thread", help="Threads Number - (Default: 10)", type=int, default=10)
parser.add_argument("-i", "--ignore", help="To Ignore The Errors", action='store_true')
parser.add_argument("-o", "--output", help="Save The Results To a File", type=str)
args = parser.parse_args()

links = args.wordlist
threads = args.thread
# check() was written around stringified argparse values, so keep passing the
# same string sentinels ('None', 'True'/'False') for compatibility.
ig = str(args.ignore)
out = str(args.output)

# Read the wordlist once inside a 'with' so the handle is closed (the original
# opened it twice and closed neither). strip() also handles CRLF line endings,
# and blank lines are dropped instead of being submitted as empty targets.
with open(links) as wordlist:
    targets = [line.strip() for line in wordlist if line.strip()]

print(blue +"["+red+"+"+blue+"] File: " + end + links)
print(blue +"["+red+"+"+blue+"] Length: " + end + str(len(targets)))
print(blue +"["+red+"+"+blue+"] Output: " + end + str(out))
print(blue +"["+red+"+"+blue+"] Threads: " + end + str(threads))
print(blue +"["+red+"+"+blue+"] Ignore: " + end + str(ig))
print(red+"\n[+] Results:\n"+end)

# A plain loop, not a throwaway list comprehension, for the submit side effect.
with executor(max_workers=int(threads)) as exe:
    for target in targets:
        exe.submit(check, out, ig, target)