lyricpass.py (forked from initstring/lyricpass)
#!/usr/bin/env python3
"""
Utility to scrape lyrics from https://lyrics.com
Usage:
    lyricpass.py -a <artist>
    lyricpass.py -i <file with multiple artists>

Example:
    python lyricpass.py -a "Rob Zombie"
    python lyricpass.py -i /tmp/artists.txt

Outputs two files:
    raw-lyrics-<timestamp>   <everything>
    wordlist-<timestamp>     <cleaned passphrases>
Tool by initstring. If you're into cracking complex passwords, check out
github.com/initstring/passphrase-wordlist for more fun!
"""
import argparse
import urllib.request
import datetime
import os
import sys
import re
SITE = "https://www.lyrics.com/"
LYRIC_FILE = "raw-lyrics-{:%Y-%m-%d-%H.%M.%S}".format(datetime.datetime.now())
PASS_FILE = "wordlist-{:%Y-%m-%d-%H.%M.%S}".format(datetime.datetime.now())
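# Output filenames are timestamped down to the second, so repeated runs
# write new files instead of clobbering earlier results.
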
def parse_args():
    """
    Handle user-passed parameters
    """
    desc = "Scrape song lyrics from lyrics.com"
    parser = argparse.ArgumentParser(description=desc)
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("-a", "--artist", type=str, action="store",
                       help="Single artist to scrape")
    group.add_argument("-i", "--infile", type=str, action="store",
                       help="File containing one artist per line to scrape")
    parser.add_argument("--min", type=int, default=8,
                        help="Minimum passphrase length. Default=8")
    parser.add_argument("--max", type=int, default=40,
                        help="Maximum passphrase length. Default=40")
    args = parser.parse_args()

    if args.infile:
        if not os.access(args.infile, os.R_OK):
            print("[!] Cannot access input file, exiting")
            sys.exit()

    return args

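# Illustrative example of make_phrases() below: with the default length
# limits, the raw lyric line "Don't Stop & Believe" yields three candidates:
#   "dont stop & believe", "don't stop and believe", "don't stop & believe"
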
def make_phrases(line, args):
    """
    Cleans raw lyrics into usable passphrases
    """
    clean_lines = []
    final_lines = []

    # Allow only letters, numbers, spaces, and some punctuation
    allowed_chars = re.compile("[^a-zA-Z0-9 '&]")

    # Lowercase everything and deal with common punctuation
    line = line.lower()
    line = re.sub(r'[-_]', ' ', line)

    # The following lines attempt to remove accented characters, as the
    # tool is focused on English-language passwords.
    line = re.sub('[àáâãäå]', 'a', line)
    line = re.sub('[èéêë]', 'e', line)
    line = re.sub('[ìíîï]', 'i', line)
    line = re.sub('[òóôõö]', 'o', line)
    line = re.sub('[ùúûü]', 'u', line)
    line = re.sub('[ñ]', 'n', line)

    # Get rid of any remaining special characters
    line = allowed_chars.sub('', line)

    # Shrink down multiple spaces
    line = re.sub(r'\s\s+', ' ', line)

    # If the line has an apostrophe, make a duplicate without it
    if "'" in line:
        clean_lines.append(re.sub("'", "", line))

    # Make duplicate phrases swapping "and" and "&"
    if ' and ' in line:
        clean_lines.append(re.sub(' and ', ' & ', line))
    if '&' in line:
        newline = re.sub('&', ' and ', line)
        newline = re.sub(r'\s+', ' ', newline).strip()
        clean_lines.append(newline)

    # Add what is left to the list
    clean_lines.append(line)

    # Only keep items within the acceptable length
    for item in clean_lines:
        if args.max >= len(item) >= args.min:
            final_lines.append(item)

    return final_lines

def parse_artists(args):
    """
    Return a list of song artists for parsing
    """
    whitelist = re.compile('[^a-zA-Z0-9-+]')
    artists = []

    if args.artist:
        raw_artists = [args.artist,]
    else:
        with open(args.infile, encoding="utf-8", errors="ignore") as infile:
            raw_artists = infile.readlines()

    for artist in raw_artists:
        artist = artist.replace(" ", "+")
        artist = whitelist.sub("", artist)
        if artist not in artists:
            artists.append(artist)

    return artists

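# URL shapes used by build_urls() below (the song ID is a hypothetical example):
#   artist query: https://www.lyrics.com/artist.php?name=Rob+Zombie
#   song lyrics:  https://www.lyrics.com/db-print.php?id=12345678
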
def build_urls(artist):
    """
    Creates a list of song URLs for a specific artist
    """
    not_found = "We couldn't find any artists matching your query"
    query_url = SITE + "artist.php?name=" + artist
    song_ids = []
    regex = re.compile(r'href="/lyric/(.*?)/')

    with urllib.request.urlopen(query_url) as response:
        html = response.read().decode()

    # The songs are stored by a unique ID
    song_ids = re.findall(regex, html)

    if not_found in html:
        print("[!] Artist {} not found, skipping".format(artist))
        # Clear out the "suggested" songs it finds in this scenario
        song_ids = []
    elif not song_ids:
        print("[!] No songs found for {}, skipping".format(artist))
    else:
        print("[+] Found {} songs for artist {}"
              .format(len(song_ids), artist))

    # The "print" URL shows us the easiest to decode version of the song
    url_list = [SITE + "db-print.php?id=" + id for id in song_ids]

    return url_list

def write_data(outfile, data):
    """
    Generic helper function to write text to a file
    """
    with open(outfile, "a") as open_file:
        for line in data:
            if line:
                open_file.write(line + '\n')

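# scrape_lyrics() relies on the db-print pages wrapping the lyric text in a
# <pre> block; the regex below captures the first such block and splits it
# into individual lines.
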
def scrape_lyrics(url_list):
    """
    Scrapes raw lyric data from a list of URLs
    """
    regex = re.compile(r"<pre.*?>(.*?)</pre>", re.DOTALL)
    newline = re.compile(r"\r\n|\n")
    deduped_lyrics = set()
    current = 1
    total = len(url_list)

    for url in url_list:
        print("Checking song {}/{}... \r".format(current, total), end="")
        with urllib.request.urlopen(url) as response:
            html = response.read().decode()

        lyrics = re.findall(regex, html)

        # We should always have a match... but if not, skip this url
        if not lyrics:
            print("\n[!] Found no lyrics at {}".format(url))
            continue

        lyrics = re.split(newline, lyrics[0])

        write_data(LYRIC_FILE, lyrics)
        deduped_lyrics.update(lyrics)

        current += 1

    return deduped_lyrics

def main():
    """
    Main program function
    """
    args = parse_args()
    artists = parse_artists(args)
    raw_words = set()
    final_phrases = set()

    # First, we grab all the lyrics for a given artist.
    # The scrape_lyrics function will write the raw lyrics to an output
    # file as it goes, which may come in handy if the program exits early
    # due to an error.
    for artist in artists:
        print("[+] Looking up artist {}".format(artist))
        url_list = build_urls(artist)
        if not url_list:
            continue
        raw_words.update(scrape_lyrics(url_list))

    # Now we will apply some rules to clean all the raw lyrics into a base
    # passphrase file that can be used for cracking.
    for lyric in raw_words:
        phrases = make_phrases(lyric, args)
        final_phrases.update(phrases)

    # Write out the cleaned passphrases to a file
    write_data(PASS_FILE, final_phrases)

    print("[+] All done!")
    print("")
    print("Raw lyrics: {}".format(LYRIC_FILE))
    print("Passphrases: {}".format(PASS_FILE))

if __name__ == '__main__':
    main()