-
Notifications
You must be signed in to change notification settings - Fork 8
/
views.py
executable file
·127 lines (105 loc) · 4.15 KB
/
views.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
from django.shortcuts import render
# NOTE(review): HttpResponse/HttpResponseRedirect are not used in the code
# visible here — possibly used elsewhere in the file; confirm before removing.
from django.http import HttpResponse, HttpResponseRedirect
from django.http import JsonResponse
from urllib.parse import urlparse
# from urlparse import urlparse
# for sending requests and parsing response
import requests
from bs4 import BeautifulSoup
# for extracting keywords from error messages
from rake_nltk import Rake
# Constants
# HackerEarth Code Evaluation API endpoint used by home() to run user code.
RUN_URL = u'https://api.hackerearth.com/v3/code/run/'
# SECURITY NOTE(review): API secret is hardcoded and committed to source
# control — move it to an environment variable / Django settings and rotate
# the key.
CLIENT_SECRET = '41662da152e210d7610787a8596e45cda8638dde'
def get_domain(url):
    """
    Parameters:
        url[string] => uniform resource locator of website
    --------------------------------------------
    Returns:
        [string] => the network-location (domain) component of url;
        empty string when url has no scheme/netloc
    Logic:
        Parse the url and hand back only its domain part
    """
    return urlparse(url).netloc
def home(request):
    """
    Parameters:
        request[HttpRequest]
    --------------------------------------------
    Returns:
        JsonResponse => HackerEarth run result for AJAX POSTs; on compile
        errors it is augmented with 'debug_urls' and 'descriptions'
        render(request, 'init.html') [HttpResponse] => for plain GETs
        (and non-AJAX POSTs)
    Logic:
        it takes the inputted code from frontend request and sends it to hackerearth API
        if the code doesn't compile, then it finds the necessary keyword from error messages
        and searches for it on google with regex matching and suggests debug links
    """
    if request.method == 'POST':
        # POST goes here. Detect AJAX via the X-Requested-With header:
        # HttpRequest.is_ajax() was deprecated in Django 3.1 and removed in
        # 4.0; this header check is what it did internally.
        if request.headers.get('x-requested-with') == 'XMLHttpRequest':
            import re  # used only for the Google-result href filter below

            lang = request.POST.get('lang')
            source = request.POST.get('source')
            inputl = request.POST.get('input')
            # Payload for the HackerEarth run endpoint. (The original built a
            # throwaway {"lang", "source", "input"} dict first and immediately
            # overwrote it — removed as dead code.)
            data = {
                'client_secret': CLIENT_SECRET,
                'async': 0,          # synchronous run: response carries the result
                'source': source,
                'lang': lang,
                'input': inputl,
                'time_limit': 5,      # seconds
                'memory_limit': 262144,  # KB
            }
            # Post data to HackerEarth API, retrying transient failures.
            s = requests.Session()
            adapter = requests.adapters.HTTPAdapter(max_retries=5)
            s.mount("http://", adapter)
            s.mount("https://", adapter)
            r = s.post(RUN_URL, data=data)
            current_json = r.json()  # parse once, reuse (original parsed twice)
            compile_status = current_json['compile_status'].strip()
            if compile_status != 'OK':
                # Pull ranked keyword phrases out of the compiler error text,
                # dropping any phrase mentioning hackerearth itself.
                rk = Rake()
                rk.extract_keywords_from_text(compile_status)
                key_words = [
                    kw for kw in rk.get_ranked_phrases()
                    if 'hackerearth' not in kw
                ]
                # filter extra information: keep only the last two phrases,
                # highest-ranked first
                if len(key_words) >= 3:
                    key_words = key_words[-2:]
                    key_words = list(reversed(key_words))
                key_words.append(compile_status)
                links = []
                desc = []
                # Raw string avoids the invalid "\?" escape-sequence warning;
                # compiled once outside the loop instead of per find_all call.
                href_pattern = re.compile(r"(?<=/url\?q=)(htt.*://.*)")
                for word in key_words:
                    page = s.get("https://www.google.co.in/search?q=" + word)
                    soup = BeautifulSoup(page.content, 'lxml')
                    for link in soup.find_all("a", href=href_pattern):
                        debug_url = link["href"].replace("/url?q=", "").split('&')[0]
                        # Skip Google's own cached-page mirror links.
                        if 'webcache.googleusercontent.com' in debug_url:
                            continue
                        links.append(debug_url)
                        desc.append(link.text + ":" + get_domain(debug_url))
                current_json['debug_urls'] = links[:10]
                current_json['descriptions'] = desc[:10]
            return JsonResponse(current_json, safe=False)
    # A normal get request goes here
    return render(request, 'init.html')
def codeplay(request):
    """
    Parameters:
        request[HttpRequest]
    --------------------------------------------
    Returns:
        render(request, 'codeplay.html') [HttpResponse]
        => codeplay.html is returned as HttpResponse
    Logic:
        re-renders the codeplay page
    """
    template_name = 'codeplay.html'
    return render(request, template_name)