Improve the process control and shutdown process
shenril committed Aug 19, 2019
1 parent 0073e9c commit 90613c8
Showing 8 changed files with 40 additions and 26 deletions.
1 change: 1 addition & 0 deletions lib/modules/attacks/bruteforce/admin.py
@@ -40,4 +40,5 @@ def process(self, start_url, crawled_urls):
             pass
         except KeyboardInterrupt:
             executor.shutdown()
+            raise
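The same one-line fix is applied in every bruteforce module below: after the executor is shut down, the KeyboardInterrupt is re-raised so it propagates to the caller instead of dying silently inside the worker loop. A minimal, self-contained sketch of the pattern (the fetch() worker and the URL list are illustrative, not taken from Sitadel):

    from concurrent.futures import ThreadPoolExecutor, as_completed

    def fetch(url):
        # Hypothetical worker; stands in for a single bruteforce request.
        return url

    def process(urls):
        with ThreadPoolExecutor(max_workers=10) as executor:
            futures = [executor.submit(fetch, u) for u in urls]
            try:
                for future in as_completed(futures):
                    future.result()
            except KeyboardInterrupt:
                # Stop the pool, then re-raise so the interrupt reaches
                # the caller and the whole scan can abort cleanly.
                executor.shutdown()
                raise

Without the re-raise, the except clause would swallow Ctrl+C and the scan would continue with the next module.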

1 change: 1 addition & 0 deletions lib/modules/attacks/bruteforce/backdoor.py
@@ -35,3 +35,4 @@ def process(self, start_url, crawled_urls):
             pass
         except KeyboardInterrupt:
             executor.shutdown()
+            raise
1 change: 1 addition & 0 deletions lib/modules/attacks/bruteforce/bdir.py
@@ -42,4 +42,5 @@ def process(self, start_url, crawled_urls):
             pass
         except KeyboardInterrupt:
             executor.shutdown()
+            raise

1 change: 1 addition & 0 deletions lib/modules/attacks/bruteforce/bfile.py
@@ -41,4 +41,5 @@ def process(self, start_url, crawled_urls):
             pass
         except KeyboardInterrupt:
             executor.shutdown()
+            raise

1 change: 1 addition & 0 deletions lib/modules/attacks/bruteforce/dir.py
@@ -43,4 +43,5 @@ def process(self, start_url, crawled_urls):
             pass
         except KeyboardInterrupt:
             executor.shutdown()
+            raise

1 change: 1 addition & 0 deletions lib/modules/attacks/bruteforce/file.py
@@ -36,4 +36,5 @@ def process(self, start_url, crawled_urls):
             pass
         except KeyboardInterrupt:
             executor.shutdown()
+            raise

1 change: 1 addition & 0 deletions lib/modules/attacks/bruteforce/log.py
@@ -36,4 +36,5 @@ def process(self, start_url, crawled_urls):
             pass
         except KeyboardInterrupt:
             executor.shutdown()
+            raise

59 changes: 33 additions & 26 deletions lib/modules/crawler/crawler.py
@@ -32,29 +32,36 @@ def parse_items(self, links):
 
 
 def crawl(url, user_agent):
-    output = Services.get("output")
-
-    # Settings for the crawler
-    settings = get_project_settings()
-    settings.set("USER_AGENT", user_agent)
-    settings.set("LOG_LEVEL", "CRITICAL")
-    settings.set("RETRY_ENABLED", False)
-    settings.set("CONCURRENT_REQUESTS", 15)
-
-    # Create the process that will perform the crawl
-    output.info("Start crawling the target website")
-    process = CrawlerProcess(settings)
-    allowed_domains.append(str(urlparse(url).hostname))
-    process.crawl(SitadelSpider, start_urls=[str(url)], allowed_domains=allowed_domains)
-    process.start()
-
-    # Clean the results
-    clean_urls = []
-    for u in urls:
-        try:
-            new_url = urlparse(u).geturl()
-            clean_urls.append(new_url)
-        except ValueError:
-            continue
-
-    return clean_urls
+    try:
+        output = Services.get("output")
+
+        # Settings for the crawler
+        settings = get_project_settings()
+        settings.set("USER_AGENT", user_agent)
+        settings.set("LOG_LEVEL", "CRITICAL")
+        settings.set("RETRY_ENABLED", False)
+        settings.set("CONCURRENT_REQUESTS", 15)
+
+        # Create the process that will perform the crawl
+        output.info("Start crawling the target website")
+        process = CrawlerProcess(settings)
+        allowed_domains.append(str(urlparse(url).hostname))
+        process.crawl(
+            SitadelSpider, start_urls=[str(url)], allowed_domains=allowed_domains
+        )
+        process.start()
+
+        # Clean the results
+        clean_urls = []
+        for u in urls:
+            try:
+                new_url = urlparse(u).geturl()
+                clean_urls.append(new_url)
+            except ValueError:
+                continue
+
+        return clean_urls
+    except KeyboardInterrupt:
+        process.stop()
+        raise
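In crawler.py the whole body of crawl() moves inside a try block, so an interrupt stops the Scrapy CrawlerProcess before being re-raised to the caller. A standalone sketch of that shutdown pattern, assuming a trivial spider (DemoSpider and its URLs are illustrative, not Sitadel's):

    import scrapy
    from scrapy.crawler import CrawlerProcess

    class DemoSpider(scrapy.Spider):
        # Hypothetical spider used only to illustrate the shutdown pattern.
        name = "demo"
        start_urls = ["http://example.com"]

        def parse(self, response):
            yield {"url": response.url}

    def crawl():
        process = CrawlerProcess({"LOG_LEVEL": "CRITICAL"})
        try:
            process.crawl(DemoSpider)
            process.start()  # blocks until the crawl finishes
        except KeyboardInterrupt:
            # Ask Scrapy to stop its crawlers cleanly, then re-raise
            # so the interrupt still reaches the caller.
            process.stop()
            raise

    if __name__ == "__main__":
        crawl()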
