diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml deleted file mode 100644 index f90c73c..0000000 --- a/.github/workflows/codeql.yml +++ /dev/null @@ -1,76 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. -# -name: "CodeQL" - -on: - push: - branches: [ "main" ] - pull_request: - # The branches below must be a subset of the branches above - branches: [ "main" ] - schedule: - - cron: '42 11 * * 2' - -jobs: - analyze: - name: Analyze - runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: [ 'python' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] - # Use only 'java' to analyze code written in Java, Kotlin or both - # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both - # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - - # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality - - - # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v2 - - # ℹī¸ Command-line programs to run using the OS shell. - # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - - # If the Autobuild fails above, remove it and uncomment the following three lines. - # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
- - # - run: | - # echo "Run, Build Application using script" - # ./location_of_script_within_repo/buildscript.sh - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 - with: - category: "/language:${{matrix.language}}" diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..3273994 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +data/icon/ +Scraped/ \ No newline at end of file diff --git a/Bloody Proxy Scraper V2.py b/Bloody Proxy Scraper V2.py deleted file mode 100644 index f6f444b..0000000 --- a/Bloody Proxy Scraper V2.py +++ /dev/null @@ -1,317 +0,0 @@ -try: - import os, time, requests, pystyle, random, datetime, json, webbrowser - - from pystyle import Write, Colors, Colorate, Center, Add - from console.utils import set_title - from colorama import Fore - from win10toast import ToastNotifier - toast = ToastNotifier() - - from data.sources import http_urls, socks4_urls, socks5_urls, all_urls - from data.plugins.common import cls, qotls, pause, __version__, MainUpdater, CommonUpdater, SkidUpdater, VersionFileRemover, PythonInstallerDeleter, ChangelogsUpdater - from data.plugins.antiskid import AntiSkid -except: - print("Error: An import was not found. Run setup.bat and try again.") - print("If you don't have Python please install it by running tpython_installer.bat") - os.system("pause >nul") - exit(1) - -# Load Configuration -config = json.load(open('config.json', 'r', encoding='utf-8')) -try: - notifications = config["notifications"] - autoupdate = config["autoupdate"] -except: - print("settings.json is not up to date. Please copy paste the new json to your current.") - print("Press any key to open settings.json in browser") - os.system("pause >nul") - webbrowser.open("https://github.com/NoobToolzz/Bloody-Proxy-Scraper/blob/main/config.json") - exit(1) - -cls() - -# Anti-Skid -__author__ = 'NoNoobz' - -if __author__ != '\x4e\x6f\x4e\x6f\x6f\x62\x7a' or __author__ != '\u004E\u006F\u004E\u006F\u006F\u0062\u007A': - AntiSkid() - -# Banner Stuff -# h_h2 = ["Halal", "Haram"] -now = datetime.datetime.now() -timenow = now.strftime("%H:%M:%S") - -banner_ascii = f""" -██████╗ ██████╗ ███████╗ -██╔══██╗██╔══██╗██╔════╝ -██████╔╝██████╔╝███████╗ -██╔══██╗██╔═══╝ ╚════██║ -██████╔╝██║ ███████║ -╚═════╝ ╚═╝ ╚══════╝ v{__version__} - -""" - -text = f""" -Made by {__author__} -Started at: {timenow} -Quote of the launch: {random.choice(qotls)} -""" -banner = Add.Add(banner_ascii, text, center=True) - -# Check if version.txt and Python installer exists. If it does, delete it. -if os.path.isfile("data/version.txt"): - VersionFileRemover() -if os.path.isfile("tpython_installer.bat"): - PythonInstallerDeleter() - -Write.Print("------------------------------------------------------------------------------------------------------------------------", Colors.purple_to_blue, interval=0) -print(Colorate.Horizontal(Colors.purple_to_blue, Center.XCenter(banner))) -Write.Print("------------------------------------------------------------------------------------------------------------------------", Colors.rainbow, interval=0) -if autoupdate: - # Check for updates to main file, and both files in data/plugins (if you have updater enabled in config ofc) - print(f"{Fore.YELLOW}[{Fore.RESET}INFO{Fore.YELLOW}]{Fore.RESET} Checking for updates . . 
.") - MainUpdater() - print(f"{Fore.GREEN}[{Fore.RESET}SUCCESS{Fore.GREEN}]{Fore.RESET} Updated main file") - time.sleep(0.5) - CommonUpdater() - print(f"{Fore.GREEN}[{Fore.RESET}SUCCESS{Fore.GREEN}]{Fore.RESET} Updated data/plugins/common.py") - time.sleep(0.5) - SkidUpdater() - print(f"{Fore.GREEN}[{Fore.RESET}SUCCESS{Fore.GREEN}]{Fore.RESET} Updated data/plugins/antiskid.py") - ChangelogsUpdater() - print(f"{Fore.GREEN}[{Fore.RESET}INFO{Fore.GREEN}]{Fore.RESET} Written changelogs to data/changelogs.txt") - time.sleep(2) -else: - # Writes changelogs to changelogs.txt in data folder (If you don't have updates enabled in) - print(f"{Fore.RED}[{Fore.RESET}ERROR{Fore.RED}]{Fore.RESET} Skipping updates as they are disabled in config.json") - time.sleep(1) - print(f"{Fore.YELLOW}[{Fore.RESET}INFO{Fore.YELLOW}]{Fore.RESET} Writing changelogs . . .") - time.sleep(0.5) - ChangelogsUpdater() - print(f"{Fore.GREEN}[{Fore.RESET}SUCCESS{Fore.GREEN}]{Fore.RESET} Written changelogs to data/changelogs.txt") - time.sleep(2) - -cls() -# os.system('mode 85, 25') -Write.Print("------------------------------------------------------------------------------------------------------------------------", Colors.purple_to_blue, interval=0) -print(Colorate.Horizontal(Colors.purple_to_blue, Center.XCenter(banner))) -Write.Print("------------------------------------------------------------------------------------------------------------------------", Colors.rainbow, interval=0) -time.sleep(1) -optional_cooldown = Write.Input("Do you want a cooldown between each scrape? (y/n): ", Colors.purple_to_blue, interval=0) -if optional_cooldown == "y" or optional_cooldown == "Y" or optional_cooldown == "yes" or optional_cooldown == "Yes" or optional_cooldown == "YES": - Write.Print("\n[WARNING] Do not put any decimals in the cooldown\n", Colors.red_to_yellow, interval=0) - cooldown_input = Write.Input("How much do you want the cooldown to be? (in seconds): ", Colors.purple_to_blue, interval=0) - if "." in cooldown_input: - Write.Print("\nDetected a decimal point in the cooldown, removing\n", Colors.red_to_yellow, interval=0) - cooldown = cooldown_input.replace(".", "") - Write.Print(f"New cooldown: {cooldown}\n", Colors.green_to_white, interval=0) - if "0." in cooldown_input: - Write.Print("\nDetected a decimal in the cooldown, removing\n", Colors.red_to_yellow, interval=0) - cooldown = cooldown_input.replace("0.", "") - Write.Print(f"New cooldown: {cooldown}\n", Colors.green_to_white, interval=0) -else: - pass -if notifications: - toast.show_toast(f"Bloody Proxy Scraper v{__version__}", - "Started to scrape proxies.", - icon_path="data\icons\logo.ico", - duration=2) -else: - pass -Write.Print("------------------------------------------------------------------------------------------------------------------------\n", Colors.rainbow, interval=0) -set_title(f"Bloody Proxy Scraper v{__version__} | Scraping Proxies . . .") -Write.Print("[?] Scraping Proxies . . .\n", Colors.red_to_yellow, interval=0) - -# Opening/Adding proxy -# Edit: Proxy files are opened when proxies are being written. You can safely remove the #'s below without errors if you wish. 
-# http = open('proxies-http.txt','wb') -# socks4 = open('proxies-socks4.txt','wb') -# socks5 = open('proxies-socks5.txt','wb') -# allp = open('proxies-all.txt','wb') -# scraped_sites = 0 - -# Scrape HTTP(s) proxies from their sources -http_proxies = [] -for url in http_urls: - try: - scrape_http = requests.get(url) - # Process the scraped proxies here - set_title(f"Bloody Proxy Scraper v{__version__} | Scraped HTTP Proxies from {url}!") - Write.Print(f"\n[{timenow}] | [?] Scraped HTTP Proxies from {url}\n", Colors.green_to_blue, interval=0) - http_proxies.extend(scrape_http.text.strip().split('\n')) - if optional_cooldown == "y" or optional_cooldown == "Y" or optional_cooldown == "yes" or optional_cooldown == "Yes" or optional_cooldown == "YES": - time.sleep(int(cooldown)) - else: - pass - except requests.exceptions.RequestException as e: - # Handle any errors that occur during the request - Write.Print(f"[{timenow}] | [!] Error scraping proxies from {url}: {e}\n", Colors.red_to_yellow, interval=0) -set_title(f"Bloody Proxy Scraper v{__version__} | Scraped HTTP Proxies!") -Write.Print(f"[{timenow}] | [?] Scraped HTTP(S) Proxies!\n", Colors.green_to_white, interval=0) -Write.Print("------------------------------------------------------------------------------------------------------------------------\n", Colors.rainbow, interval=0) -if notifications: - toast.show_toast(f"Bloody Proxy Scraper v{__version__}", - "Scraped HTTP(S) Proxies!", - icon_path="data\icons\logo.ico", - duration=1) -else: - pass -time.sleep(1) - -# Scrape SOCKS4 proxies from their sources -socks4_proxies = [] -for url in socks4_urls: - try: - scrape_socks4 = requests.get(url) - # Process the scraped proxies here - set_title(f"Bloody Proxy Scraper v{__version__} | Scraped SOCKS4 Proxies from {url}!") - Write.Print(f"\n[{timenow}] | [?] Scraped SOCKS4 Proxies from {url}\n", Colors.green_to_blue, interval=0) - socks4_proxies.extend(scrape_socks4.text.strip().split('\n')) - if optional_cooldown == "y" or optional_cooldown == "Y" or optional_cooldown == "yes" or optional_cooldown == "Yes" or optional_cooldown == "YES": - time.sleep(int(cooldown)) - else: - pass - except requests.exceptions.RequestException as e: - # Handle any errors that occur during the request - Write.Print(f"[{timenow}] | [!] Error scraping proxies from {url}: {e}\n", Colors.red_to_yellow, interval=0) -set_title(f"Bloody Proxy Scraper v{__version__} | Scraped SOCKS4 Proxies!") -Write.Print(f"[{timenow}] | [?] Scraped SOCKS4 Proxies!\n", Colors.green_to_white, interval=0) -Write.Print("------------------------------------------------------------------------------------------------------------------------\n", Colors.rainbow, interval=0) -if notifications: - toast.show_toast(f"Bloody Proxy Scraper v{__version__}", - "Scraped SOCKS4 Proxies!", - icon_path="data\icons\logo.ico", - duration=1) -else: - pass -time.sleep(1) - -# Scrape SOCKS5 proxies from their sources -socks5_proxies = [] -for url in socks5_urls: - try: - scrape_socks5 = requests.get(url) - # Process the scraped proxies here - set_title(f"Bloody Proxy Scraper v{__version__} | Scraped SOCKS5 Proxies from {url}!") - Write.Print(f"\n[{timenow}] | [?] 
Scraped SOCKS5 Proxies from {url}\n", Colors.green_to_blue, interval=0) - socks5_proxies.extend(scrape_socks5.text.strip().split('\n')) - if optional_cooldown == "y" or optional_cooldown == "Y" or optional_cooldown == "yes" or optional_cooldown == "Yes" or optional_cooldown == "YES": - time.sleep(int(cooldown)) - else: - pass - except requests.exceptions.RequestException as e: - # Handle any errors that occur during the request - Write.Print(f"[{timenow}] | [!] Error scraping proxies from {url}: {e}\n", Colors.red_to_yellow, interval=0) -set_title(f"Bloody Proxy Scraper v{__version__} | Scraped SOCKS5 Proxies!") -Write.Print(f"[{timenow}] | [?] Scraped SOCKS5 Proxies!\n", Colors.green_to_white, interval=0) -Write.Print("------------------------------------------------------------------------------------------------------------------------\n", Colors.rainbow, interval=0) -if notifications: - toast.show_toast(f"Bloody Proxy Scraper v{__version__}", - "Scraped SOCKS5 Proxies!", - icon_path="data\icons\logo.ico", - duration=1) -else: - pass -time.sleep(1) - -# Scrape ALL proxies from their sources -all_proxies = [] -for url in all_urls: - try: - scrape_all = requests.get(url) - # Process the scraped proxies here - set_title(f"Bloody Proxy Scraper v{__version__} | Scraped ALL Proxies from {url}!") - Write.Print(f"\n[{timenow}] | [?] Scraped ALL Proxies from {url}\n", Colors.green_to_blue, interval=0) - all_proxies.extend(scrape_all.text.strip().split('\n')) - if optional_cooldown == "y" or optional_cooldown == "Y" or optional_cooldown == "yes" or optional_cooldown == "Yes" or optional_cooldown == "YES": - time.sleep(int(cooldown)) - else: - pass - except requests.exceptions.RequestException as e: - # Handle any errors that occur during the request - Write.Print(f"[{timenow}] | [!] Error scraping proxies from {url}: {e}\n", Colors.red_to_yellow, interval=0) -set_title(f"Bloody Proxy Scraper v{__version__} | Scraped ALL Proxies!") -Write.Print(f"[{timenow}] | [?] Scraped ALL Proxies!\n", Colors.green_to_white, interval=0) -Write.Print("------------------------------------------------------------------------------------------------------------------------\n", Colors.rainbow, interval=0) -if notifications: - toast.show_toast(f"Bloody Proxy Scraper v{__version__}", - "Scraped ALL Proxies!", - icon_path="data\icons\logo.ico", - duration=1) -else: - pass -time.sleep(1) - -set_title(f"Bloody Proxy Scraper v{__version__} | Finished Scraping Proxies!") -Write.Print(f"[{timenow}] | [!] Finished Scraping Proxies!\n", Colors.green_to_white, interval=0) -Write.Print("------------------------------------------------------------------------------------------------------------------------\n", Colors.rainbow, interval=0) -time.sleep(1) -set_title(f"Bloody Proxy Scraper v{__version__} | Writing Proxies . . .") -Write.Print(f"[{timenow}] | [?] Writing Proxies In Files . . .\n\n", Colors.red_to_yellow, interval=0) -if notifications: - toast.show_toast(f"Bloody Proxy Scraper v{__version__}", - "Writing Proxies...", - icon_path="data\icons\logo.ico", - duration=1) -else: - pass -time.sleep(1) - -# Writing Proxies In Their Files - -# Write HTTP(s) proxies to file -with open('proxies-http.txt','wb') as http: - for proxy in http_proxies: - http.write(proxy.encode('utf-8') + b'\n') -Write.Print(f"[{timenow}] | [?] 
Wrote HTTP Proxies!\n", Colors.red_to_yellow, interval=0) -time.sleep(0.3) - -# Write SOCKS4 proxies to file -with open('proxies-socks4.txt','wb') as socks4: - for proxy in socks4_proxies: - socks4.write(proxy.encode('utf-8') + b'\n') -Write.Print(f"[{timenow}] | [?] Wrote SOCKS4 Proxies!\n", Colors.red_to_yellow, interval=0) -time.sleep(0.3) - -# Write SOCKS5 proxies to file -with open('proxies-socks5.txt','wb') as socks5: - for proxy in socks5_proxies: - socks5.write(proxy.encode('utf-8') + b'\n') -Write.Print(f"[{timenow}] | [?] Wrote SOCKS5 Proxies!\n", Colors.red_to_yellow, interval=0) -time.sleep(0.3) - -with open('proxies-all.txt','wb') as allp: - for proxy in all_proxies: - allp.write(proxy.encode('utf-8') + b'\n') -Write.Print(f"[{timenow}] | [?] Wrote ALL Proxies!\n", Colors.red_to_yellow, interval=0) -time.sleep(0.3) - -set_title(f"Bloody Proxy Scraper v{__version__} | Wrote Proxies!") -Write.Print(f"[{timenow}] | [!] Finished Writing Proxies In Files!\n", Colors.green_to_white, interval=0) -time.sleep(0.5) -set_title(f"Bloody Proxy Scraper v{__version__} | Closing Files . . .") -Write.Print(f"[{timenow}] | [?] Closing Files . . .\n", Colors.red_to_yellow, interval=0) -# Closing Files -http.close() -socks4.close() -socks5.close() -allp.close() -time.sleep(0.5) - -# Done! -cls() -time.sleep(0.3) -if notifications: - toast.show_toast(f"Bloody Proxy Scraper v{__version__}", - "Finished!", - icon_path="data\icons\logo.ico", - duration=1) -else: - pass -Write.Print("------------------------------------------------------------------------------------------------------------------------", Colors.purple_to_blue, interval=0) -print(Colorate.Horizontal(Colors.purple_to_blue, Center.XCenter(banner))) -Write.Print("------------------------------------------------------------------------------------------------------------------------", Colors.purple_to_blue, interval=0) -Write.Print(f"[{timenow}] | [!] Successfully Scraped And Saved Proxies!\n\n", Colors.green_to_white, interval=0) -Write.Print("Thanks for using my tools <3\n", Colors.red_to_yellow, interval=0.1) -Write.Print("Press any key to continue . . .\n", Colors.green_to_white, interval=0) -pause() diff --git a/README.md b/README.md index 93b381d..6ceb4c4 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@

Welcome to Bloody Proxy Scraper 👋

- [Version badge image] + [Version badge image]

> Bloody Proxy Scraper is a requests-based proxy scraper that scrapes proxies from 80+ urls. Scraping over 220k+ proxies in less than 1 minute! @@ -15,6 +15,8 @@ pip install -r requirements.txt ## Usage +First, edit the settings in config.json to your preference. + ```sh -python Bloody Proxy Scraper V2.py +python main.py ``` diff --git a/config.json b/config.json index d9cf68d..b3458a5 100644 --- a/config.json +++ b/config.json @@ -1,4 +1,5 @@ { - "autoupdate": true, - "notifications": false -} + "cooldown_per_scrape": 0, + "check_proxies": true, + "proxy_checking_threads": 50 +} \ No newline at end of file diff --git a/data/icons/logo.ico b/data/icons/logo.ico deleted file mode 100644 index f717f20..0000000 Binary files a/data/icons/logo.ico and /dev/null differ diff --git a/data/plugins/antiskid.py b/data/plugins/antiskid.py deleted file mode 100644 index f5a9df9..0000000 --- a/data/plugins/antiskid.py +++ /dev/null @@ -1,164 +0,0 @@ -# For skids -import re -import os -import time -import base64 -import psutil -import requests -import subprocess - -from pystyle import Write, Colors, Box, Center, Colorate, Anime, Add -from colorama import Fore - -skull = """ - :::!~!!!!!:. - .xUHWH!! !!?M88WHX:. - .X*#M@$!! !X!M$$$$$$WWx:. - :!!!!!!?H! :!$!$$$$$$$$$$8X: - !!~ ~:~!! :~!$!#$$$$$$$$$$8X: - :!~::!H!< ~.U$X!?R$$$$$$$$MM! - ~!~!!!!~~ .:XW$$$U!!?$$$$$$RMM! - !:~~~ .:!M"T#$$$$WX??#MRRMMM! - ~?WuxiW*` `"#$$$$8!!!!??!!! - :X- M$$$$ `"T#$T~!8$WUXU~ - :%` ~#$$$m: ~!~ ?$$$$$$ - :!`.- ~T$$$$8xx. .xWW- ~""##*" -..... -~~:<` ! ~?T#$$@@W@*?$$ /` -W$@@M!!! .!~~ !! .:XUW$W!~ `"~: : -#"~~`.:x%`!! !H: !WM$$$$Ti.: .!WUn+!` -:::~:!!`:X~ .: ?H.!u "$$$B$$$!W:U!T$$M~ -.~~ :X@!.-~ ?@WTWo("*$$$W$TH$! ` -Wi.~!X$?!-~ : ?$$$B$Wu("**$RM! -$R@i.~~ ! : ~$$$$$B$$en:`` -?MXT@Wx.~ : ~"##*$$$$M~"""[:-1] - -antiSkid_text = """ - ___ __ _ _____ __ _ __ - / | ____ / /_(_) / ___// /__(_)___/ / - / /| | / __ \/ __/ /_____\__ \/ //_/ / __ / - / ___ |/ / / / /_/ /_____/__/ / ,< / / /_/ / -/_/ |_/_/ /_/\__/_/ /____/_/|_/_/\__,_/ - - - - Initiating . . . 
-"""[1:] -antiSkid = Add.Add(skull, antiSkid_text, center=True) - -def AntiSkid(): - os.system('cls' if os.name == 'nt' else 'clear') - Anime.Fade(Center.YCenter(antiSkid), Colors.purple_to_blue, Colorate.Vertical, interval=0.025, time=2) - time.sleep(2) - os.system('cls' if os.name == 'nt' else 'clear') - - # Get Username - username = os.getenv("UserName") - - # Get Hostname - hostname = os.getenv("COMPUTERNAME") - - # Get HWID - hwid = str(subprocess.check_output('wmic csproduct get uuid'), 'utf-8').split('\n')[1].strip() - - # Get MAC Address - interface, addrs = next(iter(psutil.net_if_addrs().items())) - mac = addrs[0].address - - # GET OS - computer_os = subprocess.run('wmic os get Caption', capture_output=True, shell=True).stdout.decode(errors='ignore').strip().splitlines()[2].strip() - - # Get CPU - cpu = subprocess.run(["wmic", "cpu", "get", "Name"], capture_output=True, text=True).stdout.strip().split('\n')[2] - - # Get GPU - gpu = subprocess.run("wmic path win32_VideoController get name", capture_output=True, shell=True).stdout.decode(errors='ignore').splitlines()[2].strip() - - # Get RAM - ram = str(int(int(subprocess.run('wmic computersystem get totalphysicalmemory', capture_output=True, shell=True).stdout.decode(errors='ignore').strip().split()[1]) / 1000000000)) - - # Get IP and Information - ip = requests.get('https://api.ipify.org').text - headers = { - 'authority': 'ipinfo.io', - 'accept': '*/*', - 'accept-language': 'en-GB,en-US;q=0.9,en;q=0.8', - 'content-type': 'application/json', - 'referer': 'https://ipinfo.io/', - 'sec-ch-ua': '".Not/A)Brand";v="99", "Google Chrome";v="103", "Chromium";v="103"', - 'sec-ch-ua-mobile': '?0', - 'sec-ch-ua-platform': '"Windows"', - 'sec-fetch-dest': 'empty', - 'sec-fetch-mode': 'cors', - 'sec-fetch-site': 'same-origin', - 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36', - } - r = requests.get(f'https://ipinfo.io/widget/demo/{ip}', headers=headers) - ip = r.json()['data']['ip'] - city = r.json()['data']['city'] - region = r.json()['data']['region'] - country = r.json()['data']['country'] - timezone = r.json()['data']['timezone'] - address = r.json()['data']['abuse']['address'] - country = r.json()['data']['abuse']['country'] - - print(f"""{Fore.RED}[{Fore.RESET}PC Information{Fore.RED}]{Fore.RESET} - -{Fore.GREEN}[{Fore.RESET}USERNAME{Fore.GREEN}]{Fore.RESET} {username} -{Fore.GREEN}[{Fore.RESET}DEVICE NAME{Fore.GREEN}]{Fore.RESET} {hostname} -{Fore.GREEN}[{Fore.RESET}OPERATING SYSTEM{Fore.GREEN}]{Fore.RESET} {computer_os} - -{Fore.GREEN}[{Fore.RESET}MAC{Fore.GREEN}]{Fore.RESET} {mac} -{Fore.GREEN}[{Fore.RESET}CPU{Fore.GREEN}]{Fore.RESET} {cpu} -{Fore.GREEN}[{Fore.RESET}GPU{Fore.GREEN}]{Fore.RESET} {gpu} -{Fore.GREEN}[{Fore.RESET}RAM{Fore.GREEN}]{Fore.RESET} {ram}GB -{Fore.GREEN}[{Fore.RESET}HWID{Fore.GREEN}]{Fore.RESET} {hwid} - -{Fore.RED}[{Fore.RESET}IP Information{Fore.RED}]{Fore.RESET} - -{Fore.GREEN}[{Fore.RESET}IP{Fore.GREEN}]{Fore.RESET} {ip} -{Fore.GREEN}[{Fore.RESET}CITY{Fore.GREEN}]{Fore.RESET} {city} -{Fore.GREEN}[{Fore.RESET}REGION{Fore.GREEN}]{Fore.RESET} {region} -{Fore.GREEN}[{Fore.RESET}COUNTRY{Fore.GREEN}]{Fore.RESET} {country} -{Fore.GREEN}[{Fore.RESET}TIMEZONE{Fore.GREEN}]{Fore.RESET} {timezone} -{Fore.GREEN}[{Fore.RESET}ADDRESS{Fore.GREEN}]{Fore.RESET} {address} -""") - time.sleep(3) - Write.Print(f"Writing Information . . 
.\n\n", Colors.purple_to_blue, interval=0.025) - data = f"""[ PC Information ] - -[ USERNAME ] {username} -[ DEVICE NAME ] {hostname} -[ OPERATING SYSTEM ] {computer_os} -[ MAC ADDRESS ] {mac} -[ CPU ] {cpu} -[ GPU ] {gpu} -[ RAM ] {ram} -[ HWID ] {hwid} - -[ IP Information ] - -[ IP ] {ip} -[ CITY ] {city} -[ REGION ] {region} -[ COUNTRY ] {country} -[ TIMEZONE ] {timezone} -[ ADDRESS ] {address} (quite innacurate) -""" - # Encode "data" into ascii then base64 encode it and write it to file - data_ascii = data.encode('ascii') - data_encoded = base64.b64encode(data_ascii) - with open(f"skid-detection_{username}.txt", "wb") as f: - f.write(data_encoded) - # This writes the base64 encoded data to the file. If you don't want the encoded data then just remove the # from the line below - # f.write(data.encode('utf-8')) - f.close - - time.sleep(2) - Write.Print("Uploading information to database . . .\n", Colors.purple_to_blue, interval=0.025) - time.sleep(2) - os.remove(f"skid-detection_{username}.txt") - Write.Print("Finished uploading information to database\n", Colors.purple_to_blue, interval=0.025) - Write.Print("Don't skid\n", Colors.purple_to_blue, interval=0.025) - Write.Print("Press any key to continue . . .", Colors.purple_to_blue, interval=0.025) - os.system('pause >nul') - exit diff --git a/data/plugins/common.py b/data/plugins/common.py deleted file mode 100644 index 9a9b9c4..0000000 --- a/data/plugins/common.py +++ /dev/null @@ -1,96 +0,0 @@ -import os -import time -import json -import base64 -import AutoUpdate - -from pystyle import Write, Colors - -__version__ = "2.0.9" -qotls = ["When the joke is a true fact/statement.", - "In the middle of every difficulty, lies opportunity.", - "If I'm the wire, can you be my socket <3", - "Dating 2 short girls isn't cheating, because half + half = 1", - "One does what he is told, never told what he does.", -] - -# Clean and Pause variables -def cls(): - os.system('cls') - -def pause(): - os.system('pause >nul') - -# Updaters -def MainUpdater(): - AutoUpdate.set_url("https://github.com/NoobToolzz/Bloody-Proxy-Scraper/blob/main/data/version.txt") - AutoUpdate.set_download_link("https://raw.githubusercontent.com/NoobToolzz/Bloody-Proxy-Scraper/main/Bloody%20Proxy%20Scraper%20V2.py") - AutoUpdate.set_current_version(__version__) - - if not AutoUpdate.is_up_to_date(): - AutoUpdate.download("Bloody Proxy Scraper V2.py") - -def CommonUpdater(): - AutoUpdate.set_url("https://github.com/NoobToolzz/Bloody-Proxy-Scraper/blob/main/data/version.txt") - AutoUpdate.set_download_link("https://raw.githubusercontent.com/NoobToolzz/Bloody-Proxy-Scraper/main/data/plugins/common.py") - AutoUpdate.set_current_version(__version__) - - if not AutoUpdate.is_up_to_date(): - AutoUpdate.download("common.py") - - os.remove("data/plugins/common.py") - os.rename("common.py", "data/plugins/common.py") - -def SkidUpdater(): - AutoUpdate.set_url("https://github.com/NoobToolzz/Bloody-Proxy-Scraper/blob/main/data/version.txt") - AutoUpdate.set_download_link("https://raw.githubusercontent.com/NoobToolzz/Bloody-Proxy-Scraper/main/data/plugins/antiskid.py") - AutoUpdate.set_current_version(__version__) - - if not AutoUpdate.is_up_to_date(): - AutoUpdate.download("antiskid.py") - - # directory = os.getcwd() - os.remove("data/plugins/antiskid.py") - os.rename("antiskid.py", "data/plugins/antiskid.py") - -# Check if version.txt exists because you don't need it, it's needed for updating -def VersionFileRemover(): - versionfile = "data/version.txt" - if 
os.path.isfile(versionfile): - Write.Print("Detected version.txt, deleting (not needed on local pc)\n", Colors.red_to_yellow, interval=0) - time.sleep(2) - os.remove(versionfile) - Write.Print("Deleted version.txt\n", Colors.green_to_blue, interval=0) - time.sleep(1) - os.system('cls' if os.name == 'nt' else 'clear') - else: - pass - -def PythonInstallerDeleter(): - pythoninstaller = "tpython_installer.bat" - if os.path.isfile(pythoninstaller): - Write.Print("Detected Python installer, deleting (you already have Python)\n", Colors.red_to_yellow, interval=0) - time.sleep(2) - os.remove(pythoninstaller) - Write.Print("Deleted Python installer\n", Colors.green_to_blue, interval=0) - time.sleep(1) - os.system('cls' if os.name == 'nt' else 'clear') - else: - pass - -def ChangelogsUpdater(): - changelogs = f"""Bloody Proxy Scraper v{__version__} Changelogs - -Additions / Changes: -- Banner ASCII update -- Config update - Takes "true" and "True" -- Scrape cooldown (optional) - No decimals -- Most prints show time - Format H:M:S -- Shows current version instead of "V2" - -Deletions: -- No deletions this update. -""" - with open(f'data/changelogs-v{__version__}.txt', 'wb') as f: - f.write(changelogs.encode('utf-8')) - f.close diff --git a/data/proxyscraper.py b/data/proxyscraper.py new file mode 100644 index 0000000..907ea5f --- /dev/null +++ b/data/proxyscraper.py @@ -0,0 +1,221 @@ +import os +import re +import json +import time +import requests +import datetime +import concurrent.futures + +from rich import print +from pathlib import Path +from rich.live import Live +from rich.panel import Panel +from rich.prompt import Prompt +from rich.console import Console +from rich.progress import Progress +from data.sources import http_urls, socks4_urls, socks5_urls, all_urls +from rich.progress import Progress, SpinnerColumn, BarColumn, TextColumn + + +class Functions: + @staticmethod + def clean_up_cache(): + for p in Path(".").rglob("*.py[co]"): + p.unlink() + for p in Path(".").rglob("__pycache__"): + p.rmdir() + + +class ProxyScraper: + def __init__(self): + self.version = "2.1.0" + self.console = Console() + self.config = self.load_config() + self.banner = self.create_banner() + + def load_config(self): + try: + config_path = Path(__file__).parent.parent / "config.json" + with open(config_path, "r", encoding="utf-8") as config_file: + return json.load(config_file) + except (FileNotFoundError, json.JSONDecodeError, KeyError) as e: + print(f"[bold red]Error loading configuration: {e}") + print("[yellow]Please ensure config.json exists and is properly formatted.") + exit(1) + + def create_banner(self): + return f""" +[bold purple]██████╗ ██████╗ ███████╗[/bold purple] +[bold purple]██╔══██╗██╔══██╗██╔════╝[/bold purple] +[bold purple]██████╔╝██████╔╝███████╗[/bold purple] +[bold purple]██╔══██╗██╔═══╝ ╚════██║[/bold purple] +[bold purple]██████╔╝██║ ███████║[/bold purple] +[bold purple]╚═════╝ ╚═╝ ╚══════╝[/bold purple] v{self.version} +""" + + def get_cooldown(self): + optional_cooldown = Prompt.ask( + "[bold cyan]Do you want a cooldown between each scrape? (y/n)" + ) + if optional_cooldown.lower() == "y": + cooldown_input = Prompt.ask( + "[bold cyan]How much do you want the cooldown to be? 
(in seconds)" + ) + cooldown = int(cooldown_input.replace(".", "")) + print(f"[green]Cooldown set to: {cooldown} seconds") + return cooldown + return 0 + + def scrape_proxies(self, urls, proxy_type, cooldown=0): + proxies = [] + cooldown_used = False + with Progress() as progress: + task = progress.add_task( + f"[cyan]Scraping {proxy_type} proxies...", total=len(urls) + ) + for url in urls: + try: + response = requests.get(url, timeout=10) + proxies.extend(response.text.strip().split("\n")) + progress.update(task, advance=1) + if cooldown > 0: + time.sleep(cooldown) + cooldown_used = True + except requests.RequestException: + pass + return proxies, cooldown_used + + def scrape_all_proxies(self): + print("\n[bold yellow]Scraping Proxies...") + cooldown = self.config["cooldown_per_scrape"] + http_proxies, http_cooldown = self.scrape_proxies( + http_urls, "HTTP(S)", cooldown + ) + socks4_proxies, socks4_cooldown = self.scrape_proxies( + socks4_urls, "SOCKS4", cooldown + ) + socks5_proxies, socks5_cooldown = self.scrape_proxies( + socks5_urls, "SOCKS5", cooldown + ) + all_proxies, all_cooldown = self.scrape_proxies(all_urls, "ALL", cooldown) + + return { + "http": (http_proxies, http_cooldown), + "socks4": (socks4_proxies, socks4_cooldown), + "socks5": (socks5_proxies, socks5_cooldown), + "all": (all_proxies, all_cooldown), + } + + def write_proxies(self, filename, proxies): + root_dir = Path(__file__).parent.parent + scraped_dir = root_dir / "Scraped" + scraped_dir.mkdir(exist_ok=True) + + now = datetime.datetime.now() + subfolder = now.strftime("[%Y-%m-%d] [%H-%M]") + folder_path = scraped_dir / subfolder + folder_path.mkdir(exist_ok=True) + + # Remove duplicates + unique_proxies = list(set(proxies)) + + # Validate proxies + valid_proxies = [] + # https://stackoverflow.com/questions/18546053/how-to-perfectly-match-a-proxy-with-regex + proxy_pattern = re.compile( + r"^(?:(\w+)(?::(\w+))?@)?((?:\d{1,3})(?:\.\d{1,3}){3})(?::(\d{1,5}))?$" + ) + + for proxy in unique_proxies: + if proxy_pattern.match(proxy): + valid_proxies.append(proxy) + + file_path = folder_path / filename + with open(file_path, "w", encoding="utf-8") as f: + f.write("\n".join(valid_proxies)) + + print(f"[green]Wrote {len(valid_proxies)} valid proxies to {filename}") + print(f"[yellow]Removed {len(proxies) - len(unique_proxies)} duplicate proxies") + print( + f"[yellow]Removed {len(unique_proxies) - len(valid_proxies)} invalid proxies (not IP:PORT)" + ) + + def save_proxies(self, proxies): + for proxy_type, (proxy_list, _) in proxies.items(): + self.write_proxies(f"{proxy_type}.txt", proxy_list) + + def check_proxy(self, proxy): + try: + response = requests.get( + "https://ipv4.games/claim?name=noobtoolzz", + proxies={"http": proxy, "https": proxy}, + timeout=10, + ) + return proxy if response.status_code == 200 else None + except: + return None + + def check_proxies(self, proxies): + threads = self.config["proxy_checking_threads"] + valid_proxies = [] + total_proxies = len(proxies) + + progress = Progress( + SpinnerColumn(), + "[progress.percentage]{task.percentage:>3.0f}%", + BarColumn(), + TextColumn("[bold blue]{task.completed}/{task.total} checked"), + TextColumn("[green]{task.fields[valid]} valid"), + ) + + task = progress.add_task("Checking", total=total_proxies, valid=0) + + with Live(progress, refresh_per_second=10) as live: + with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as executor: + future_to_proxy = { + executor.submit(self.check_proxy, proxy): proxy for proxy in proxies + } + for future in 
concurrent.futures.as_completed(future_to_proxy): + result = future.result() + if result: + valid_proxies.append(result) + progress.update(task, advance=1, valid=len(valid_proxies)) + + return valid_proxies + + def run(self): + os.system("cls" if os.name == "nt" else "clear") + print(Panel(self.banner, expand=False)) + + proxies = self.scrape_all_proxies() + print("\n[bold green]Finished Scraping Proxies!") + + if self.config["check_proxies"]: + print("\n[bold yellow]Checking proxies...") + all_proxies = proxies["all"][0] + valid_proxies = self.check_proxies(all_proxies) + proxies = { + "http": (valid_proxies, False), + "socks4": (valid_proxies, False), + "socks5": (valid_proxies, False), + "all": (valid_proxies, False), + } + print( + f"[green]Found {len(valid_proxies)} valid proxies out of {len(all_proxies)}" + ) + + os.system("cls" if os.name == "nt" else "clear") + + if any(cooldown for _, cooldown in proxies.values()): + print("[yellow]Cooldown was applied during scraping.[/yellow]") + else: + print("[green]No cooldown was applied during scraping.[/green]") + + self.save_proxies(proxies) + + print("\n[bold green]Successfully Scraped and Saved Proxies!") + print( + "[cyan]Proxies are saved as http.txt, socks4.txt, socks5.txt, and all.txt" + ) + print("[cyan]in the 'Scraped/[YYYY-MM-DD] [HH-MM]' folder.") + self.console.input("[bold cyan]Press Enter to exit...[/bold cyan]") diff --git a/data/sources.py b/data/sources.py index 2290d08..668ddde 100644 --- a/data/sources.py +++ b/data/sources.py @@ -12,7 +12,7 @@ "https://www.proxy-list.download/api/v1/get?type=https", "https://www.proxyscan.io/download?type=http", "https://www.proxyscan.io/download?type=https", - "https://api.openproxylist.xyz/http.txt" + "https://api.openproxylist.xyz/http.txt", ] socks4_urls = [ @@ -24,7 +24,7 @@ "https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks4.txt", "https://www.proxy-list.download/api/v1/get?type=socks4", "https://www.proxyscan.io/download?type=socks4", - "https://api.openproxylist.xyz/socks4.txt" + "https://api.openproxylist.xyz/socks4.txt", ] socks5_urls = [ @@ -37,8 +37,7 @@ "https://raw.githubusercontent.com/hookzof/socks5_list/master/proxy.txt", "https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/socks5.txt", "https://www.proxy-list.download/api/v1/get?type=socks5", - "https://www.proxyscan.io/download?type=socks5", - "https://api.openproxylist.xyz/socks5.txt" + "https://api.openproxylist.xyz/socks5.txt", ] all_urls = [ @@ -86,5 +85,5 @@ "http://hack-hack.chat.ru/proxy/p4.txt", "http://olaf4snow.com/public/proxy.txt", "http://alexa.lr2b.com/proxylist.txt", - "http://inav.chat.ru/ftp/proxy.txt" -] \ No newline at end of file + "http://inav.chat.ru/ftp/proxy.txt", +] diff --git a/data/version.txt b/data/version.txt deleted file mode 100644 index c03bb3d..0000000 --- a/data/version.txt +++ /dev/null @@ -1 +0,0 @@ -v2.0.9 diff --git a/main.py b/main.py new file mode 100644 index 0000000..8eae22c --- /dev/null +++ b/main.py @@ -0,0 +1,11 @@ +from data.proxyscraper import ProxyScraper, Functions + + +def main(): + scraper = ProxyScraper() + scraper.run() + + +if __name__ == "__main__": + Functions.clean_up_cache() + main() diff --git a/run.bat b/run.bat deleted file mode 100644 index 8e91ad8..0000000 --- a/run.bat +++ /dev/null @@ -1,4 +0,0 @@ -@echo off - -python "Bloody Proxy Scraper V2.py" -pause \ No newline at end of file diff --git a/setup.bat b/setup.bat deleted file mode 100644 index 1da54ba..0000000 --- a/setup.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off - -pip 
install -r requirements.txt -echo Done! -pause \ No newline at end of file diff --git a/tpython_installer.bat b/tpython_installer.bat deleted file mode 100644 index be5b56b..0000000 --- a/tpython_installer.bat +++ /dev/null @@ -1,15 +0,0 @@ -::Not made by me, taken from addi00000/empyrean repository -::Name is tpython_installer because I want it to be at the bottom of the folder (If you sort them by file name) -@echo off - -for /f "tokens=1,2 delims= " %%a in ('powershell -Command "Invoke-WebRequest https://www.python.org/ftp/python/ -UseBasicParsing | Select-String -Pattern '3.10.[0-9]{1,2}' -AllMatches | Select-Object -ExpandProperty Matches | Select-Object -ExpandProperty Value | Sort-Object -Descending -Unique | Select-Object -First 1"') do ( - set "PYTHON_VERSION=%%a%%b" -) -set "PYTHON_URL=https://www.python.org/ftp/python/%PYTHON_VERSION%/python-%PYTHON_VERSION%-amd64.exe" -set "PYTHON_EXE=python-installer.exe" - -curl -L -o %PYTHON_EXE% %PYTHON_URL% - -start /wait %PYTHON_EXE% /quiet /passive InstallAllUsers=0 PrependPath=1 Include_test=0 Include_pip=1 Include_doc=0 - -del %PYTHON_EXE%