Release:v1.4.7 #347

Merged · 5 commits · Sep 26, 2024
CHANGELOG.md (6 changes: 5 additions & 1 deletion)

@@ -2,13 +2,17 @@

## v1.4.7

- ### 2024/09/23
+ ### 2024/09/26

- 修复部分设备本地运行软件 driver 问题(#335)
- 修复 driver 模式下新版谷歌浏览器白屏问题
- 优化对历史结果的测速
- 优化测速进度条显示

+ - Fix driver issues when running the software locally on some devices (#335)
+ - Fix the white-screen issue with newer Google Chrome versions in driver mode
+ - Optimize speed testing of historical results
+ - Optimize the speed test progress bar display

## v1.4.6

docs/config.md (2 changes: 1 addition & 1 deletion)

@@ -1,7 +1,7 @@
| Config item | Default | Description |
| --- | --- | --- |
| open_update | True | Enable updates; if disabled, only the result page service runs |
- | open_use_old_result | True | Enable use of historical update results, merged into the current update |
+ | open_use_old_result | True | Enable use of historical update results (including sources from the template and result files), merged into the current update |
| open_driver | True | Enable browser mode; turn this on if an update yields no data; uses more resources |
| open_proxy | False | Enable proxy; automatically fetches free available proxies; turn this on if an update yields no data |
| source_file | config/demo.txt | Template file path |
docs/config_en.md (66 changes: 33 additions & 33 deletions)

Large diffs are not rendered by default.

main.py (21 changes: 10 additions & 11 deletions)

@@ -15,6 +15,7 @@
get_ip_address,
convert_to_m3u,
get_result_file_content,
+ process_nested_dict,
)
from updates.subscribe import get_channels_by_subscribe_urls
from updates.multicast import get_channels_by_multicast
@@ -30,6 +31,7 @@
import shutil
import atexit
import pickle
+ import copy

app = Flask(__name__)

@@ -121,15 +123,12 @@ def pbar_update(self, name=""):
)

def get_urls_len(self, filter=False):
- def process_cache_url(url):
-     if filter and "$cache:" in url:
-         cache_part = url.split("$cache:", 1)[1]
-         return cache_part.split("?")[0]
-     return url
-
+ data = copy.deepcopy(self.channel_data)
+ if filter:
+     process_nested_dict(data, seen=set(), flag="$cache:")
processed_urls = set(
-     process_cache_url(url_info[0])
-     for channel_obj in self.channel_data.values()
+     url_info[0]
+     for channel_obj in data.values()
for url_info_list in channel_obj.values()
for url_info in url_info_list
)
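Note on the new counting path: `process_nested_dict` comes from `utils.tools` and its body is not shown in this diff; it is applied to a deep copy so the live `channel_data` is never mutated, and the set comprehension then counts the remaining distinct URLs. A minimal sketch of the idea, assuming it keeps only the first entry per `$cache:` key (the name and behavior here are assumptions, not the project's actual implementation):

```python
import copy

def process_nested_dict_sketch(data, seen, flag="$cache:"):
    # Hypothetical stand-in for utils.tools.process_nested_dict: walk the
    # nested {category: {channel: [(url, date, resolution), ...]}} structure
    # and keep only the first entry per cache key.
    for key, value in data.items():
        if isinstance(value, dict):
            process_nested_dict_sketch(value, seen, flag)
        else:
            kept = []
            for url, date, resolution in value:
                cache_key = url.split(flag, 1)[1] if flag in url else url
                if cache_key not in seen:
                    seen.add(cache_key)
                    kept.append((url, date, resolution))
            data[key] = kept

channel_data = {"News": {"CCTV-1": [("http://a/live$cache:k1", None, None),
                                    ("http://b/live$cache:k1", None, None)]}}
data = copy.deepcopy(channel_data)  # the original stays untouched
process_nested_dict_sketch(data, seen=set())
print(sum(len(v) for obj in data.values() for v in obj.values()))  # 1
```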
@@ -146,7 +145,7 @@ async def main(self):
await self.visit_page(channel_names)
self.tasks = []
channel_items_obj_items = self.channel_items.items()
- self.channel_data = append_total_data(
+ append_total_data(
channel_items_obj_items,
self.channel_data,
self.hotel_fofa_result,
@@ -155,7 +154,7 @@
self.subscribe_result,
self.online_search_result,
)
- channel_data_cache = self.channel_data
+ channel_data_cache = copy.deepcopy(self.channel_data)
self.total = self.get_urls_len(filter=True)
sort_callback = lambda: self.pbar_update(name="测速")
open_sort = config.getboolean("Settings", "open_sort")
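The switch to `copy.deepcopy` for the cache matters because the subsequent speed test rewrites `self.channel_data` in place; with a bare assignment, the "cache" was just another name for the same object. A tiny illustration of the aliasing this avoids (values made up):

```python
import copy

original = {"News": {"CCTV-1": [("http://a/live", None, None)]}}
alias = original                    # old behavior: both names share one dict
snapshot = copy.deepcopy(original)  # new behavior: an independent copy

original["News"]["CCTV-1"].clear()  # later in-place processing mutates data
print(alias["News"]["CCTV-1"])      # [] -- the alias changed too
print(snapshot["News"]["CCTV-1"])   # [('http://a/live', None, None)]
```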
@@ -190,7 +189,7 @@ async def main(self):
shutil.copy(user_final_file, result_file)
if config.getboolean("Settings", "open_use_old_result"):
if open_sort:
- channel_data_cache = get_channel_data_cache_with_compare(
+ get_channel_data_cache_with_compare(
channel_data_cache, self.channel_data
)
with open(resource_path("output/result_cache.pkl"), "wb") as file:
Binary file modified (not shown): updates/fofa/fofa_hotel_region_result.pkl
Binary file modified (not shown): updates/fofa/fofa_multicast_region_result.pkl
updates/multicast/request.py (5 changes: 3 additions & 2 deletions)

@@ -6,7 +6,7 @@
get_channel_multicast_result,
get_multicast_fofa_search_urls,
)
- from utils.tools import get_pbar_remaining, get_soup
+ from utils.tools import get_pbar_remaining, get_soup, merge_objects
from utils.config import config
from updates.proxy import get_proxy, get_proxy_next
from updates.fofa import get_channels_by_fofa
@@ -49,9 +49,10 @@ async def get_channels_by_multicast(names, callback=None):
search_region_type_result = defaultdict(lambda: defaultdict(list))
if open_multicast_fofa:
fofa_search_urls = get_multicast_fofa_search_urls()
- search_region_type_result = await get_channels_by_fofa(
+ fofa_result = await get_channels_by_fofa(
fofa_search_urls, multicast=True, callback=callback
)
+ merge_objects(search_region_type_result, fofa_result)
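Previously the fofa results replaced `search_region_type_result` outright, discarding anything gathered before the call; now they are merged in. `merge_objects` lives in `utils.tools` and its body is not part of this diff; a plausible, hedged sketch of the recursive merge it performs:

```python
from collections import defaultdict

def merge_objects_sketch(target, source):
    # Hypothetical stand-in for utils.tools.merge_objects: fold source into
    # target recursively, extending lists rather than overwriting them.
    for key, value in source.items():
        if isinstance(value, dict):
            merge_objects_sketch(target[key], value)
        else:
            target[key].extend(value)

result = defaultdict(lambda: defaultdict(list))
result["Beijing"]["Unicom"].append("http://existing/stream")
merge_objects_sketch(result, {"Beijing": {"Unicom": ["http://fofa/stream"]}})
print(result["Beijing"]["Unicom"])  # ['http://existing/stream', 'http://fofa/stream']
```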

def process_channel_by_multicast(region, type):
nonlocal proxy, open_driver, page_num, start_time
utils/channel.py (89 changes: 60 additions & 29 deletions)

@@ -3,8 +3,14 @@
check_url_by_patterns,
get_total_urls_from_info_list,
check_ipv6_support,
+ process_nested_dict,
)
+ from utils.speed import (
+     sort_urls_by_speed_and_resolution,
+     is_ffmpeg_installed,
+     format_url,
+     speed_cache,
+ )
- from utils.speed import sort_urls_by_speed_and_resolution, is_ffmpeg_installed
import os
from collections import defaultdict
import re
@@ -15,6 +21,7 @@
import asyncio
import base64
import pickle
+ import copy

log_dir = "output"
log_file = "result_new.log"
@@ -479,28 +486,26 @@ def init_info_data(data, cate, name):
data[cate] = {}
if data[cate].get(name) is None:
data[cate][name] = []
- return data


def append_data_to_info_data(info_data, cate, name, data, check=True):
"""
Append channel data to total info data
"""
- info_data = init_info_data(info_data, cate, name)
+ init_info_data(info_data, cate, name)
for url, date, resolution in data:
if (url and not check) or (url and check and check_url_by_patterns(url)):
info_data[cate][name].append((url, date, resolution))
- return info_data
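With the `return` statements gone, `init_info_data` and `append_data_to_info_data` now mutate the passed-in dict in place, so callers no longer reassign the result. An illustrative call (values made up; `check=False` skips URL pattern validation):

```python
info_data = {}
append_data_to_info_data(
    info_data, "News", "CCTV-1",
    [("http://a/live", None, "1080p")],
    check=False,
)
print(info_data["News"]["CCTV-1"])  # [('http://a/live', None, '1080p')]
```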


def append_total_data(*args, **kwargs):
"""
Append total channel data
"""
if config.getboolean("Settings", "open_keep_all"):
- return append_all_method_data_keep_all(*args, **kwargs)
+ append_all_method_data_keep_all(*args, **kwargs)
else:
- return append_all_method_data(*args, **kwargs)
+ append_all_method_data(*args, **kwargs)


def append_all_method_data(
@@ -530,7 +535,7 @@ def append_all_method_data(
) and config.getboolean("Settings", f"open_hotel") == False:
continue
name_results = get_channel_results_by_name(name, result)
- data = append_data_to_info_data(
+ append_data_to_info_data(
data,
cate,
name,
@@ -545,7 +550,7 @@
if total_channel_data_len == 0 or config.getboolean(
"Settings", "open_use_old_result"
):
- data = append_data_to_info_data(
+ append_data_to_info_data(
data,
cate,
name,
@@ -557,7 +562,6 @@
"total num:",
len(data.get(cate, {}).get(name, [])),
)
- return data


def append_all_method_data_keep_all(
@@ -586,18 +590,17 @@
) and config.getboolean("Settings", f"open_hotel") == False:
continue
for name, urls in result.items():
- data = append_data_to_info_data(data, cate, name, urls)
+ append_data_to_info_data(data, cate, name, urls)
print(name, f"{method.capitalize()} num:", len(urls))
if config.getboolean("Settings", "open_use_old_result"):
old_info_list = channel_obj.get(name, [])
- data = append_data_to_info_data(
+ append_data_to_info_data(
data,
cate,
name,
old_info_list,
)
print(name, "using old num:", len(old_info_list))
- return data


async def sort_channel_list(
@@ -622,10 +625,7 @@ async def sort_channel_list(
logging.info(
f"Name: {name}, URL: {url}, Date: {date}, Resolution: {resolution}, Response Time: {response_time} ms"
)
- data = [
-     (url, date, resolution)
-     for (url, date, resolution), _ in sorted_data
- ]
+ data.append((url, date, resolution))
except Exception as e:
logging.error(f"Error: {e}")
finally:
@@ -639,16 +639,18 @@
open_ffmpeg = config.getboolean("Settings", "open_ffmpeg")
ipv_type = config.get("Settings", "ipv_type").lower()
open_ipv6 = "ipv6" in ipv_type or "all" in ipv_type or "全部" in ipv_type
- ipv6_proxy = None
- if open_ipv6:
-     ipv6_proxy = (
-         None if check_ipv6_support() else "http://www.ipv6proxy.net/go.php?u="
-     )
+ ipv6_proxy = (
+     None
+     if not open_ipv6 or check_ipv6_support()
+     else "http://www.ipv6proxy.net/go.php?u="
+ )
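Folding the two-step assignment into a single conditional expression keeps the same behavior: the proxy is set only when IPv6 output is requested but the host lacks native IPv6 support. The decision table, as a standalone sketch (the helper name is illustrative, not the project's API):

```python
def pick_ipv6_proxy(open_ipv6, has_native_ipv6):
    # Mirrors the consolidated expression above.
    return (
        None
        if not open_ipv6 or has_native_ipv6
        else "http://www.ipv6proxy.net/go.php?u="
    )

assert pick_ipv6_proxy(False, False) is None  # IPv6 output not requested
assert pick_ipv6_proxy(True, True) is None    # native IPv6 works, no proxy
assert pick_ipv6_proxy(True, False) == "http://www.ipv6proxy.net/go.php?u="
```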
ffmpeg_installed = is_ffmpeg_installed()
if open_ffmpeg and not ffmpeg_installed:
print("FFmpeg is not installed, using requests for sorting.")
is_ffmpeg = open_ffmpeg and ffmpeg_installed
semaphore = asyncio.Semaphore(3)
+ need_sort_data = copy.deepcopy(data)
+ process_nested_dict(need_sort_data, seen=set(), flag="$cache:")
tasks = [
asyncio.create_task(
sort_channel_list(
@@ -661,18 +663,48 @@
callback=callback,
)
)
- for cate, channel_obj in data.items()
+ for cate, channel_obj in need_sort_data.items()
for name, info_list in channel_obj.items()
]
sort_results = await asyncio.gather(*tasks)
- data = {}
+ sort_data = {}
for result in sort_results:
if result:
- cate = result.get("cate")
- name = result.get("name")
- result_data = result.get("data")
- data = append_data_to_info_data(data, cate, name, result_data, False)
- return data
+ cate, name, result_data = result["cate"], result["name"], result["data"]
+ append_data_to_info_data(sort_data, cate, name, result_data, False)
+ for cate, obj in data.items():
+     for name, info_list in obj.items():
+         sort_info_list = sort_data.get(cate, {}).get(name, [])
+         sort_urls = {
+             sort_url[0].split("$")[0]
+             for sort_url in sort_info_list
+             if sort_url and sort_url[0]
+         }
+         for url, date, resolution in info_list:
+             url_rsplit = url.rsplit("$cache:", 1)
+             if len(url_rsplit) != 2:
+                 continue
+             url, cache_key = url_rsplit
+             if url in sort_urls or cache_key not in speed_cache:
+                 continue
+             cache = speed_cache[cache_key]
+             if not cache:
+                 continue
+             response_time, resolution = cache
+             if response_time and response_time != float("inf"):
+                 if resolution:
+                     url = format_url(url, resolution)
+                 append_data_to_info_data(
+                     sort_data,
+                     cate,
+                     name,
+                     [(url, date, resolution)],
+                     False,
+                 )
+                 logging.info(
+                     f"Name: {name}, URL: {url}, Date: {date}, Resolution: {resolution}, Response Time: {response_time} ms"
+                 )
+ return sort_data
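The second loop restores entries that were pruned from `need_sort_data` before testing: any URL still carrying a `$cache:` suffix whose base URL was not itself tested is rebuilt from `speed_cache`, which (per the utils/speed.py change below) now stores `(response_time, resolution)` tuples. A condensed, self-contained walkthrough of that re-expansion step with made-up values:

```python
speed_cache = {"k1": (120.5, "1080p")}  # cache_key -> (ms, resolution)
tested_urls = {"http://a/live"}         # base URLs that were speed-tested

pruned = [("http://b/live$cache:k1", None, None)]  # skipped duplicate
restored = []
for url, date, resolution in pruned:
    base, _, cache_key = url.partition("$cache:")
    if base in tested_urls or cache_key not in speed_cache:
        continue
    response_time, cached_resolution = speed_cache[cache_key]
    if response_time and response_time != float("inf"):
        restored.append((base, date, cached_resolution))
print(restored)  # [('http://b/live', None, '1080p')]
```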


def write_channel_to_file(items, data, callback=None):
@@ -752,4 +784,3 @@ def match_url(url, sort_urls):
data[cate][name] = [
info for info in data[cate][name] if match_url(info[0], new_urls)
]
- return data
utils/speed.py (29 changes: 19 additions & 10 deletions)

@@ -2,7 +2,6 @@
from time import time
import asyncio
import re
- from urllib.parse import quote
from utils.config import config
from utils.tools import is_ipv6
import subprocess
@@ -107,14 +106,23 @@ async def check_stream_speed(url_info):
if frame is None or frame == float("inf"):
return float("inf")
if resolution:
- url_info[0] = url_info[0] + f"${resolution}"
+ url_info[0] = format_url(url, resolution)
url_info[2] = resolution
return (tuple(url_info), frame)
except Exception as e:
print(e)
return float("inf")


+ def format_url(url, info):
+     """
+     Format the url
+     """
+     separator = "|" if "$" in url else "$"
+     url += f"{separator}{info}"
+     return url
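The first annotation is appended with `$`; once a `$` is already present, further annotations use `|` so the original marker stays unambiguous. For example, using the definition above:

```python
print(format_url("http://a/live", "1080p"))       # http://a/live$1080p
print(format_url("http://a/live$1080p", "IPv6"))  # http://a/live$1080p|IPv6
```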


speed_cache = {}


@@ -125,36 +133,37 @@ async def get_speed_by_info(
Get the info with speed
"""
async with semaphore:
- url, _, _ = url_info
+ url, _, resolution = url_info
url_info = list(url_info)
cache_key = None
if "$" in url:
- url, cache_info = url.split("$", 1)
+ url, cache_info = url.rsplit("$", 1)
if "cache:" in cache_info:
cache_key = cache_info.replace("cache:", "")
- url = quote(url, safe=":/?&=$[]")
+ url_is_ipv6 = is_ipv6(url)
+ if url_is_ipv6:
+     url = format_url(url, "IPv6")
+ url_info[0] = url
if cache_key in speed_cache:
- speed = speed_cache[cache_key]
+ speed = speed_cache[cache_key][0]
+ url_info[2] = speed_cache[cache_key][1]
return (tuple(url_info), speed) if speed != float("inf") else float("inf")
try:
- url_is_ipv6 = is_ipv6(url)
if ".m3u8" not in url and ffmpeg and not url_is_ipv6:
speed = await check_stream_speed(url_info)
url_speed = speed[1] if speed != float("inf") else float("inf")
+ resolution = speed[0][2] if speed != float("inf") else None
else:
if ipv6_proxy and url_is_ipv6:
url = ipv6_proxy + url
url_speed = await get_speed(url)
- if url_is_ipv6:
-     url_info[0] = url_info[0] + "$IPv6"
speed = (
(tuple(url_info), url_speed)
if url_speed != float("inf")
else float("inf")
)
if cache_key and cache_key not in speed_cache:
- speed_cache[cache_key] = url_speed
+ speed_cache[cache_key] = (url_speed, resolution)
return speed
except Exception:
return float("inf")
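Tying the pieces together: the cache value is now a `(speed, resolution)` tuple, written once per `cache_key` and consumed by the cache-hit branch above (and by `process_sort_channel_list` in utils/channel.py). A compact round-trip with illustrative values:

```python
speed_cache = {}

# First probe: record both measurements under the cache key.
speed_cache["k1"] = (85.0, "720p")

# Later hit: restore speed and resolution without re-testing the stream.
url_info = ["http://a/live", None, None]
speed, resolution = speed_cache["k1"]
url_info[2] = resolution
result = (tuple(url_info), speed) if speed != float("inf") else float("inf")
print(result)  # (('http://a/live', None, '720p'), 85.0)
```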