Skip to content

Commit

Permalink
refactor: 将网络请求提到 apis 并删去 Modrinth Version slug 字段
Browse files Browse the repository at this point in the history
  • Loading branch information
z0z0r4 committed Feb 1, 2025
1 parent 7b7d242 commit 2ff74b3
Show file tree
Hide file tree
Showing 9 changed files with 371 additions and 256 deletions.
Empty file added mcim_sync/apis/__init__.py
Empty file.
61 changes: 61 additions & 0 deletions mcim_sync/apis/curseforge.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
from typing import List, Optional, Union

from mcim_sync.utils.network import request
from mcim_sync.config import Config

config = Config.load()

API = config.curseforge_api
HEADERS = {
"x-api-key": config.curseforge_api_key,
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 Edg/116.0.1938.54",
}


def get_mod_files(modId: int, index: int, pageSize: int) -> dict:
    """Fetch one page of a mod's file listing from the CurseForge API.

    Returns the raw JSON response of GET /v1/mods/{modId}/files
    (pagination metadata included).
    """
    query = {"index": index, "pageSize": pageSize}
    response = request(f"{API}/v1/mods/{modId}/files", headers=HEADERS, params=query)
    return response.json()


def get_mod(modId: int) -> dict:
    """Fetch a single mod's info via GET /v1/mods/{modId}.

    Returns only the "data" payload of the response.
    """
    response = request(f"{API}/v1/mods/{modId}", headers=HEADERS)
    return response.json()["data"]


def get_mutil_mods_info(modIds: List[int]) -> List[dict]:
    """Batch-fetch info for multiple mods via POST /v1/mods.

    Args:
        modIds: CurseForge mod ids to look up.

    Returns:
        The "data" list from the API response (one dict per resolved mod).
    """
    # Return annotation added for consistency with the other helpers here.
    data = {"modIds": modIds}
    res = request(
        method="POST", url=f"{API}/v1/mods", json=data, headers=HEADERS
    ).json()["data"]
    return res


def get_mutil_files(fileIds: List[int]) -> List[dict]:
    """Batch-fetch file info via POST /v1/mods/files.

    Args:
        fileIds: CurseForge file ids to look up.

    Returns:
        The "data" list from the API response (one dict per resolved file).
    """
    # Return annotation added for consistency with the other helpers here.
    data = {"fileIds": fileIds}
    res = request(
        method="POST", url=f"{API}/v1/mods/files", json=data, headers=HEADERS
    ).json()["data"]
    return res


def get_mutil_fingerprints(fingerprints: List[int], gameId: int = 432) -> dict:
    """Batch-match file fingerprints via POST /v1/fingerprints/{gameId}.

    Args:
        fingerprints: fingerprint values to match.
        gameId: CurseForge game id; defaults to 432 (the value previously
            hard-coded in the URL), so existing callers are unaffected.

    Returns:
        The "data" dict from the API response (contains e.g. "exactMatches",
        as used by the checker module).
    """
    res = request(
        method="POST",
        url=f"{API}/v1/fingerprints/{gameId}",
        headers=HEADERS,
        json={"fingerprints": fingerprints},
    ).json()["data"]
    return res


def get_categories(
    gameId: int = 432, classId: Optional[int] = None, classOnly: Optional[bool] = None
) -> List[dict]:
    """Fetch the category list via GET /v1/categories.

    NOTE(review): classOnly is only sent when classId is None (elif) —
    presumably the two filters are mutually exclusive; confirm intended.
    """
    query: dict = {"gameId": gameId}
    if classId is not None:
        query["classId"] = classId
    elif classOnly:
        query["classOnly"] = classOnly
    response = request(f"{API}/v1/categories", params=query, headers=HEADERS)
    return response.json()["data"]
120 changes: 120 additions & 0 deletions mcim_sync/apis/modrinth.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
from typing import List, Optional, Union
import json

from mcim_sync.utils.network import request
from mcim_sync.config import Config

config = Config.load()
API = config.modrinth_api


def get_project_all_version(project_id: str) -> List[dict]:
    """Fetch every version of a Modrinth project.

    Args:
        project_id: Modrinth project id (or slug).

    Returns:
        The list of version dicts from GET /v2/project/{id}/version.
    """
    # The large commented-out sync/CDN-caching block that previously followed
    # the return (unreachable dead code) has been removed; the model
    # submission logic now lives in the sync layer.
    res = request(f"{API}/v2/project/{project_id}/version").json()
    return res


def get_project(project_id: str) -> dict:
    """Fetch a single Modrinth project via GET /v2/project/{id}.

    Args:
        project_id: Modrinth project id (or slug).

    Returns:
        The raw project dict as returned by the API.
    """
    # Dead commented-out model-submission code removed; this helper is now a
    # pure API accessor.
    res = request(f"{API}/v2/project/{project_id}").json()
    return res

def get_mutil_projects_info(project_ids: List[str]) -> List[dict]:
    """Batch-fetch project info via GET /v2/projects.

    The ids are passed as a JSON-encoded array in the "ids" query parameter,
    as the Modrinth API expects.
    """
    encoded_ids = json.dumps(project_ids)
    response = request(f"{API}/v2/projects", params={"ids": encoded_ids})
    return response.json()


def get_multi_versions_info(version_ids: List[str]) -> List[dict]:
    """Batch-fetch version info via GET /v2/versions.

    The ids are passed as a JSON-encoded array in the "ids" query parameter.
    """
    encoded_ids = json.dumps(version_ids)
    response = request(f"{API}/v2/versions", params={"ids": encoded_ids})
    return response.json()


def get_multi_hashes_info(hashes: List[str], algorithm: str) -> dict:
    """Look up version files by hash via POST /v2/version_files.

    Returns the response dict keyed by hash.
    """
    payload = {"hashes": hashes, "algorithm": algorithm}
    response = request(method="POST", url=f"{API}/v2/version_files", json=payload)
    return response.json()


def get_categories() -> List[dict]:
    """Fetch the category tag list via GET /v2/tag/category.

    Returns:
        The list of category dicts as returned by the API.
    """
    # Dead commented-out database-submission code removed; this helper is a
    # pure API accessor.
    res = request(f"{API}/v2/tag/category").json()
    return res


def get_loaders() -> List[dict]:
    """Fetch the loader tag list via GET /v2/tag/loader.

    Returns:
        The list of loader dicts as returned by the API.
    """
    # Dead commented-out database-submission code removed; this helper is a
    # pure API accessor.
    res = request(f"{API}/v2/tag/loader").json()
    return res


def get_game_versions() -> List[dict]:
    """Fetch the game-version tag list via GET /v2/tag/game_version.

    Returns:
        The list of game-version dicts as returned by the API.
    """
    # Dead commented-out database-submission code removed; this helper is a
    # pure API accessor.
    res = request(f"{API}/v2/tag/game_version").json()
    return res
52 changes: 27 additions & 25 deletions mcim_sync/checker/curseforge.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
from typing import Union, List, Set
from odmantic import query
import datetime
import time

from mcim_sync.database.mongodb import sync_mongo_engine, raw_mongo_client
from mcim_sync.utils.loger import log
Expand Down Expand Up @@ -30,25 +28,26 @@
def check_curseforge_data_updated(mods: List[Mod]) -> Set[int]:
mod_date = {mod.id: {"sync_date": mod.dateModified} for mod in mods}
expired_modids: Set[int] = set()
mod_info = fetch_mutil_mods_info(modIds=[mod.id for mod in mods])
with ModelSubmitter() as submitter:
for mod in mod_info:
submitter.add(Mod(**mod))
modid = mod["id"]
mod_date[modid]["source_date"] = mod["dateModified"]
sync_date: datetime.datetime = mod_date[modid]["sync_date"].replace( # type: ignore
tzinfo=None
)
dateModified_date = datetime.datetime.fromisoformat(
mod["dateModified"]
).replace(tzinfo=None)
if int(sync_date.timestamp()) == int(dateModified_date.timestamp()):
log.debug(f"Mod {modid} is not updated, pass!")
else:
expired_modids.add(modid)
log.debug(
f"Mod {modid} is updated {sync_date.isoformat(timespec='seconds')} -> {dateModified_date.isoformat(timespec='seconds')}!"
mods_info = fetch_mutil_mods_info(modIds=[mod.id for mod in mods])
if mods_info is not None:
with ModelSubmitter() as submitter:
for mod in mods_info:
submitter.add(Mod(**mod))
modid = mod["id"]
mod_date[modid]["source_date"] = mod["dateModified"]
sync_date: datetime.datetime = mod_date[modid]["sync_date"].replace( # type: ignore
tzinfo=None
)
dateModified_date = datetime.datetime.fromisoformat(
mod["dateModified"]
).replace(tzinfo=None)
if int(sync_date.timestamp()) == int(dateModified_date.timestamp()):
log.debug(f"Mod {modid} is not updated, pass!")
else:
expired_modids.add(modid)
log.debug(
f"Mod {modid} is updated {sync_date.isoformat(timespec='seconds')} -> {dateModified_date.isoformat(timespec='seconds')}!"
)

return expired_modids

Expand All @@ -66,7 +65,8 @@ def check_curseforge_modids_available():
for i in range(0, len(modids), CURSEFORGE_LIMIT_SIZE):
chunk = modids[i : i + CURSEFORGE_LIMIT_SIZE]
info = fetch_mutil_mods_info(modIds=chunk)
available_modids.extend([mod["id"] for mod in info])
if info is not None:
available_modids.extend([mod["id"] for mod in info])
return list(set(available_modids))


Expand All @@ -82,7 +82,8 @@ def check_curseforge_fileids_available():
for i in range(0, len(fileids), CURSEFORGE_LIMIT_SIZE):
chunk = fileids[i : i + CURSEFORGE_LIMIT_SIZE]
info = fetch_mutil_files(fileIds=chunk)
available_modids.extend([file["modId"] for file in info])
if info is not None:
available_modids.extend([file["modId"] for file in info])
return list(set(available_modids))


Expand All @@ -98,9 +99,10 @@ def check_curseforge_fingerprints_available():
for i in range(0, len(fingerprints), CURSEFORGE_LIMIT_SIZE):
chunk = fingerprints[i : i + CURSEFORGE_LIMIT_SIZE]
info = fetch_mutil_fingerprints(fingerprints=chunk)
available_modids.extend(
[fingerprint["file"]["modId"] for fingerprint in info["exactMatches"]]
)
if info is not None:
available_modids.extend(
[fingerprint["file"]["modId"] for fingerprint in info["exactMatches"]]
)
return list(set(available_modids))


Expand Down
62 changes: 32 additions & 30 deletions mcim_sync/checker/modrinth.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,10 @@
from typing import Union, List, Set
from odmantic import query
import datetime
import time

from mcim_sync.database.mongodb import sync_mongo_engine, raw_mongo_client
from mcim_sync.utils.loger import log
from mcim_sync.config import Config
from mcim_sync.models.database.modrinth import Project
from mcim_sync.sync.modrinth import fetch_mutil_projects_info
from mcim_sync.utils.model_submitter import ModelSubmitter

from mcim_sync.queues.modrinth import (
Expand All @@ -34,35 +31,37 @@ def check_modrinth_data_updated(projects: List[Project]) -> Set[str]:
project.id: {"sync_date": project.updated, "versions": project.versions}
for project in projects
}
info = fetch_mutil_projects_info(project_ids=[project.id for project in projects])
expired_project_ids: Set[str] = set()
with ModelSubmitter() as submitter:
for project in info:
submitter.add(Project(**project))
project_id = project["id"]
sync_date: datetime.datetime = project_info[project_id][
"sync_date"
].replace(tzinfo=None)
project_info[project_id]["source_date"] = project["updated"]
updated_date = datetime.datetime.fromisoformat(project["updated"]).replace(
tzinfo=None
)
if int(sync_date.timestamp()) == int(updated_date.timestamp()):
if project_info[project_id]["versions"] != project["versions"]:
info = fetch_mutil_projects_info(project_ids=[project.id for project in projects])
if info is not None:
with ModelSubmitter() as submitter:
for project in info:
submitter.add(Project(**project))
project_id = project["id"]
sync_date: datetime.datetime = project_info[project_id][
"sync_date"
].replace(tzinfo=None)
project_info[project_id]["source_date"] = project["updated"]
updated_date = datetime.datetime.fromisoformat(project["updated"]).replace(
tzinfo=None
)
if int(sync_date.timestamp()) == int(updated_date.timestamp()):
if project_info[project_id]["versions"] != project["versions"]:
log.debug(
f"Project {project_id} version count is not completely equal, some version were deleted, sync it!"
)
expired_project_ids.add(project_id)
else:
log.debug(f"Project {project_id} is not updated, pass!")
else:
expired_project_ids.add(project_id)
log.debug(
f"Project {project_id} version count is not completely equal, some version were deleted, sync it!"
f"Project {project_id} is updated {sync_date.isoformat(timespec='seconds')} -> {updated_date.isoformat(timespec='seconds')}!"
)
expired_project_ids.add(project_id)
else:
log.debug(f"Project {project_id} is not updated, pass!")
else:
expired_project_ids.add(project_id)
log.debug(
f"Project {project_id} is updated {sync_date.isoformat(timespec='seconds')} -> {updated_date.isoformat(timespec='seconds')}!"
)

return expired_project_ids


# check modrinth_project_ids queue
def check_modrinth_project_ids_available():
"""
Expand All @@ -75,7 +74,8 @@ def check_modrinth_project_ids_available():
for i in range(0, len(project_ids), MODRINTH_LIMIT_SIZE):
chunk = project_ids[i : i + MODRINTH_LIMIT_SIZE]
info = fetch_mutil_projects_info(project_ids=chunk)
available_project_ids.extend([project["id"] for project in info])
if info is not None:
available_project_ids.extend([project["id"] for project in info])
return list(set(available_project_ids))


Expand All @@ -91,7 +91,8 @@ def check_modrinth_version_ids_available():
for i in range(0, len(version_ids), MODRINTH_LIMIT_SIZE):
chunk = version_ids[i : i + MODRINTH_LIMIT_SIZE]
info = fetch_multi_versions_info(version_ids=chunk)
available_project_ids.extend([version["project_id"] for version in info])
if info is not None:
available_project_ids.extend([version["project_id"] for version in info])
return list(set(available_project_ids))


Expand All @@ -109,7 +110,8 @@ def check_modrinth_hashes_available():
for i in range(0, len(hashes), MODRINTH_LIMIT_SIZE):
chunk = hashes[i : i + MODRINTH_LIMIT_SIZE]
info = fetch_multi_hashes_info(hashes=chunk, algorithm=algorithm)
available_project_ids.extend([hash["project_id"] for hash in info.values()])
if info is not None:
available_project_ids.extend([hash["project_id"] for hash in info.values()])
return list(set(available_project_ids))


Expand All @@ -121,4 +123,4 @@ def check_new_project_ids(project_ids: List[str]) -> List[str]:
{"_id": {"$in": project_ids}}, {"_id": 1}
)
found_project_ids = [project["_id"] for project in find_result]
return list(set(project_ids) - set(found_project_ids))
return list(set(project_ids) - set(found_project_ids))
1 change: 0 additions & 1 deletion mcim_sync/models/database/modrinth.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,6 @@ class FileInfo(BaseModel):
class Version(Model):
id: str = Field(primary_field=True, index=True)
project_id: str = Field(index=True)
slug: Optional[str] = None
name: Optional[str] = None
version_number: Optional[str] = None
changelog: Optional[str] = None
Expand Down
Loading

0 comments on commit 2ff74b3

Please sign in to comment.