diff --git a/.flake8 b/.flake8 index a320ecb..b09dc1d 100644 --- a/.flake8 +++ b/.flake8 @@ -3,7 +3,10 @@ count = True ignore = # As of PEP 8, W503 is yielded in old-style - W503 + W503, + # Let `pylint` check this because `pyflake` (used by `flake8`) checks + # if the items in `__init__.py` are listed as `__all__` or not + F401 max-complexity = 10 diff --git a/.gitignore b/.gitignore index f0b4560..dcd75a4 100644 --- a/.gitignore +++ b/.gitignore @@ -4,9 +4,14 @@ venv/ # Cache .mypy_cache .pytest_cache +__pycache__ # Project files .idea/ +# Test files +config-temp.yaml + # Misc .wakatime-project +.exclude/ diff --git a/README.md b/README.md index 39ef747..275d464 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,15 @@ # dragalia-asset-downloader-2 [![downloader-ci]][downloader-ci-link] -[![downloader-cq-badge]][downloader-cq-link] [![downloader-lgtm-alert-badge]][downloader-lgtm-alert-link] [![downloader-lgtm-quality-badge]][downloader-lgtm-quality-link] [![downloader-lgtm-loc-badge]][downloader-lgtm-quality-link] [![downloader-time-badge]][downloader-time-link] +Main | Dev +:---: | :---: +[![downloader-cq-badge-main]][downloader-cq-link-main] | [![downloader-cq-badge-dev]][downloader-cq-link-dev] + Python scripts for downloading and pre-processing Dragalia Lost game assets. Replaces [dragalia-asset-downloader]. @@ -15,6 +18,10 @@ Replaces [dragalia-asset-downloader]. - Python 3.9 +- .NET Core 3.1+ + - Download here: https://dotnet.microsoft.com/download/dotnet-core/3.1. + - Run `dotnet` to ensure it's working. + - Dependencies listed in `requirements.txt` - Run `pip install -r requirements.txt` to install required dependencies. - Run `pip install -r requirements-dev.txt` to install required and development dependencies. @@ -26,23 +33,15 @@ Replaces [dragalia-asset-downloader]. 
[dragalia-asset-downloader]: https://github.com/RaenonX-DL/dragalia-asset-downloader [downloader-ci]: https://github.com/RaenonX-DL/dragalia-asset-downloader-2/workflows/CI/badge.svg - [downloader-ci-link]: https://github.com/RaenonX-DL/dragalia-asset-downloader-2/actions?query=workflow%3ACI - -[downloader-cq-badge]: https://app.codacy.com/project/badge/Grade/455468d9c9184f88af1249e82cb2c4ad - -[downloader-cq-link]: https://www.codacy.com/gh/RaenonX-DL/dragalia-asset-downloader-2/dashboard - +[downloader-cq-badge-main]: https://app.codacy.com/project/badge/Grade/455468d9c9184f88af1249e82cb2c4ad?branch=main +[downloader-cq-badge-dev]: https://app.codacy.com/project/badge/Grade/455468d9c9184f88af1249e82cb2c4ad?branch=dev +[downloader-cq-link-main]: https://www.codacy.com/gh/RaenonX-DL/dragalia-asset-downloader-2/dashboard?branch=main +[downloader-cq-link-dev]: https://www.codacy.com/gh/RaenonX-DL/dragalia-asset-downloader-2/dashboard?branch=dev [downloader-time-badge]: https://wakatime.com/badge/github/RaenonX-DL/dragalia-asset-downloader-2.svg - [downloader-time-link]: https://wakatime.com/badge/github/RaenonX-DL/dragalia-asset-downloader-2 - [downloader-lgtm-alert-badge]: https://img.shields.io/lgtm/alerts/g/RaenonX-DL/dragalia-asset-downloader-2.svg?logo=lgtm&logoWidth=18 - [downloader-lgtm-alert-link]: https://lgtm.com/projects/g/RaenonX-DL/dragalia-asset-downloader-2/alerts/ - [downloader-lgtm-quality-badge]: https://img.shields.io/lgtm/grade/python/g/RaenonX-DL/dragalia-asset-downloader-2.svg?logo=lgtm&logoWidth=18 - [downloader-lgtm-quality-link]: https://lgtm.com/projects/g/RaenonX-DL/dragalia-asset-downloader-2/context:python - [downloader-lgtm-loc-badge]: https://badgen.net/lgtm/lines/g/RaenonX-DL/dragalia-asset-downloader-2 diff --git a/config.schema.json b/config.schema.json new file mode 100644 index 0000000..d7d3ea6 --- /dev/null +++ b/config.schema.json @@ -0,0 +1,159 @@ +{ + "$schema": 
"https://json-schema.org/draft/2020-12/schema", + "$id": "https://raw.githubusercontent.com/RaenonX-DL/dragalia-asset-downloader-2/main/config.schema.json", + "title": "Dragalia Lost Asset Downloader Config Schema", + "description": "JSON Schema for the asset downloader config.", + "type": "object", + "properties": { + "paths": { + "type": "object", + "description": "Various I/O paths.", + "properties": { + "downloaded": { + "type": "string", + "description": "Root directory for storing the downloaded/unextracted assets." + }, + "lib": { + "type": "string", + "description": "Root directory for the external libraries." + }, + "export": { + "type": "string", + "description": "Root directory for the exported files." + }, + "index": { + "type": "string", + "description": "Root directory for the indexing files." + }, + "log": { + "type": "string", + "description": "Root directory for the log files." + } + }, + "required": [ + "downloaded", + "lib", + "export", + "index", + "log" + ], + "additionalProperties": false + }, + "assets": { + "type": "array", + "description": "Asset exporting tasks.", + "items": { + "type": "object", + "description": "A single asset exporting task. Note that the objects will be exported only if it pass all the regex tests in its task.", + "properties": { + "task": { + "type": "string", + "description": "Name of the asset exporting task." + }, + "name": { + "type": "string", + "description": "Regex of the asset name to export. This has to be a full match. Note that the dependency assets of a main asset selected by a task always pass this test." + }, + "type": { + "type": "string", + "description": "Type of the export task.", + "enum": [ + "MonoBehaviour", + "GameObject", + "AnimatorController", + "AnimatorOverrideController", + "Texture2D", + "Texture2D-Alpha", + "Texture2D-Story", + "Sprite" + ] + }, + "filter": { + "type": "array", + "description": "Export the objects that pass one of these filters and all the conditions of a filter. 
Note that these filters are only apply to the main asset. Dependency assets always pass the tests unless it is selected as main assets.", + "items": { + "type": "object", + "description": "A single filter. Objects that pass all conditions of a filter will be exported.", + "properties": { + "container": { + "type": "string", + "description": "Regex test for the container name. This can be a partial match." + }, + "name": { + "type": "string", + "description": "Regex test for the object name. This can be a partial match." + } + }, + "required": [ + "container" + ], + "additionalProperties": false + }, + "minItems": 1, + "uniqueItems": true + }, + "isMultiLocale": { + "type": "boolean", + "description": "If true, the task will perform on all locale. Otherwise, the task only perform on the master (JP) locale.", + "default": false + }, + "suppressWarnings": { + "type": "array", + "description": "Types of warning message to suppress.", + "items": { + "type": "string", + "description": "Warning message type to suppress.", + "enum": [ + "nothingToExport", + "noMainTexture" + ] + } + } + }, + "required": [ + "task", + "name", + "type", + "filter" + ], + "additionalProperties": false + }, + "uniqueItems": true + }, + "raw": { + "type": "array", + "description": "Raw asset exporting tasks.", + "items": { + "type": "object", + "description": "A single raw asset exporting task. Note that the objects will be exported only if it pass all the regex tests in its task.", + "properties": { + "task": { + "type": "string", + "description": "Name of the raw asset exporting task." + }, + "name": { + "type": "string", + "description": "Regex of the asset name to export. This has to be a full match." + }, + "isMultiLocale": { + "type": "boolean", + "description": "If true, the task will perform on all locale. 
Otherwise, the task only perform on the master (JP) locale.", + "default": false + } + }, + "required": [ + "task", + "name" + ], + "additionalProperties": false + }, + "uniqueItems": true + } + }, + "required": [ + "paths", + "assets", + "raw" + ], + "additionalProperties": false +} diff --git a/config.yaml b/config.yaml new file mode 100644 index 0000000..2f1cc0e --- /dev/null +++ b/config.yaml @@ -0,0 +1,82 @@ +paths: + downloaded: .exclude/downloaded + lib: lib + export: .exclude/media + index: .exclude/index + log: .exclude/logs + +assets: + - task: Master Assets (all) + name: master + type: MonoBehaviour + filter: + - container: /resources/master/ + + - task: Master Assets (text label) + name: master + type: MonoBehaviour + filter: + - container: /resources/master/textlabel + isMultiLocale: true + + - task: Action Scripts + name: actions + type: GameObject + filter: + - container: /resources/actions/ + + - task: Action Parts List + name: actions + type: MonoBehaviour + filter: + - container: /resources/actions/actionpartslist + + - task: Animation Data (Base Controller) + name: ((characters|dragon)/motion/|meshes/dragon) + type: AnimatorController + filter: + - container: /resources/characters/motion/(.*)\.controller + suppressWarnings: + - nothingToExport + + - task: Animation Data (Override Controller) + name: ((characters|dragon)/motion/|meshes/dragon) + type: AnimatorOverrideController + filter: + - container: /resources/characters/motion/(.*)\.overridecontroller + suppressWarnings: + - nothingToExport + + - task: UI Sprites + name: images/ingame/ui + type: Sprite + filter: + - container: /resources/images/ingame/ui/atlas/ui + isMultiLocale: true + + - task: Icons + name: images/icon/ + type: Texture2D-Alpha + filter: + - container: /resources/images/icon/ + isMultiLocale: true + + - task: Unit Image + name: images/outgame/unitdetail/(amulet|chara|dragon) + type: Texture2D + filter: + - container: /resources/images/outgame/unitdetail + + - task: Story 
Image + name: emotion/story + type: Texture2D-Story + filter: + - container: /resources/emotion/story + + - task: Story Content + name: story/(unit|castle|quest)story/ + type: MonoBehaviour + filter: + - container: /resources/story + +raw: [ ] diff --git a/dlasset/__init__.py b/dlasset/__init__.py index e4a7340..6b2a59e 100644 --- a/dlasset/__init__.py +++ b/dlasset/__init__.py @@ -1,4 +1 @@ """Implementations to download and pre-process the assets.""" -from .dummy import workflow - -__all__ = ("workflow",) diff --git a/dlasset/config/__init__.py b/dlasset/config/__init__.py new file mode 100644 index 0000000..3fd5e19 --- /dev/null +++ b/dlasset/config/__init__.py @@ -0,0 +1,3 @@ +"""Implementations for the downloader config.""" +from .load import load_config +from .model import * # noqa diff --git a/dlasset/config/load.py b/dlasset/config/load.py new file mode 100644 index 0000000..cc56506 --- /dev/null +++ b/dlasset/config/load.py @@ -0,0 +1,30 @@ +"""Implementations to load the config file.""" +import json +from typing import Any, cast + +import yaml +from jsonschema import ValidationError, validate + +from .model import Config + +__all__ = ("load_config",) + + +def load_config(path: str) -> Config: + """ + Load and validate the config. + + Raises :class:`ValueError` if the config schema doesn't match. 
+ """ + with open(path, encoding="utf-8") as f: + config = cast(dict[Any, Any], yaml.safe_load(f)) + + with open("config.schema.json", encoding="utf-8") as f: + schema = cast(dict[Any, Any], json.load(f)) + + try: + validate(instance=config, schema=schema) + except ValidationError as ex: + raise ValueError("Config validation failed") from ex + + return Config(config) diff --git a/dlasset/config/model/__init__.py b/dlasset/config/model/__init__.py new file mode 100644 index 0000000..3185f4e --- /dev/null +++ b/dlasset/config/model/__init__.py @@ -0,0 +1,4 @@ +"""Config data model classes.""" +from .main import Config +from .task import AssetRawTask, AssetTask, AssetTaskFilter +from .types import ExportType, UnityType diff --git a/dlasset/config/model/base.py b/dlasset/config/model/base.py new file mode 100644 index 0000000..b3dbe67 --- /dev/null +++ b/dlasset/config/model/base.py @@ -0,0 +1,13 @@ +"""Base config object class.""" +from abc import ABC +from dataclasses import dataclass + +from dlasset.model import JsonModel + +__all__ = ("ConfigBase",) + + +# https://github.com/python/mypy/issues/5374 +@dataclass # type: ignore +class ConfigBase(JsonModel, ABC): + """Base class of a config data.""" diff --git a/dlasset/config/model/main.py b/dlasset/config/model/main.py new file mode 100644 index 0000000..69240d6 --- /dev/null +++ b/dlasset/config/model/main.py @@ -0,0 +1,22 @@ +"""Main config model implementation.""" +from dataclasses import dataclass, field + +from .base import ConfigBase +from .paths import Paths +from .task import AssetRawTask, AssetTask + +__all__ = ("Config",) + + +@dataclass +class Config(ConfigBase): + """Asset downloader config.""" + + paths: Paths = field(init=False) + asset_tasks: tuple[AssetTask, ...] = field(init=False) + raw_tasks: tuple[AssetRawTask, ...] 
= field(init=False) + + def __post_init__(self) -> None: + self.paths = Paths(self.json_obj["paths"]) + self.asset_tasks = tuple(AssetTask(task) for task in self.json_obj["assets"]) + self.raw_tasks = tuple(AssetRawTask(task) for task in self.json_obj["raw"]) diff --git a/dlasset/config/model/paths.py b/dlasset/config/model/paths.py new file mode 100644 index 0000000..532ac3d --- /dev/null +++ b/dlasset/config/model/paths.py @@ -0,0 +1,57 @@ +"""Config path model class.""" +import os +from dataclasses import dataclass, field +from datetime import datetime +from typing import cast + +from dlasset.enums import Locale +from dlasset.log import log +from .base import ConfigBase + +__all__ = ("Paths",) + + +@dataclass +class Paths(ConfigBase): + """Various paths for different types of data.""" + + downloaded: str = field(init=False) + lib: str = field(init=False) + export: str = field(init=False) + index: str = field(init=False) + log: str = field(init=False) + + def __post_init__(self) -> None: + self.downloaded = cast(str, os.path.normpath(self.json_obj["downloaded"])) + self.lib = cast(str, os.path.normpath(self.json_obj["lib"])) + self.export = cast(str, os.path.normpath(self.json_obj["export"])) + self.index = cast(str, os.path.normpath(self.json_obj["index"])) + + today = datetime.now().strftime("%Y-%m-%d-%H-%M-%S") + self.log = cast(str, os.path.normpath(os.path.join(self.json_obj["log"], today))) + + def export_asset_dir_of_locale(self, locale: Locale) -> str: + """Get the root directory for the exported assets of ``locale``.""" + if locale.is_master: + return self.export + + return os.path.join(self.export, "localized", locale.value) + + def init_dirs(self) -> None: + """Initialize directories for output.""" + log("DEBUG", "Making directory for downloaded files...") + os.makedirs(self.downloaded, exist_ok=True) + + log("DEBUG", "Making directory for exported files...") + os.makedirs(self.export, exist_ok=True) + + log("DEBUG", "Making directory for file 
index...") + os.makedirs(self.index, exist_ok=True) + + log("DEBUG", "Making directory for logs...") + os.makedirs(self.log, exist_ok=True) + + @property + def lib_decrypt_dll_path(self) -> str: + """Path of the DLL for decryption.""" + return os.path.join(self.lib, "decrypt", "Decrypt.dll") diff --git a/dlasset/config/model/task.py b/dlasset/config/model/task.py new file mode 100644 index 0000000..e33cd9b --- /dev/null +++ b/dlasset/config/model/task.py @@ -0,0 +1,84 @@ +"""Asset exporting task model.""" +import re +from abc import ABC +from dataclasses import dataclass, field +from typing import Optional, Pattern, cast + +from dlasset.enums import WarningType +from .base import ConfigBase +from .types import ExportType + +__all__ = ("AssetTask", "AssetRawTask", "AssetTaskFilter") + + +@dataclass +class AssetTaskBase(ConfigBase, ABC): + """Base class for asset exporting task model.""" + + name: str = field(init=False) + asset_regex: Pattern = field(init=False) + is_multi_locale: bool = field(init=False) + suppress_warnings: tuple[WarningType, ...] 
= field(init=False) + + def __post_init__(self) -> None: + self.name = cast(str, self.json_obj["task"]) + self.asset_regex = re.compile(self.json_obj["name"]) + self.is_multi_locale = cast(bool, self.json_obj.get("isMultiLocale", False)) + self.suppress_warnings = tuple(WarningType(type_) for type_ in self.json_obj.get("suppressWarnings", [])) + + @property + def title(self) -> str: + """Get a string containing the summary of this task.""" + return f"{self.name} (Regex: {self.asset_regex.pattern} - " \ + f"{'all locale' if self.is_multi_locale else 'master only'})" + + +@dataclass +class AssetTaskFilter(ConfigBase): + """Asset exporting task filter model.""" + + container_regex: Pattern = field(init=False) + name_regex: Optional[Pattern] = field(init=False) + + def __post_init__(self) -> None: + self.container_regex = re.compile(self.json_obj["container"]) + + if name_regex := self.json_obj.get("name"): + self.name_regex = re.compile(name_regex) + else: + self.name_regex = None + + def match_name(self, name: str) -> bool: + """Check if the given ``name`` matches the filter.""" + if self.name_regex: + return bool(re.search(self.name_regex, name)) + + # `name_regex` not set, always match + return True + + def match_container(self, container: str) -> bool: + """Check if the given ``container`` matches the filter.""" + return bool(re.search(self.container_regex, container)) + + def match_filter(self, container: str, name: str) -> bool: + """Check if both the given ``container`` and ``name`` match the filter.""" + return self.match_container(container) and self.match_name(name) + + +@dataclass +class AssetTask(AssetTaskBase): + """Asset exporting task model.""" + + type: ExportType = field(init=False) + conditions: tuple[AssetTaskFilter, ...] 
= field(init=False) + + def __post_init__(self) -> None: + super().__post_init__() + + self.type = cast(ExportType, self.json_obj["type"]) + self.conditions = tuple(AssetTaskFilter(filter_) for filter_ in self.json_obj["filter"]) + + +@dataclass +class AssetRawTask(AssetTaskBase): + """Raw asset exporting task model.""" diff --git a/dlasset/config/model/types.py b/dlasset/config/model/types.py new file mode 100644 index 0000000..ffba8d5 --- /dev/null +++ b/dlasset/config/model/types.py @@ -0,0 +1,26 @@ +"""Type definitions for the config.""" +from typing import Literal + +__all__ = ("UnityType", "ExportType") + +# Should follow the naming of `UnityPy` +# List supported types only +UnityType = Literal[ + "MonoBehaviour", + "GameObject", + "AnimatorController", + "AnimatorOverrideController", + "Texture2D", + "Sprite", + "Material" +] + +# Special types that performs some special actions +# Names should be in the format of -. +ExtendedType = Literal[ + "Texture2D-Alpha", + "Texture2D-Story" +] + +# Should be the same as the listed enums for the field `type` +ExportType = Literal[UnityType, ExtendedType] diff --git a/dlasset/const.py b/dlasset/const.py new file mode 100644 index 0000000..0256d1b --- /dev/null +++ b/dlasset/const.py @@ -0,0 +1,13 @@ +"""Various environmental constants.""" +from dlasset.enums import Locale + +__all__ = ("CDN_BASE_URL", "MANIFEST_NAMES") + +CDN_BASE_URL = "https://dragalialost.akamaized.net/dl" + +MANIFEST_NAMES: dict[Locale, str] = { + Locale.JP: "assetbundle.manifest", + Locale.EN: "assetbundle.en_us.manifest", + Locale.CHS: "assetbundle.zh_cn.manifest", + Locale.CHT: "assetbundle.zh_tw.manifest", +} diff --git a/dlasset/dummy.py b/dlasset/dummy.py deleted file mode 100644 index 3cd1cca..0000000 --- a/dlasset/dummy.py +++ /dev/null @@ -1,7 +0,0 @@ -"""Dummy implementations.""" -__all__ = ("workflow",) - - -def workflow() -> None: - """Dummy workflow.""" - print("Does nothing yet") diff --git a/dlasset/enums/__init__.py 
b/dlasset/enums/__init__.py new file mode 100644 index 0000000..13a3be3 --- /dev/null +++ b/dlasset/enums/__init__.py @@ -0,0 +1,3 @@ +"""Various types of enums.""" +from .locale import Locale +from .warning_type import WarningType diff --git a/dlasset/enums/locale.py b/dlasset/enums/locale.py new file mode 100644 index 0000000..397151a --- /dev/null +++ b/dlasset/enums/locale.py @@ -0,0 +1,18 @@ +"""Enums for asset locale.""" +from enum import Enum + +__all__ = ("Locale",) + + +class Locale(Enum): + """Asset locale enum.""" + + CHT = "tw" + CHS = "cn" + EN = "en" + JP = "jp" + + @property + def is_master(self) -> bool: + """Check if the locale is the master locale (JP).""" + return self == Locale.JP diff --git a/dlasset/enums/warning_type.py b/dlasset/enums/warning_type.py new file mode 100644 index 0000000..837a7b2 --- /dev/null +++ b/dlasset/enums/warning_type.py @@ -0,0 +1,15 @@ +"""Enum for the types of warning.""" +from enum import Enum + +__all__ = ("WarningType",) + + +class WarningType(Enum): + """ + Types of warning message. + + Values should be the same as the listed enums for the field ``suppressWarnings``. 
+ """ + + NOTHING_TO_EXPORT = "nothingToExport" + NO_MAIN_TEXTURE = "noMainTexture" diff --git a/dlasset/env/__init__.py b/dlasset/env/__init__.py new file mode 100644 index 0000000..70a728d --- /dev/null +++ b/dlasset/env/__init__.py @@ -0,0 +1,3 @@ +"""Implmentations for the environment.""" +from .args import get_cli_args +from .main import Environment, init_env diff --git a/dlasset/env/args.py b/dlasset/env/args.py new file mode 100644 index 0000000..949cfbe --- /dev/null +++ b/dlasset/env/args.py @@ -0,0 +1,44 @@ +"""Implementations for parsing the CLI arguments.""" +import argparse +import os +from dataclasses import dataclass +from typing import cast + +__all__ = ("CliArgs", "get_cli_args") + + +@dataclass +class CliArgs: + """CLI arguments.""" + + version_code: str + iv: str + key: str + config_path: str + no_index: bool + + +def get_cli_args() -> CliArgs: + """Get CLI arguments.""" + parser = argparse.ArgumentParser(description="Downloads and pre-processes Dragalia Lost assets.") + + parser.add_argument("version", type=str, + help="Manifest version code") + parser.add_argument("-iv", "--iv", type=str, + help="IV to decrypt the manifest asset") + parser.add_argument("-key", "--key", type=str, + help="Key to decrypt the manifest asset") + parser.add_argument("-c", "--config", type=str, required=True, + help="Config file path to use") + parser.add_argument("-ni", "--no-index", action="store_true", default=False, + help="File index will be ignored if this flag is provided") + + args = parser.parse_args() + + return CliArgs( + version_code=cast(str, args.version), + iv=cast(str, args.iv or os.environ["CRYPTO_IV"]), + key=cast(str, args.key or os.environ["CRYPTO_KEY"]), + config_path=cast(str, args.config), + no_index=cast(bool, args.no_index), + ) diff --git a/dlasset/env/index.py b/dlasset/env/index.py new file mode 100644 index 0000000..3969974 --- /dev/null +++ b/dlasset/env/index.py @@ -0,0 +1,78 @@ +"""Implementations for the file index.""" +import json 
+import os.path +from dataclasses import dataclass, field +from typing import TYPE_CHECKING + +from dlasset.enums import Locale + +if TYPE_CHECKING: + from dlasset.manifest import ManifestEntryBase + +__all__ = ("FileIndex",) + + +@dataclass +class FileIndex: + """File index model class.""" + + index_dir: str + enabled: bool + + _data: dict[Locale, dict[str, str]] = field(init=False) # key = file name from entry; value = hash + + def __post_init__(self) -> None: + self._data = {} + + if not self.enabled: + # Skip initializing index data if not enabled + return + + for locale in Locale: + index_file_path = self.get_index_file_path(locale) + + if not os.path.exists(index_file_path): + # Index file not exists, create empty index + self._data[locale] = {} + continue + + with open(index_file_path, "r", encoding="utf-8") as f: + self._data[locale] = json.load(f) + + def get_index_file_path(self, locale: Locale) -> str: + """Get the index file path of ``locale``.""" + return os.path.join(self.index_dir, f"index-{locale.value}.json") + + def is_file_updated(self, locale: Locale, entry: "ManifestEntryBase") -> bool: + """Check if ``entry`` is updated.""" + if not self.enabled: + # Always return ``True`` to force re-download if not enabled + return True + + # File name not being in the index is considered as updated (should perform downloading tasks) + if entry.name not in self._data[locale]: + return True + + # Hash mismatch is considered as updated + return self._data[locale][entry.name] != entry.hash + + def update_entry(self, locale: Locale, entry: "ManifestEntryBase") -> None: + """Update ``entry`` in the index.""" + if not self.enabled: + # Do nothing if not enabled + return + + self._data[locale][entry.name] = entry.hash + + def update_index_files(self) -> None: + """Push the updated file index to its corresponding file.""" + if not self.enabled: + # Do nothing if not enabled + return + + for locale, data in self._data.items(): + file_path = 
self.get_index_file_path(locale) + + with open(file_path, "w+", encoding="utf-8") as f: + # `separators` argument for minify + json.dump(data, f, separators=(",", ":")) diff --git a/dlasset/env/main.py b/dlasset/env/main.py new file mode 100644 index 0000000..7428ce4 --- /dev/null +++ b/dlasset/env/main.py @@ -0,0 +1,99 @@ +"""Implementations for initializing the environment.""" +import os.path +from dataclasses import dataclass, field + +from dlasset.config import Config +from dlasset.const import MANIFEST_NAMES +from dlasset.enums import Locale +from dlasset.log import init_log, log, log_group_end, log_group_start +from .args import CliArgs +from .index import FileIndex + +__all__ = ("init_env", "Environment") + + +@dataclass +class Environment: + """Settings related to the environment.""" + + args: CliArgs + config: Config + + index: FileIndex = field(init=False) + + def __post_init__(self) -> None: + self.index = FileIndex(self.config.paths.index, enabled=not self.args.no_index) + + def manifest_asset_path_of_locale(self, locale: Locale) -> str: + """Get the manifest asset path of ``locale``.""" + return os.path.join(self.manifest_asset_dir, MANIFEST_NAMES[locale]) + + def manifest_asset_decrypted_path(self, locale: Locale) -> str: + """Get the decrypted manifest asset path of ``locale``.""" + return f"{self.manifest_asset_path_of_locale(locale)}.decrypted" + + @property + def manifest_asset_dir(self) -> str: + """Directory of the encrypted manifest assets.""" + return os.path.join(self.config.paths.downloaded, "manifest", self.args.version_code) + + @property + def downloaded_assets_dir(self) -> str: + """Directory of the downloaded assets.""" + return os.path.join(self.config.paths.downloaded, "assets") + + def print_info(self) -> None: + """Print the info about the current environment.""" + log_group_start("Environment info") + + if self.args.no_index: + log("WARNING", "File indexing is not enabled. 
Files matching the task criteria will be downloaded.") + + log("INFO", f"Version code: {self.args.version_code}") + log("INFO", f"Config file path: {self.args.config_path}") + log("INFO", "-" * 20) + log("INFO", f"External library directory: {self.config.paths.lib}") + log("INFO", "-" * 20) + log("INFO", f"Manifest asset directory: {self.manifest_asset_dir}") + log("INFO", f"Downloaded assets directory: {self.downloaded_assets_dir}") + log("INFO", f"Exported files directory: {self.config.paths.export}") + log("INFO", f"File index directory: {self.config.paths.index}") + log("INFO", "-" * 20) + log("INFO", "Suppressed warnings:") + for task in self.config.asset_tasks: + if not task.suppress_warnings: + continue + + log("INFO", f"{task.title}:") + for warning_type in task.suppress_warnings: + log("INFO", f"- {warning_type}") + + log_group_end() + + def init_dirs(self) -> None: + """Initialize directories.""" + self.config.paths.init_dirs() + + log("DEBUG", "Making directory for manifest assets...") + os.makedirs(self.manifest_asset_dir, exist_ok=True) + log("DEBUG", "Making directory for downloaded assets...") + os.makedirs(self.downloaded_assets_dir, exist_ok=True) + + def prepare_logging(self) -> None: + """Prepare logging factory.""" + init_log(self.config.paths.log) + + +def init_env(args: CliArgs, config: Config) -> Environment: + """Initializes the environment.""" + log_group_start("Environment initialization") + + env = Environment(args, config) + log("INFO", "Creating directories...") + env.init_dirs() + log("INFO", "Initializing logging...") + env.prepare_logging() + + log_group_end() + + return env diff --git a/dlasset/export/__init__.py b/dlasset/export/__init__.py new file mode 100644 index 0000000..b3603ec --- /dev/null +++ b/dlasset/export/__init__.py @@ -0,0 +1,6 @@ +"""Implementations for exporting Unity assets.""" +from .main import export_asset +from .model import ExportInfo +from .raw import export_raw_by_task +from .task import export_by_task 
+from .types import * # noqa diff --git a/dlasset/export/functions/__init__.py b/dlasset/export/functions/__init__.py new file mode 100644 index 0000000..9fa56a5 --- /dev/null +++ b/dlasset/export/functions/__init__.py @@ -0,0 +1,5 @@ +"""Functions to export the asset objects.""" +from .image import export_image +from .image_alpha import export_image_alpha +from .image_story import export_image_story +from .monobehaviour import export_mono_behaviour diff --git a/dlasset/export/functions/image.py b/dlasset/export/functions/image.py new file mode 100644 index 0000000..c46d5a8 --- /dev/null +++ b/dlasset/export/functions/image.py @@ -0,0 +1,23 @@ +"""Implementations to export images such as ``Texture2D`` and ``Sprite``.""" +import os +from typing import TYPE_CHECKING + +from dlasset.log import log + +if TYPE_CHECKING: + from dlasset.export import ExportInfo + +__all__ = ("export_image",) + + +def export_image(export_info: "ExportInfo") -> None: + """Export the image objects in ``info_path_dict``.""" + for obj_info in export_info.objects: + obj = obj_info.obj + + log("INFO", f"Exporting {obj.name} ({obj_info.container})...") + + export_path = os.path.join(export_info.get_export_dir_of_obj(obj_info), f"{obj.name}.png") + + img = obj.image + img.save(export_path) diff --git a/dlasset/export/functions/image_alpha.py b/dlasset/export/functions/image_alpha.py new file mode 100644 index 0000000..ef0e88d --- /dev/null +++ b/dlasset/export/functions/image_alpha.py @@ -0,0 +1,75 @@ +"""Implementations to export image with alpha channel.""" +import os +from typing import Optional, TYPE_CHECKING, cast + +from PIL import Image +from UnityPy.classes import Texture2D + +from dlasset.log import log +from .image import export_image + +if TYPE_CHECKING: + from dlasset.export import ExportInfo + +__all__ = ("export_image_alpha",) + + +def get_alpha_channel_tex(texture_envs: dict, export_info: "ExportInfo") -> Optional[Texture2D]: + """Get the alpha texture. 
Returns ``None`` if not available.""" + if "_AlphaTex" not in texture_envs: # Alpha channel texture not available + return None + + path_id_alpha = texture_envs["_AlphaTex"]["m_Texture"]["m_PathID"] + + if not path_id_alpha: # Path ID points to null (path ID = 0) + return None + + return cast(Texture2D, export_info.get_obj_info(path_id_alpha).obj) + + +def export_image_alpha(export_info: "ExportInfo") -> None: + """Export the image objects in ``export_info`` with alpha channel merged.""" + material = next((info for info in export_info.objects if info.obj.type == "Material"), None) + + if not material: + log("INFO", f"Asset {export_info} does not have any `Material` - fallback to normal image export") + export_image(export_info) + return + + log("DEBUG", f"Reading material data... ({material.container})") + + tree = material.obj.read_typetree() + + texture_envs = dict(tree["m_SavedProperties"]["m_TexEnvs"]) + + path_id_main = texture_envs["_MainTex"]["m_Texture"]["m_PathID"] + + if not path_id_main: # Main texture points to null file - don't return anything + return + + info_main = export_info.get_obj_info(path_id_main) + obj_main = info_main.obj + + log("INFO", f"Exporting {obj_main.name}... ({info_main.container})") + + export_path = os.path.join(export_info.get_export_dir_of_obj(info_main), f"{obj_main.name}.png") + + log("DEBUG", f"Merging alpha channel of {obj_main.name}... 
({info_main.container})") + + if obj_alpha := get_alpha_channel_tex(texture_envs, export_info): + # Alpha texture exists, merge image + img_main = obj_main.image + img_alpha = obj_alpha.image + + # Alpha texture could be in a different size + if img_alpha.size != img_main.size: + img_alpha = img_alpha.resize(img_main.size) + + r, g, b = img_main.split()[:3] + a = img_alpha.split()[3] + + Image.merge("RGBA", (r, g, b, a)).save(export_path) + return + + # Alpha texture does not exist, just save it + obj_main.image.save(export_path) diff --git a/dlasset/export/functions/image_story.py b/dlasset/export/functions/image_story.py new file mode 100644 index 0000000..901c2b2 --- /dev/null +++ b/dlasset/export/functions/image_story.py @@ -0,0 +1,91 @@ +"""Implementations to export story emotion image.""" +import os +from typing import TYPE_CHECKING, cast + +from PIL import Image +from UnityPy.classes import Material + +from dlasset.log import log +from dlasset.utils import crop_image, merge_y_cb_cr_a + +if TYPE_CHECKING: + from dlasset.export import ExportInfo + +__all__ = ("export_image_story",) + +# Almost same across all the assets +_parts_image_size = (256, 256) + + +def get_y_cb_cr_a_from_material(material: Material, export_info: "ExportInfo") -> tuple[Image, Image, Image, Image]: + """Get a tuple containing Y, Cb, Cr, alpha image object in order of the material.""" + texture_envs = dict(material.read_typetree()["m_SavedProperties"]["m_TexEnvs"]) + + obj_y = cast(Image, export_info.get_obj_info(texture_envs["_TexY"]["m_Texture"]["m_PathID"]).obj.image) + obj_cb = cast(Image, export_info.get_obj_info(texture_envs["_TexCb"]["m_Texture"]["m_PathID"]).obj.image) + obj_cr = cast(Image, export_info.get_obj_info(texture_envs["_TexCr"]["m_Texture"]["m_PathID"]).obj.image) + obj_a = cast(Image, export_info.get_obj_info(texture_envs["_TexA"]["m_Texture"]["m_PathID"]).obj.image) + + return obj_y, obj_cb, obj_cr, obj_a + + +def crop_parts_image( + img: Image, parts_table: 
list[dict[str, dict[str, int]]], image_name: str, container: str +) -> Image: + """ + Crop the parts image of ``img`` according to ``parts_table``. + + If no position data is provided in ``parts_table``, default coordinates will be used instead. + """ + is_using_default = False + log("DEBUG", f"Cropping image part of {image_name}... ({container})") + + # size = (tl_x, tl_y, rb_x, rb_y) + if not parts_table: + log("WARNING", f"{image_name} ({container}) does not have parts, using default positions") + + # Use default coordinates because parts table not available + size = (296, 21, 808, 533) # 512 x 512 + is_using_default = True + else: + # Use data from parts table + parts_position = parts_table[0]["position"] + center_x, center_y = parts_position["x"], parts_position["y"] + size = (center_x - 128, center_y - 128, center_x + 128, center_y + 128) + + img = crop_image(img, *size) + + if is_using_default: + img = img.resize(_parts_image_size, Image.ANTIALIAS) + + return img + + +def export_image_story(export_info: "ExportInfo") -> None: + """Export the image objects in ``info_path_dict`` with YCbCr channel merged.""" + mono_behaviour = next(info for info in export_info.objects if info.obj.type == "MonoBehaviour") + + log("DEBUG", f"Reading mono behaviour data... ({mono_behaviour.container})") + + tree = mono_behaviour.obj.read_typetree() + + image_name = mono_behaviour.obj.name + + try: + channels = get_y_cb_cr_a_from_material( + cast(Material, export_info.get_obj_info(tree["basePartsData"]["material"]["m_PathID"]).obj), + export_info + ) + except KeyError as ex: + raise ValueError(f"Asset {image_name} ({mono_behaviour.container}) has missing object") from ex + + log("INFO", f"Exporting {image_name}... ({mono_behaviour.container})") + + export_path = os.path.join(export_info.get_export_dir_of_obj(mono_behaviour), f"{image_name}.png") + + log("DEBUG", f"Merging YCbCr of {image_name}... 
({mono_behaviour.container})") + + img = merge_y_cb_cr_a(*channels) + img = crop_parts_image(img, tree["partsDataTable"], image_name, mono_behaviour.container) + + img.save(export_path) diff --git a/dlasset/export/functions/monobehaviour.py b/dlasset/export/functions/monobehaviour.py new file mode 100644 index 0000000..f5a5d41 --- /dev/null +++ b/dlasset/export/functions/monobehaviour.py @@ -0,0 +1,40 @@ +"""Implementations to export ``MonoBehaviour``.""" +import json +import os +from typing import TYPE_CHECKING + +from dlasset.export.types import MonoBehaviourTree +from dlasset.log import log + +if TYPE_CHECKING: + from dlasset.export import ExportInfo + +__all__ = ("export_mono_behaviour",) + + +def export_mono_behaviour(export_info: "ExportInfo") -> list[MonoBehaviourTree]: + """ + Export ``MonoBehaviour`` objects in ``export_info``. + + Returns the exported mono behaviour trees. + """ + trees: list[MonoBehaviourTree] = [] + + for obj_info in export_info.objects: + obj = obj_info.obj + + log("INFO", f"Exporting {obj.name} ({obj_info.container})...") + + export_path: str = os.path.join(export_info.get_export_dir_of_obj(obj_info), f"{obj.name}.json") + + if not obj.serialized_type.nodes: + log("WARNING", f"No exportable data for {obj.name}") + continue + + tree = obj.read_typetree() + with open(export_path, "w+", encoding="utf-8") as f: + f.write(json.dumps(tree, ensure_ascii=False, indent=2)) + + trees.append(tree) + + return trees diff --git a/dlasset/export/lookup.py b/dlasset/export/lookup.py new file mode 100644 index 0000000..fec6a02 --- /dev/null +++ b/dlasset/export/lookup.py @@ -0,0 +1,28 @@ +"""Exporting function index.""" +from dlasset.config import ExportType, UnityType +from .functions import export_image, export_image_alpha, export_image_story, export_mono_behaviour +from .types import ExportFunction + +__all__ = ("EXPORT_FUNCTIONS", "TYPES_TO_INCLUDE") + +EXPORT_FUNCTIONS: dict[ExportType, ExportFunction] = { + "MonoBehaviour": 
export_mono_behaviour, + "Texture2D": export_image, + "Texture2D-Alpha": export_image_alpha, + "Texture2D-Story": export_image_story, + "Sprite": export_image, + "GameObject": export_mono_behaviour, + "AnimatorController": export_mono_behaviour, + "AnimatorOverrideController": export_mono_behaviour, +} + +TYPES_TO_INCLUDE: dict[ExportType, tuple[UnityType, ...]] = { + "MonoBehaviour": ("MonoBehaviour",), + "Texture2D": ("Texture2D",), + "Texture2D-Alpha": ("Texture2D", "Material"), + "Texture2D-Story": ("Texture2D", "Material", "MonoBehaviour"), + "Sprite": ("Sprite",), + "GameObject": ("GameObject",), + "AnimatorController": ("AnimatorController",), + "AnimatorOverrideController": ("AnimatorOverrideController",), +} diff --git a/dlasset/export/main.py b/dlasset/export/main.py new file mode 100644 index 0000000..1598490 --- /dev/null +++ b/dlasset/export/main.py @@ -0,0 +1,162 @@ +"""Implementations to export files from an Unity asset.""" +import os +from typing import Optional, Sequence + +from UnityPy.environment import Environment as UnityAsset + +from dlasset.config import AssetTaskFilter, ExportType +from dlasset.enums import WarningType +from dlasset.log import log +from dlasset.manage import get_asset +from .lookup import EXPORT_FUNCTIONS, TYPES_TO_INCLUDE +from .model import ExportInfo, ObjectInfo +from .types import ExportReturn + +__all__ = ("export_asset",) + + +def log_asset_export_debug_info( + assets: list[UnityAsset], asset_paths: list[str], export_type: ExportType, export_dir: str +) -> None: + """Log the debug info about the asset exporting.""" + log("DEBUG", "Exporting asset:") + for asset_path in asset_paths: + log("DEBUG", f"- {asset_path}") + log("DEBUG", f"Export type: {export_type}") + log("DEBUG", f"Destination: {export_dir}") + log("DEBUG", f"Fallback Container: {get_container_fallback(assets)}") + + +def get_container_fallback(assets: list[UnityAsset]) -> str: + """Get the fallback container to use for ``assets``.""" + main_asset = 
assets[0] + + # Pick the 1st container in the main asset + return next(iter(main_asset.container.keys())) + + +def get_objects_to_export_of_asset( + asset: UnityAsset, export_type: ExportType, /, + container_fallback: str, is_main_asset: bool, + filters: Optional[Sequence[AssetTaskFilter]] = None +) -> list[ObjectInfo]: + """ + Get a list of objects to export in ``asset``. + + ``filters`` are omitted if ``is_main_asset`` is ``True``. + """ + obj_export: list[ObjectInfo] = [] + + for obj in asset.objects: + # `__ne__` not properly overridden, so `!=` doesn't work + if obj.type not in TYPES_TO_INCLUDE[export_type]: + continue + + container = obj.container or container_fallback + if is_main_asset and filters and not any(filter_.match_container(container) for filter_ in filters): + return [] + + obj_export.append(ObjectInfo(obj=obj, container=container, is_from_main=is_main_asset)) + + return obj_export + + +def get_objects_to_export( + assets: list[UnityAsset], export_type: ExportType, /, + filters: Optional[Sequence[AssetTaskFilter]] = None +) -> list[ObjectInfo]: + """Get a list of objects to export from all ``assets``.""" + obj_export: list[ObjectInfo] = [] + + container_fallback = get_container_fallback(assets) + + for idx, asset in enumerate(assets): + if not asset.objects: + continue + + obj_export.extend(get_objects_to_export_of_asset( + asset, export_type, + container_fallback=container_fallback, filters=filters, is_main_asset=idx == 0, + )) + + return obj_export + + +def export_objects( + obj_export: list[ObjectInfo], export_type: ExportType, export_dir: str, /, + asset_name: str, container_fallback: str, filters: Optional[Sequence[AssetTaskFilter]] = None +) -> list[ExportReturn]: + """ + Export the objects in ``obj_export``. + + Note that ``filters`` are only apply to the objects coming from the main asset. 
+ """ + obj_info_to_export: list[ObjectInfo] = [] + + for obj_info in obj_export: + obj = obj_info.read_obj() + + if ( + obj_info.is_from_main + and filters + and not any(filter_.match_filter(obj_info.container, obj.name) for filter_ in filters) + ): + continue + + obj_info_to_export.append(obj_info) + + export_info = ExportInfo( + export_dir=export_dir, + obj_info_list=obj_info_to_export, + asset_name=asset_name, + container_fallback=container_fallback + ) + results = EXPORT_FUNCTIONS[export_type](export_info) + + if not results: + return [] + + return results + + +def export_asset( + asset_paths: list[str], + export_type: ExportType, + export_dir: str, /, + filters: Optional[Sequence[AssetTaskFilter]] = None, + suppress_warnings: Sequence[WarningType] = () +) -> Optional[list[ExportReturn]]: + """ + Export the asset from ``asset_paths`` with the given criteria to ``export_dir`` and get the exported data. + + Returns ``None`` if nothing exportable or exported. + """ + assets = [get_asset(asset_path) for asset_path in asset_paths] + + asset_path_main = asset_paths[0] + asset_name_main = os.path.basename(asset_path_main) + + log_asset_export_debug_info(assets, asset_paths, export_type, export_dir) + + if not any(asset.objects for asset in assets) and WarningType.NOTHING_TO_EXPORT not in suppress_warnings: + log("WARNING", f"Nothing exportable for the asset: {asset_name_main}") + return None + + log("DEBUG", "Getting objects to export...") + + objects_to_export = get_objects_to_export(assets, export_type, filters=filters) + + if not objects_to_export and WarningType.NOTHING_TO_EXPORT not in suppress_warnings: + log("WARNING", f"Nothing to export for the asset: {asset_name_main}") + return None + + log("INFO", f"Found {len(objects_to_export)} objects to export ({asset_path_main}).") + + results: list[ExportReturn] = export_objects( + objects_to_export, export_type, export_dir, + filters=filters, asset_name=asset_name_main, 
container_fallback=get_container_fallback(assets) + ) + + log("DEBUG", f"Done exporting {asset_name_main} to {export_dir}") + + return results diff --git a/dlasset/export/model.py b/dlasset/export/model.py new file mode 100644 index 0000000..a986859 --- /dev/null +++ b/dlasset/export/model.py @@ -0,0 +1,72 @@ +"""Various model classes.""" +import os +from dataclasses import InitVar, dataclass, field +from functools import cache +from typing import Iterable + +from UnityPy.classes import Object + +__all__ = ("ExportInfo", "ObjectInfo") + + +@dataclass(unsafe_hash=True) +class ObjectInfo: + """Object info model class.""" + + obj: Object + container: str + is_from_main: bool + + def read_obj(self) -> Object: + """ + Read the object. + + This modified the class attribute ``obj``. + """ + self.obj = self.obj.read() + return self.obj + + +@dataclass +class ExportInfo: + """Export info model class.""" + + export_dir: str + obj_info_list: InitVar[list[ObjectInfo]] + asset_name: str + container_fallback: str + + _object_dict: dict[int, ObjectInfo] = field(init=False) + + def __post_init__(self, obj_info_list: list[ObjectInfo]) -> None: + self._object_dict = {obj_info.obj.path_id: obj_info for obj_info in obj_info_list} + + def __hash__(self) -> int: + return hash((self.export_dir, self.asset_name)) + + def __repr__(self) -> str: + return f"{self.asset_name} ({self.container_fallback})" + + @property + def objects(self) -> Iterable[ObjectInfo]: + """Get the objects to export.""" + return self._object_dict.values() + + def get_obj_info(self, path_id: int) -> ObjectInfo: + """ + Get the object info at ``path_id``. + + Raises :class:`ValueError` if no corresponding object. 
+ """ + if path_id not in self._object_dict: + raise ValueError(f"Path ID #{path_id} not exists on {self}") + + return self._object_dict[path_id] + + @cache + def get_export_dir_of_obj(self, obj_info: Object) -> str: + """Get the export directory of ``obj_info``.""" + obj_export_dir = os.path.join(self.export_dir, os.path.dirname(os.path.normpath(obj_info.container))) + os.makedirs(obj_export_dir, exist_ok=True) + + return obj_export_dir diff --git a/dlasset/export/raw.py b/dlasset/export/raw.py new file mode 100644 index 0000000..b42a4a9 --- /dev/null +++ b/dlasset/export/raw.py @@ -0,0 +1,23 @@ +"""Implementations for exporting raw assets.""" +import sys +from typing import TYPE_CHECKING + +from dlasset.config import AssetRawTask +from dlasset.env import Environment +from dlasset.log import log, log_group_end, log_group_start + +if TYPE_CHECKING: + from dlasset.manifest import Manifest + +__all__ = ("export_raw_by_task",) + + +def export_raw_by_task(_: Environment, __: "Manifest", task: AssetRawTask) -> None: + """Export raw assets according to ``task``.""" + log_group_start(task.title) + + log("ERROR", "Raw task extraction not implemented") + + log_group_end() + + sys.exit(1) diff --git a/dlasset/export/task.py b/dlasset/export/task.py new file mode 100644 index 0000000..85ab850 --- /dev/null +++ b/dlasset/export/task.py @@ -0,0 +1,50 @@ +"""Implementations for performing an asset exporting task.""" +from typing import TYPE_CHECKING + +from dlasset.config import AssetTask +from dlasset.enums import Locale +from dlasset.env import Environment +from dlasset.log import log, log_group_end, log_group_start +from dlasset.manage import get_asset_paths +from dlasset.utils import concurrent_run_no_return +from .main import export_asset + +if TYPE_CHECKING: + from dlasset.manifest import Manifest, ManifestEntry + +__all__ = ("export_by_task",) + + +def export_from_manifest(env: Environment, locale: Locale, entries: list["ManifestEntry"], task: AssetTask) -> None: + 
"""Export the asset of ``entry`` according to ``task``.""" + log("INFO", f"Exporting ({len(entries)}) {entries[0].name}...") + asset_paths = get_asset_paths(env, entries) + export_asset( + asset_paths, task.types, env.config.paths.export_asset_dir_of_locale(locale), + filters=task.conditions, suppress_warnings=task.suppress_warnings + ) + + +def export_by_task(env: Environment, manifest: "Manifest", task: AssetTask) -> None: + """Export the assets according to ``task``.""" + log_group_start(task.title) + + log("DEBUG", f"Types of object to export: {task.types}") + + log("INFO", "Filtering assets...") + asset_entries = list(manifest.get_entry_with_regex(task.asset_regex, is_master_only=not task.is_multi_locale)) + args_list = [ + [env, locale, entries, task] for locale, entries in asset_entries + if any(env.index.is_file_updated(locale, entry) for entry in entries) + ] + log("INFO", f"{len(asset_entries)} assets matching the criteria. {len(args_list)} assets updated.") + + concurrent_run_no_return(export_from_manifest, args_list, env.config.paths.log) + + # MUST update outside of the concurrent run + # Otherwise the index will not update because of the separated memory space + for locale, entries in asset_entries: + for entry in entries: + env.index.update_entry(locale, entry) + + log_group_end() diff --git a/dlasset/export/types.py b/dlasset/export/types.py new file mode 100644 index 0000000..944d536 --- /dev/null +++ b/dlasset/export/types.py @@ -0,0 +1,21 @@ +"""Type definitions for exporting the assets.""" +from typing import Any, Callable, Union + +from .model import ExportInfo + +__all__ = ("ExportFunction", "ExportReturn", "MonoBehaviourTree") + +MonoBehaviourTree = dict[Any, Any] + +MonoBehaviourExportFunction = Callable[[ExportInfo], list[MonoBehaviourTree]] + +Texture2DExportFunction = Callable[[ExportInfo], None] + +ExportFunction = Union[ + MonoBehaviourExportFunction, + Texture2DExportFunction +] + +ExportReturn = Union[ + MonoBehaviourTree +] diff 
--git a/dlasset/log/__init__.py b/dlasset/log/__init__.py new file mode 100644 index 0000000..e3c2c54 --- /dev/null +++ b/dlasset/log/__init__.py @@ -0,0 +1,4 @@ +"""Implementations about logging.""" +from .init import init_log +from .main import log, log_group_end, log_group_start +from .middleware import PIDFileHandler diff --git a/dlasset/log/const.py b/dlasset/log/const.py new file mode 100644 index 0000000..c6a5862 --- /dev/null +++ b/dlasset/log/const.py @@ -0,0 +1,37 @@ +"""Constants about logging.""" +import logging +from typing import Literal + +__all__ = ( + "LogLevel", "LOG_LEVEL_NUM", "LOG_LEVEL_COLOR", "COLOR_RESET", + "LOGGER_CONSOLE", "LOGGER_FILE", "LOGGER_ERROR" +) + +LogLevel = Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] + +LOGGER_CONSOLE = logging.getLogger("console") +LOGGER_CONSOLE.propagate = False # Console message will have ANSI colors, which is not desired for files + +LOGGER_FILE = logging.getLogger("file") +LOGGER_FILE.propagate = True + +LOGGER_ERROR = logging.getLogger("error") +LOGGER_ERROR.propagate = True + +LOG_LEVEL_NUM: dict[LogLevel, int] = { + "CRITICAL": 50, + "ERROR": 40, + "WARNING": 30, + "INFO": 20, + "DEBUG": 10, +} + +LOG_LEVEL_COLOR: dict[LogLevel, str] = { + "CRITICAL": "\x1b[35m", + "ERROR": "\x1b[31m", + "WARNING": "\x1b[33m", + "INFO": "\x1b[36m", + "DEBUG": "\x1b[37m", +} + +COLOR_RESET: str = "\x1b[0m" diff --git a/dlasset/log/init.py b/dlasset/log/init.py new file mode 100644 index 0000000..f5cbffa --- /dev/null +++ b/dlasset/log/init.py @@ -0,0 +1,44 @@ +"""Implementations for initializing the logging factory.""" +import logging +import os.path +import sys + +from .const import LOGGER_CONSOLE +from .middleware import PIDFileHandler + +__all__ = ("init_log",) + +logging.basicConfig( + level=logging.DEBUG, # Root level needs to be lower than the subloggers + handlers=[logging.NullHandler()] # Don't output things +) + +FORMAT = "{asctime}.{msecs:03.0f} PID-{process:>5} [{levelname:>8}]: {message}" 
+FORMAT_DATE = "%Y-%m-%d %H:%M:%S" +FORMAT_STYLE = "{" + +default_formatter = logging.Formatter(FORMAT, style=FORMAT_STYLE, datefmt=FORMAT_DATE) + +console_handler = logging.StreamHandler(sys.stdout) +console_handler.setLevel(logging.INFO) +console_handler.setFormatter(default_formatter) + +LOGGER_CONSOLE.addHandler(console_handler) + + +def init_log(log_dir: str) -> None: + """Configure the logging factory.""" + pid_handler = PIDFileHandler(log_dir) + pid_handler.setLevel(logging.DEBUG) + pid_handler.setFormatter(default_formatter) + + # Log everything to PID log file + logging.getLogger().addHandler(pid_handler) + + # Log error to a specific file + file_handler = logging.FileHandler(os.path.join(log_dir, "error.log")) + file_handler.setLevel(logging.ERROR) + file_handler.setFormatter(default_formatter) + + # Log error to the specific file + logging.getLogger().addHandler(file_handler) diff --git a/dlasset/log/main.py b/dlasset/log/main.py new file mode 100644 index 0000000..fb6e19d --- /dev/null +++ b/dlasset/log/main.py @@ -0,0 +1,70 @@ +"""Main functions for logging.""" +import os +import time +from typing import Any, Optional, cast + +from .const import COLOR_RESET, LOGGER_CONSOLE, LOGGER_ERROR, LOGGER_FILE, LOG_LEVEL_COLOR, LOG_LEVEL_NUM, LogLevel + +__all__ = ("log", "log_group_start", "log_group_end") + +_GROUP_START_TIME: Optional[float] = None + +_GROUP_CURRENT_NAME: Optional[str] = None + + +def log(level: LogLevel, message: Any, /, exc_info: bool = False) -> None: + """Log ``message`` at ``level``.""" + log_level = LOG_LEVEL_NUM[level] + + LOGGER_CONSOLE.log(log_level, "%s%s%s", LOG_LEVEL_COLOR[level], message, COLOR_RESET, exc_info=exc_info) + LOGGER_FILE.log(log_level, message, exc_info=exc_info) + + if log_level >= LOG_LEVEL_NUM["ERROR"]: + LOGGER_ERROR.error(message, exc_info=exc_info) + + +def log_group_start(name: str) -> None: + """ + Place a log group start marker with ``name``. + + Raises :class:`RuntimeError` if a group has not ended. 
+ """ + global _GROUP_CURRENT_NAME, _GROUP_START_TIME # pylint: disable=global-statement + if _GROUP_CURRENT_NAME is not None: + raise RuntimeError(f"Group name: {_GROUP_CURRENT_NAME} has already started") + + _GROUP_START_TIME = time.time() + _GROUP_CURRENT_NAME = name + + if os.environ.get("GITHUB_ACTIONS"): # Environment variable to indicate that the s + separator = f"::group::{name}" + else: + separator = f"{'-' * 20} {name} {'-' * 20}" + + print(separator) + log("INFO", separator) + + +def log_group_end() -> None: + """ + Place a group end marker. + + Raises :class:`RuntimeError` if currently not in group. + """ + global _GROUP_CURRENT_NAME, _GROUP_START_TIME # pylint: disable=global-statement + if _GROUP_START_TIME is None: + raise RuntimeError("Group not started") + + end_message = f"{_GROUP_CURRENT_NAME} completed in {time.time() - _GROUP_START_TIME:.3f} secs" + log("INFO", end_message) + + if os.environ.get("GITHUB_ACTIONS"): + separator = "::endgroup::" + else: + separator = "-" * (len(cast(str, _GROUP_CURRENT_NAME)) + 42) + + _GROUP_CURRENT_NAME = None + _GROUP_START_TIME = None + + log("INFO", separator) + print(separator) diff --git a/dlasset/log/middleware/__init__.py b/dlasset/log/middleware/__init__.py new file mode 100644 index 0000000..dc33453 --- /dev/null +++ b/dlasset/log/middleware/__init__.py @@ -0,0 +1,2 @@ +"""Various logging middlewares.""" +from .handler import PIDFileHandler diff --git a/dlasset/log/middleware/handler.py b/dlasset/log/middleware/handler.py new file mode 100644 index 0000000..f6250ed --- /dev/null +++ b/dlasset/log/middleware/handler.py @@ -0,0 +1,18 @@ +"""Logging handlers.""" +import logging +import os + +__all__ = ("PIDFileHandler",) + + +def _file_path_with_pid(log_dir: str) -> str: + pid = os.getpid() + return os.path.join(log_dir, f"P-{pid}.log") + + +class PIDFileHandler(logging.FileHandler): + """Logging handler to store the log of a certain PID.""" + + def __init__(self, log_dir: str) -> None: + file_path = 
_file_path_with_pid(log_dir) + super().__init__(file_path) diff --git a/dlasset/manage/__init__.py b/dlasset/manage/__init__.py new file mode 100644 index 0000000..4e02c28 --- /dev/null +++ b/dlasset/manage/__init__.py @@ -0,0 +1,2 @@ +"""Implementations for managing the assets.""" +from .main import get_asset, get_asset_paths diff --git a/dlasset/manage/main.py b/dlasset/manage/main.py new file mode 100644 index 0000000..cee941a --- /dev/null +++ b/dlasset/manage/main.py @@ -0,0 +1,50 @@ +"""Main implementations for managing the assets.""" +import os.path +from functools import lru_cache +from typing import TYPE_CHECKING + +import UnityPy +import requests +from UnityPy.environment import Environment as UnityAsset + +from dlasset.env import Environment +from .utils import get_asset_url + +if TYPE_CHECKING: + from dlasset.manifest import ManifestEntry + +__all__ = ("get_asset_paths", "get_asset") + + +def download_asset(asset_hash_dir: str, asset_target_path: str, entry: "ManifestEntry") -> None: + """Download the asset of manifest ``entry`` and store it to ``asset_target_path``.""" + response = requests.get(get_asset_url(entry)) + + os.makedirs(asset_hash_dir, exist_ok=True) + with open(asset_target_path, "wb+") as f: + f.write(response.content) + + +def get_asset_paths(env: Environment, entries: list["ManifestEntry"]) -> list[str]: + """ + Get a list of asset paths of ``entry``. + + This automatically download the asset in ``entries`` if not exists. 
+ """ + asset_paths: list[str] = [] + for entry in entries: + asset_hash_dir = os.path.join(env.downloaded_assets_dir, entry.hash_dir) + asset_target_path = os.path.join(asset_hash_dir, entry.hash) + + if not os.path.exists(asset_target_path): + download_asset(asset_hash_dir, asset_target_path, entry) + + asset_paths.append(asset_target_path) + + return asset_paths + + +@lru_cache(maxsize=100) +def get_asset(asset_path: str) -> UnityAsset: + """Get the unity asset at ``asset_path``.""" + return UnityPy.load(asset_path) diff --git a/dlasset/manage/utils.py b/dlasset/manage/utils.py new file mode 100644 index 0000000..4549046 --- /dev/null +++ b/dlasset/manage/utils.py @@ -0,0 +1,14 @@ +"""Utils functions for managing the assets.""" +from typing import TYPE_CHECKING + +from dlasset.const import CDN_BASE_URL + +if TYPE_CHECKING: + from dlasset.manifest import ManifestEntry + +__all__ = ("get_asset_url",) + + +def get_asset_url(entry: "ManifestEntry") -> str: + """Get the URL of the manifest ``entry``.""" + return f"{CDN_BASE_URL}/assetbundles/Android/{entry.hash_dir}/{entry.hash}" diff --git a/dlasset/manifest/__init__.py b/dlasset/manifest/__init__.py new file mode 100644 index 0000000..4b46439 --- /dev/null +++ b/dlasset/manifest/__init__.py @@ -0,0 +1,5 @@ +"""Implementations related to the manifest assets.""" +from .decrypt import decrypt_manifest_all_locale +from .download import download_manifest_all_locale +from .export import export_manifest_all_locale +from .model import Manifest, ManifestEntry, ManifestEntryBase, ManifestLocale diff --git a/dlasset/manifest/decrypt.py b/dlasset/manifest/decrypt.py new file mode 100644 index 0000000..6553a0f --- /dev/null +++ b/dlasset/manifest/decrypt.py @@ -0,0 +1,39 @@ +"""Implementations for decrypting the manifest assets.""" +# The usage of `subprocess` is safe +import subprocess # nosec + +from dlasset.enums import Locale +from dlasset.env import Environment +from dlasset.log import log, log_group_end, log_group_start 
+from dlasset.utils import concurrent_run_no_return + +__all__ = ("decrypt_manifest_all_locale",) + + +def decrypt_manifest_of_locale(env: Environment, locale: Locale) -> None: + """Decrypt and store the manifest asset of ``locale``.""" + log("INFO", f"Decrypting manifest of {locale}...") + + path_encrypted = env.manifest_asset_path_of_locale(locale) + path_decrypted = env.manifest_asset_decrypted_path(locale) + + # Already listed `dotnet` as prerequisites for running this script (relative executable path) + # Inputs are presumably sanitized + subprocess.run( # nosec + [ + "dotnet", + env.config.paths.lib_decrypt_dll_path, + path_encrypted, + path_decrypted, + env.args.key, + env.args.iv, + ], + check=True + ) + + +def decrypt_manifest_all_locale(env: Environment) -> None: + """Decrypt and store the manifest asset of all locales.""" + log_group_start("Manifest decrypting") + concurrent_run_no_return(decrypt_manifest_of_locale, [[env, locale] for locale in Locale], env.config.paths.log) + log_group_end() diff --git a/dlasset/manifest/download.py b/dlasset/manifest/download.py new file mode 100644 index 0000000..17eae47 --- /dev/null +++ b/dlasset/manifest/download.py @@ -0,0 +1,35 @@ +"""Implementations to download manifest assets.""" +import requests + +from dlasset.const import CDN_BASE_URL, MANIFEST_NAMES +from dlasset.enums import Locale +from dlasset.env import Environment +from dlasset.log import log, log_group_end, log_group_start +from dlasset.utils import concurrent_run_no_return + +__all__ = ("download_manifest_all_locale",) + + +def download_manifest_of_locale(env: Environment, locale: Locale) -> None: + """ + Download and store the manifest asset of ``locale``. + + Downloaded asset needs decryption. 
+ """ + log("INFO", f"Downloading manifest of {locale}...") + manifest_url = f"{CDN_BASE_URL}/manifests/Android/{env.args.version_code}/{MANIFEST_NAMES[locale]}" + + response = requests.get(manifest_url) + with open(env.manifest_asset_path_of_locale(locale), mode="wb+") as f: + f.write(response.content) + + +def download_manifest_all_locale(env: Environment) -> None: + """ + Download and store the manifest asset of all possible ``locale``. + + Downloaded asset needs decryption. + """ + log_group_start("Manifest downloading") + concurrent_run_no_return(download_manifest_of_locale, [[env, locale] for locale in Locale], env.config.paths.log) + log_group_end() diff --git a/dlasset/manifest/export.py b/dlasset/manifest/export.py new file mode 100644 index 0000000..af9f003 --- /dev/null +++ b/dlasset/manifest/export.py @@ -0,0 +1,46 @@ +"""Implementations for exporting the decrypted manifest assets.""" +import sys +from typing import cast + +from dlasset.enums import Locale +from dlasset.env import Environment +from dlasset.export import MonoBehaviourTree, export_asset +from dlasset.log import log, log_group_end, log_group_start +from dlasset.utils import concurrent_run +from .model import Manifest + +__all__ = ("export_manifest_all_locale",) + + +def export_manifest_of_locale(env: Environment, locale: Locale) -> MonoBehaviourTree: + """Export and store the manifest file of ``locale``.""" + log("INFO", f"Exporting manifest of {locale}...") + + exported = export_asset( + [env.manifest_asset_decrypted_path(locale)], + "MonoBehaviour", + env.config.paths.export_asset_dir_of_locale(locale) + ) + + if not exported: + log("ERROR", f"Manifest of {locale} not exported") + sys.exit(1) + + # Manifest asset only contains one `MonoBehaviour` + return exported[0] + + +def export_manifest_all_locale(env: Environment) -> Manifest: + """ + Export and store the manifest file of all possible ``locale``. + + Also, returns the manifest file as model for each locale. 
+ """ + log_group_start("Manifest exporting") + results = concurrent_run( + export_manifest_of_locale, [[env, locale] for locale in Locale], env.config.paths.log, + key_of_call=lambda _, locale: cast(Locale, locale) + ) + log_group_end() + + return Manifest(results) diff --git a/dlasset/manifest/model/__init__.py b/dlasset/manifest/model/__init__.py new file mode 100644 index 0000000..5b589bc --- /dev/null +++ b/dlasset/manifest/model/__init__.py @@ -0,0 +1,4 @@ +"""Manifest file model class.""" +from .entry import ManifestEntry, ManifestEntryBase, ManifestRawEntry +from .locale import ManifestLocale +from .main import Manifest diff --git a/dlasset/manifest/model/category.py b/dlasset/manifest/model/category.py new file mode 100644 index 0000000..9d2403e --- /dev/null +++ b/dlasset/manifest/model/category.py @@ -0,0 +1,17 @@ +"""Manifest categorical model class.""" +from dataclasses import dataclass, field + +from dlasset.model import JsonModel +from .entry import ManifestEntry + + +@dataclass +class ManifestCategory(JsonModel): + """Manifest category model.""" + + name: str = field(init=False) + assets: tuple[ManifestEntry, ...] 
= field(init=False) + + def __post_init__(self) -> None: + self.name = self.json_obj["name"] + self.assets = tuple(ManifestEntry(entry) for entry in self.json_obj["assets"]) diff --git a/dlasset/manifest/model/entry.py b/dlasset/manifest/model/entry.py new file mode 100644 index 0000000..7500d9e --- /dev/null +++ b/dlasset/manifest/model/entry.py @@ -0,0 +1,48 @@ +"""Manifest entry model classes.""" +from abc import ABC +from dataclasses import dataclass, field + +from dlasset.model import JsonModel + +__all__ = ("ManifestEntry", "ManifestRawEntry", "ManifestEntryBase",) + + +@dataclass +class ManifestEntryBase(JsonModel, ABC): + """Manifest entry model base.""" + + name: str = field(init=False) + hash: str = field(init=False) + size: int = field(init=False) + group: int = field(init=False) + + dependencies: list[str] = field(init=False) + + hash_dir: str = field(init=False) + + def __post_init__(self) -> None: + self.name = self.json_obj["name"] + self.hash = self.json_obj["hash"] + self.size = self.json_obj["size"] + self.group = self.json_obj["group"] + + self.dependencies = self.json_obj.get("dependencies", []) + + self.hash_dir = self.hash[:2] + + +@dataclass +class ManifestEntry(ManifestEntryBase): + """Manifest entry model.""" + + assets: list[str] = field(init=False) + + def __post_init__(self) -> None: + super().__post_init__() + + self.assets = self.json_obj["assets"] + + +@dataclass +class ManifestRawEntry(ManifestEntryBase): + """Manifest entry model for raw assets.""" diff --git a/dlasset/manifest/model/locale.py b/dlasset/manifest/model/locale.py new file mode 100644 index 0000000..06ee0f3 --- /dev/null +++ b/dlasset/manifest/model/locale.py @@ -0,0 +1,28 @@ +"""Manifest model class of a locale.""" +from dataclasses import dataclass, field +from typing import Generator + +from dlasset.model import JsonModel +from .category import ManifestCategory +from .entry import ManifestEntry, ManifestRawEntry + + +@dataclass +class ManifestLocale(JsonModel): + 
"""Manifest model of a locale.""" + + categories: tuple[ManifestCategory, ...] = field(init=False) + raw_assets: tuple[ManifestRawEntry, ...] = field(init=False) + + entry_by_name: dict[str, ManifestEntry] = field(init=False) + + def __post_init__(self) -> None: + self.categories = tuple(ManifestCategory(category) for category in self.json_obj["categories"]) + self.raw_assets = tuple(ManifestRawEntry(asset) for asset in self.json_obj["rawAssets"]) + + self.entry_by_name = {entry.name: entry for entry in self.entries_across_category} + + @property + def entries_across_category(self) -> Generator[ManifestEntry, None, None]: + """Get a generator yielding the manifest entries across categories.""" + return (asset for category in self.categories for asset in category.assets) diff --git a/dlasset/manifest/model/main.py b/dlasset/manifest/model/main.py new file mode 100644 index 0000000..8243317 --- /dev/null +++ b/dlasset/manifest/model/main.py @@ -0,0 +1,84 @@ +"""Manifest model class.""" +import re +from dataclasses import dataclass, field +from typing import Callable, Generator, Iterable, Pattern, TypeVar, cast + +from dlasset.enums import Locale +from dlasset.export import MonoBehaviourTree +from .entry import ManifestEntry, ManifestEntryBase, ManifestRawEntry +from .locale import ManifestLocale + +__all__ = ("Manifest",) + +T = TypeVar("T", bound=ManifestEntryBase) + + +@dataclass +class Manifest: + """Manifest of all locales.""" + + data: dict[Locale, MonoBehaviourTree] + + manifests: dict[Locale, ManifestLocale] = field(init=False) + + def __post_init__(self) -> None: + self.manifests = {locale: ManifestLocale(manifest) for locale, manifest in self.data.items()} + + def get_entries_including_dependencies( + self, locale: Locale, parent_entry: T + ) -> list[T]: + """Get ``parent_entry`` and its dependencies attached at the tail of the returned entry list.""" + ret = [parent_entry] + for dependency in parent_entry.dependencies: + dependency_entry = 
self.manifests[locale].entry_by_name[dependency]
+            ret.extend(self.get_entries_including_dependencies(locale, cast(T, dependency_entry)))
+
+        return ret
+
+    def get_manifest_entries_of_locale(
+            self, regex: Pattern, get_entries: Callable[[ManifestLocale], Iterable[T]], /,
+            is_master_only: bool
+    ) -> Generator[tuple[Locale, list[T]], None, None]:
+        """
+        Get a generator yielding locale and the entry with its name matching ``regex`` from ``entries``.
+
+        Yields the manifest in the master locale only if ``is_master_only`` is ``True``.
+
+        Resolves asset dependency.
+        """
+        for locale, manifest_of_locale in self.manifests.items():
+            if is_master_only and not locale.is_master:
+                continue
+
+            for entry in get_entries(manifest_of_locale):
+                if not re.match(regex, entry.name):
+                    continue
+
+                yield locale, self.get_entries_including_dependencies(locale, entry)
+
+    def get_entry_with_regex(
+            self, regex: Pattern, /,
+            is_master_only: bool
+    ) -> Generator[tuple[Locale, list[ManifestEntry]], None, None]:
+        """
+        Get a generator yielding the manifest entry with its name matching ``regex``.
+
+        Resolves asset dependency.
+        """
+        return self.get_manifest_entries_of_locale(
+            regex, lambda manifest: manifest.entries_across_category,
+            is_master_only=is_master_only
+        )
+
+    def get_raw_entry_with_regex(
+            self, regex: Pattern, *,
+            is_master_only: bool
+    ) -> Generator[tuple[Locale, list[ManifestRawEntry]], None, None]:
+        """
+        Get a generator yielding the manifest entry with its name matching ``regex``.
+
+        Resolves asset dependency.
+ """ + return self.get_manifest_entries_of_locale( + regex, lambda manifest: manifest.raw_assets, + is_master_only=is_master_only + ) diff --git a/dlasset/model/__init__.py b/dlasset/model/__init__.py new file mode 100644 index 0000000..f004bbd --- /dev/null +++ b/dlasset/model/__init__.py @@ -0,0 +1,2 @@ +"""Data model classes.""" +from .obj import JsonModel diff --git a/dlasset/model/obj.py b/dlasset/model/obj.py new file mode 100644 index 0000000..3602d50 --- /dev/null +++ b/dlasset/model/obj.py @@ -0,0 +1,18 @@ +"""JSON object model class.""" +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import Any + +__all__ = ("JsonModel",) + + +# https://github.com/python/mypy/issues/5374 +@dataclass # type: ignore +class JsonModel(ABC): + """A data class that is based on a json object.""" + + json_obj: dict[Any, Any] + + @abstractmethod + def __post_init__(self) -> None: + raise NotImplementedError() diff --git a/dlasset/utils/__init__.py b/dlasset/utils/__init__.py new file mode 100644 index 0000000..a7bbabc --- /dev/null +++ b/dlasset/utils/__init__.py @@ -0,0 +1,3 @@ +"""Various utility functions.""" +from .execution import concurrent_run, concurrent_run_no_return, time_exec +from .image import crop_image, merge_y_cb_cr_a diff --git a/dlasset/utils/execution.py b/dlasset/utils/execution.py new file mode 100644 index 0000000..1e47815 --- /dev/null +++ b/dlasset/utils/execution.py @@ -0,0 +1,103 @@ +"""Utility functions related to execution.""" +import sys +import time +from concurrent.futures import Future, ProcessPoolExecutor +from functools import wraps +from typing import Any, Callable, Hashable, Sequence, TypeVar, Union + +from dlasset.log import init_log, log + +__all__ = ("concurrent_run", "concurrent_run_no_return", "time_exec") + +K = TypeVar("K", bound=Union[Hashable, None]) +R = TypeVar("R") + + +def on_concurrency_start(log_dir: str) -> None: + """Function to call on each concurrency start.""" + # Each process has 
a brand new logging factory + init_log(log_dir) + + +def concurrent_run( + fn: Callable[..., R], # type: ignore + args_list: Sequence[Sequence[Any]], + log_dir: str, /, + key_of_call: Callable[..., K] # type: ignore +) -> dict[K, R]: + """ + Run ``fn`` concurrently with different set of ``args``. + + If ``key_of_call`` is not ``None``, a ``dict`` where + key is obtained from ``key_of_call`` and the value as the result will be returned. + """ + results: dict[K, R] = {} + + def on_done( + key_of_call: Callable[..., K], # type: ignore + args: Sequence[Any] + ) -> Callable[[Future], None]: + def inner(future: Future) -> None: + try: + results[key_of_call(*args)] = future.result() + except Exception as ex: + log("ERROR", ex, exc_info=True) + raise ex + + return inner + + with ProcessPoolExecutor(initializer=on_concurrency_start, initargs=(log_dir,)) as executor: + futures: list[Future] = [] + for args in args_list: + future = executor.submit(fn, *args) + + if key_of_call: + future.add_done_callback(on_done(key_of_call, args)) + + futures.append(future) + + exceptions = [future.exception() for future in futures if future.exception()] + if error_count := len(exceptions): + log("ERROR", f"{error_count} of {len(futures)} concurrent tasks have error.") + log("ERROR", "-" * 20) + for exception in exceptions: + log("ERROR", f"{exception.__class__.__name__}: {exception}") + sys.exit(1) + + return results + + +def concurrent_run_no_return( + fn: Callable[..., R], + args_list: Sequence[Sequence[Any]], + log_dir: str +) -> None: # type: ignore + """ + Run ``fn`` concurrently with different set of ``args``. + + Does not return result. 
+ """ + + def key_of_call(*_: Any, **__: Any) -> None: + return None + + concurrent_run(fn, args_list, log_dir, key_of_call=key_of_call) + + +FuncT = TypeVar("FuncT", bound=Callable[..., Any]) + + +def time_exec(title: str) -> Callable[[FuncT], Any]: + """Time a function execution and log it.""" + + def decorator(fn: FuncT) -> Any: + @wraps(fn) + def wrapper(*args: Any, **kwargs: Any) -> Any: + _start = time.time() + ret = fn(*args, **kwargs) + log("INFO", f"{title} completed in {time.time() - _start:.3f} secs") + return ret + + return wrapper + + return decorator diff --git a/dlasset/utils/image.py b/dlasset/utils/image.py new file mode 100644 index 0000000..fd7e89e --- /dev/null +++ b/dlasset/utils/image.py @@ -0,0 +1,24 @@ +"""Utility functions for image processing.""" +from PIL import Image + +__all__ = ("merge_y_cb_cr_a", "crop_image") + + +def merge_y_cb_cr_a(img_y: Image, img_cb: Image, img_cr: Image, img_alpha: Image) -> Image: + """Merge the image channel of YCbCr and alpha into one single image.""" + y = img_y.split()[-1] # Y uses A for value + cb = img_cb.convert("L").resize(y.size, Image.ANTIALIAS) + cr = img_cr.convert("L").resize(y.size, Image.ANTIALIAS) + + img = Image.merge("YCbCr", (y, cb, cr)).convert("RGBA") + + a = img_alpha.convert("L") + + img.putalpha(a) + + return img + + +def crop_image(img: Image, tl_x: int, tl_y: int, rb_x: int, rb_y: int) -> Image: + """Crop ``img`` starting from the top-left corner at ``(tl_x, tl_y)`` to the right-bottom at ``(rb_x, rb_y)``.""" + return img.crop((tl_x, tl_y, rb_x, rb_y)) diff --git a/dlasset/workflow.py b/dlasset/workflow.py new file mode 100644 index 0000000..17271d6 --- /dev/null +++ b/dlasset/workflow.py @@ -0,0 +1,37 @@ +"""Workflows for processing the assets.""" +from .config import load_config +from .env import Environment, get_cli_args, init_env +from .export import export_by_task, export_raw_by_task +from .manifest import Manifest, decrypt_manifest_all_locale, download_manifest_all_locale, 
export_manifest_all_locale + +__all__ = ("initialize", "process_manifest", "export_assets") + + +def initialize() -> Environment: + """Initialize.""" + args = get_cli_args() + config = load_config(args.config_path) + + env = init_env(args, config) + env.print_info() + + return env + + +def process_manifest(env: Environment) -> Manifest: + """Process manifest asset and return its model.""" + download_manifest_all_locale(env) + decrypt_manifest_all_locale(env) + return export_manifest_all_locale(env) + + +def export_assets(env: Environment, manifest: Manifest) -> None: + """Perform asset exporting tasks in the config.""" + for asset_task in env.config.asset_tasks: + export_by_task(env, manifest, asset_task) + + # Update index file per task + env.index.update_index_files() + + for raw_task in env.config.raw_tasks: + export_raw_by_task(env, manifest, raw_task) diff --git a/lib/decrypt/BouncyCastle.Crypto.dll b/lib/decrypt/BouncyCastle.Crypto.dll new file mode 100644 index 0000000..0a50458 Binary files /dev/null and b/lib/decrypt/BouncyCastle.Crypto.dll differ diff --git a/lib/decrypt/Decrypt.deps.json b/lib/decrypt/Decrypt.deps.json new file mode 100644 index 0000000..5b42d59 --- /dev/null +++ b/lib/decrypt/Decrypt.deps.json @@ -0,0 +1,41 @@ +{ + "runtimeTarget": { + "name": ".NETCoreApp,Version=v3.1", + "signature": "" + }, + "compilationOptions": {}, + "targets": { + ".NETCoreApp,Version=v3.1": { + "Decrypt/1.0.0": { + "dependencies": { + "BouncyCastle.NetCore": "1.8.8" + }, + "runtime": { + "Decrypt.dll": {} + } + }, + "BouncyCastle.NetCore/1.8.8": { + "runtime": { + "lib/netstandard2.0/BouncyCastle.Crypto.dll": { + "assemblyVersion": "1.8.8.0", + "fileVersion": "1.8.20265.1" + } + } + } + } + }, + "libraries": { + "Decrypt/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "BouncyCastle.NetCore/1.8.8": { + "type": "package", + "serviceable": true, + "sha512": 
"sha512-Jx3dQd4SQOHZmxcImYSp0YT2WpYxosQXoZutbHORhFK/zHGH8aykfIyM6S4DVGyu4g9+KHGJUQEh2RH4qXj/Jg==", + "path": "bouncycastle.netcore/1.8.8", + "hashPath": "bouncycastle.netcore.1.8.8.nupkg.sha512" + } + } +} \ No newline at end of file diff --git a/lib/decrypt/Decrypt.dll b/lib/decrypt/Decrypt.dll new file mode 100644 index 0000000..7f13818 Binary files /dev/null and b/lib/decrypt/Decrypt.dll differ diff --git a/lib/decrypt/Decrypt.runtimeconfig.json b/lib/decrypt/Decrypt.runtimeconfig.json new file mode 100644 index 0000000..bc456d7 --- /dev/null +++ b/lib/decrypt/Decrypt.runtimeconfig.json @@ -0,0 +1,9 @@ +{ + "runtimeOptions": { + "tfm": "netcoreapp3.1", + "framework": { + "name": "Microsoft.NETCore.App", + "version": "3.1.0" + } + } +} \ No newline at end of file diff --git a/main.py b/main.py index 9d5f3f2..a3146be 100644 --- a/main.py +++ b/main.py @@ -1,8 +1,13 @@ -from dlasset import workflow +from dlasset.utils import time_exec +from dlasset.workflow import export_assets, initialize, process_manifest +@time_exec("Assets downloading & preprocessing") def main(): - workflow() + env = initialize() + manifest = process_manifest(env) + + export_assets(env, manifest) if __name__ == '__main__': diff --git a/precommit.ps1 b/precommit.ps1 index 2b4d426..d64a80e 100644 --- a/precommit.ps1 +++ b/precommit.ps1 @@ -12,15 +12,15 @@ function Invoke-Check([string]$command, [string]$taskName) } } +Write-Host "Checking with mypy..." -Fore Cyan +Invoke-Check "mypy dlasset" "mypy" + Write-Host "Checking with pylint..." -Fore Cyan Invoke-Check "pylint dlasset" "pylint" Write-Host "Checking with pydocstyle..." -Fore Cyan Invoke-Check "pydocstyle dlasset --count" "pydocstyle" -Write-Host "Checking with mypy..." -Fore Cyan -Invoke-Check "mypy dlasset" "mypy" - Write-Host "Checking with bandit..." 
-Fore Cyan Invoke-Check "bandit -r dlasset" "bandit" diff --git a/pyproject.toml b/pyproject.toml index d3e7ba8..b89c9de 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,25 +11,21 @@ docstring-quote = "double" [tool.pylint.BASIC] # Reason of the good names: -# - _ -# often used as dummy variable during unpacking -# - T -# often used to for TypeVar -# - f -# often used as a file stream name -# - i, j, k -# often used in for loops -# - s -# often used to represent "string" -# - v -# often used to represent "value" -# - dt, tz -# often used in datetime handling (dt for datetime, tz for timezone) -# - ex -# often used as the var name of exception caught by try..except -# - fn -# often used to represent a function -good-names = "_,T,f,i,j,k,s,v,dt,ex,fn,tz" +# - _: dummy variable during unpacking +# - T: for TypeVar +# - f: file stream name +# - i, j, k: for loops +# - r, g, b, a: represents RGBA +# - y, cb, cr: represents YCbCr +# - K: type variable name for "K"ey +# - s: represents "string" +# - v: represents "value" +# - R: type variable name for "R"eturn +# - iv: "iv" for crypto +# - dt, tz: datetime handling (dt for datetime, tz for timezone) +# - ex: var name of exception caught by try..except +# - fn: represents a function +good-names = "_, T, f, i, j, K, k, r, g, b, a, y, cb, cr, s, v, R, iv, dt, ex, fn, tz" [tool.pylint.FORMAT] max-line-length = 119 @@ -40,6 +36,9 @@ max-line-length = 119 # arguments-differ: Let it checked by IDE or flake8 disable = "fixme, too-many-instance-attributes, cyclic-import, arguments-differ" +[tool.pylint.SIMILARITIES] +ignore-imports="yes" + # -------------------------- pydocstyle -------------------------- [tool.pydocstyle] @@ -65,4 +64,11 @@ exclude = "(tests/|script_*)" show_error_codes = true disallow_untyped_defs = true disallow_untyped_decorators = true -disallow_any_expr = true + +[[tool.mypy.overrides]] +module = [ + "UnityPy.*", + "jsonschema.*", + "PIL", +] +ignore_missing_imports = true diff --git 
a/requirements-dev.txt b/requirements-dev.txt index a1a71d0..a1353ae 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -12,3 +12,7 @@ pylint pylint-quotes pydocstyle flake8 + +# Typings +types-PyYAML +types-requests diff --git a/requirements.txt b/requirements.txt index e8a8255..2f5c678 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,12 @@ +# Config +pyyaml +jsonschema + +# Data downloading +requests + +# Asset processing UnityPy + +# Image processing Pillow