Merge branch 'main' into whywontyoudie
RunDevelopment authored Apr 5, 2024
2 parents 678dfcf + 1e7c99a commit 92717c2
Showing 14 changed files with 461 additions and 326 deletions.
6 changes: 0 additions & 6 deletions backend/src/api/input.py
@@ -183,9 +183,3 @@ def make_optional(self):
def make_fused(self, with_output: OutputId | int = 0):
self.fused = IOFusion(output_id=OutputId(with_output))
return self

def __repr__(self):
return str(self.to_dict())

def __iter__(self):
yield from self.to_dict().items()
77 changes: 75 additions & 2 deletions backend/src/api/node_context.py
@@ -1,5 +1,6 @@
import time
from abc import ABC, abstractmethod
from typing import Literal

from .settings import SettingsParser

@@ -8,7 +9,7 @@ class Aborted(Exception):
pass


class NodeProgress(ABC):
class Progress(ABC):
@property
@abstractmethod
def aborted(self) -> bool:
@@ -51,8 +52,80 @@ def set_progress(self, progress: float) -> None:
Raises an `Aborted` exception if the current operation was aborted.
"""

def sub_progress(self, offset: float, length: float) -> "Progress":
"""
Returns a new `Progress` object that represents a sub-progress of the current operation.
The progress range of the sub-progress is defined by `offset` and `length`: `offset` must be a value between 0
and 1, and `length` must be a positive value such that `offset + length <= 1`.
The progress reported to the parent is calculated as `offset + progress * length`, where `progress` is the
value passed to `set_progress` on the sub-progress.
"""
return _SubProgress(self, offset, length)

@staticmethod
def noop_progress() -> "Progress":
"""
Returns a `Progress` object that does nothing. It is never paused or aborted and does not report any progress.
"""
return _NoopProgress()


class _NoopProgress(Progress):
@property
def aborted(self) -> Literal[False]:
return False

@property
def paused(self) -> Literal[False]:
return False

def check_aborted(self) -> None:
pass

def suspend(self) -> None:
pass

def set_progress(self, progress: float) -> None:
pass

def sub_progress(self, offset: float, length: float) -> "Progress":
return _NoopProgress()


class _SubProgress(Progress):
def __init__(self, parent: Progress, offset: float, length: float):
self._parent = parent
self._offset = offset
self._length = length

@property
def aborted(self) -> bool:
return self._parent.aborted

@property
def paused(self) -> bool:
return self._parent.paused

def check_aborted(self) -> None:
self._parent.check_aborted()

def suspend(self) -> None:
self._parent.suspend()

def set_progress(self, progress: float) -> None:
self._parent.set_progress(self._offset + progress * self._length)

def sub_progress(self, offset: float, length: float) -> "_SubProgress":
return _SubProgress(
self._parent,
offset=self._offset + offset * self._length,
length=length * self._length,
)


class NodeContext(NodeProgress, ABC):
class NodeContext(Progress, ABC):
"""
The execution context of the current node.
"""
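
For illustration, here is a minimal, self-contained sketch of how the new `sub_progress` mapping behaves. `PrintProgress` is a stand-in invented for this example, not part of the `api` package; it only mirrors the `offset + progress * length` arithmetic described in the docstring above.

class PrintProgress:
    """Illustrative stand-in for `Progress` that reports to stdout."""

    def set_progress(self, progress: float) -> None:
        print(f"overall: {progress:.3f}")

    def sub_progress(self, offset: float, length: float) -> "PrintProgress":
        parent = self

        class _Sub(PrintProgress):
            def set_progress(self, progress: float) -> None:
                # Map the sub-range [0, 1] into the parent's [offset, offset + length].
                parent.set_progress(offset + progress * length)

        return _Sub()


overall = PrintProgress()
loading = overall.sub_progress(offset=0.0, length=0.25)     # first quarter of the bar
upscaling = overall.sub_progress(offset=0.25, length=0.75)  # remaining three quarters

loading.set_progress(1.0)    # prints "overall: 0.250"
upscaling.set_progress(0.4)  # prints "overall: 0.550" (0.25 + 0.4 * 0.75)

Nested calls compose the same way, which is why `_SubProgress.sub_progress` in the diff pre-multiplies the offsets and lengths: the parent chain stays flat while producing the same composed mapping.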
6 changes: 0 additions & 6 deletions backend/src/api/output.py
@@ -59,12 +59,6 @@ def suggest(self):
self.should_suggest = True
return self

def __repr__(self):
return str(self.to_dict())

def __iter__(self):
yield from self.to_dict().items()

def get_broadcast_data(self, _value: object):
return None

5 changes: 3 additions & 2 deletions backend/src/nodes/impl/pytorch/auto_split.py
@@ -6,7 +6,7 @@
import torch
from spandrel import ImageModelDescriptor

from api import NodeProgress
from api import Progress

from ..upscale.auto_split import Split, Tiler, auto_split
from .utils import safe_cuda_cache_empty
@@ -55,12 +55,13 @@ def pytorch_auto_split(
device: torch.device,
use_fp16: bool,
tiler: Tiler,
progress: NodeProgress,
progress: Progress,
) -> np.ndarray:
dtype = torch.float16 if use_fp16 else torch.float32
model = model.to(device, dtype)

def upscale(img: np.ndarray, _: object):
progress.check_aborted()
if progress.paused:
# clear resources before pausing
gc.collect()
@@ -6,7 +6,7 @@
from sanic.log import logger
from spandrel import ImageModelDescriptor, ModelTiling

from api import KeyInfo, NodeContext, NodeProgress
from api import KeyInfo, NodeContext, Progress
from nodes.groups import Condition, if_group
from nodes.impl.pytorch.auto_split import pytorch_auto_split
from nodes.impl.upscale.auto_split_tiles import (
@@ -37,7 +37,7 @@ def upscale(
model: ImageModelDescriptor,
tile_size: TileSize,
options: PyTorchSettings,
progress: NodeProgress,
progress: Progress,
):
with torch.no_grad():
# Borrowed from iNNfer
12 changes: 6 additions & 6 deletions backend/src/process.py
@@ -46,25 +46,25 @@ def collect_input_information(
if not enforced:
try:
value = node_input.enforce_(value) # noqa
except Exception as e:
except Exception:
logger.error(
f"Error enforcing input {node_input.label} (id {node_input.id})",
e,
exc_info=True,
)
# We'll just try using the un-enforced value. Maybe it'll work.

try:
input_dict[node_input.id] = node_input.get_error_value(value)
except Exception as e:
except Exception:
logger.error(
f"Error getting error value for input {node_input.label} (id {node_input.id})",
e,
exc_info=True,
)

return input_dict
except Exception as outer_e:
except Exception:
# this method must not throw
logger.error("Error collecting input information.", outer_e)
logger.error("Error collecting input information.", exc_info=True)
return {}


18 changes: 17 additions & 1 deletion backend/src/server.py
@@ -474,8 +474,24 @@ async def import_packages(

if len(import_errors) > 0:
logger.warning(f"Failed to import {len(import_errors)} modules:")

by_error: dict[str, list[api.LoadErrorInfo]] = {}
for e in import_errors:
logger.warning(f"{e.error} -> {e.module}")
key = str(e.error)
if key not in by_error:
by_error[key] = []
by_error[key].append(e)

for error in sorted(by_error.keys()):
modules = [e.module for e in by_error[error]]
if len(modules) == 1:
logger.warning(f"{error} -> {modules[0]}")
else:
count = len(modules)
if count > 3:
modules = modules[:2] + [f"and {count - 2} more ..."]
l = "\n".join(" -> " + m for m in modules)
logger.warning(f"{error} -> {count} modules ...\n{l}")

if config.error_on_failed_node:
raise ValueError("Error importing nodes")
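
To illustrate the effect of the grouping, suppose four modules failed with the same missing-dependency error (the error text and module names below are hypothetical). Instead of one warning per module, the log would contain roughly:

Failed to import 4 modules:
No module named 'onnxruntime' -> 4 modules ...
 -> chaiNNer_onnx/onnx/load_model.py
 -> chaiNNer_onnx/onnx/save_model.py
 -> and 2 more ...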
14 changes: 7 additions & 7 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -150,7 +150,7 @@
"rregex": "^1.8.0",
"semver": "^7.3.7",
"systeminformation": "^5.21.7",
"undici": "^5.28.3",
"undici": "^5.28.4",
"use-context-selector": "^1.4.0",
"use-debounce": "^8.0.1",
"uuid": "^8.3.2",
10 changes: 8 additions & 2 deletions src/main/backend/process.ts
@@ -92,10 +92,16 @@ export class OwnedBackendProcess implements BaseBackendProcess {
const dataString = String(data);
// Remove unneeded timestamp
const fixedData = dataString.split('] ').slice(1).join('] ');
log.info(`Backend: ${removedTrailingNewLine(fixedData)}`);
const message = removedTrailingNewLine(fixedData);
if (message) {
log.info(`Backend: ${message}`);
}
});
backend.stderr.on('data', (data) => {
log.error(`Backend: ${removedTrailingNewLine(String(data))}`);
const message = removedTrailingNewLine(String(data));
if (message) {
log.error(`Backend: ${message}`);
}
});

return backend;
5 changes: 5 additions & 0 deletions src/main/setting-storage.ts
@@ -19,6 +19,11 @@ export const readSettings = (): ChainnerSettings => {

// legacy settings
const storagePath = path.join(getRootDirSync(), 'settings');
if (!existsSync(storagePath)) {
// neither settings.json nor old settings exist, so this is a fresh install
return { ...defaultSettings };
}

const storage = new LocalStorage(storagePath);
const partialSettings = migrateOldStorageSettings({
keys: Array.from({ length: storage.length }, (_, i) => storage.key(i)),