Skip to content

Commit

Permalink
use new .NET 8 Syntax features
Browse files Browse the repository at this point in the history
  • Loading branch information
mcmonkey4eva committed Mar 10, 2024
1 parent 9484efe commit f2a3d16
Show file tree
Hide file tree
Showing 43 changed files with 300 additions and 307 deletions.
2 changes: 1 addition & 1 deletion src/Accounts/Session.cs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ public class Session : IEquatable<Session>

public CancellationTokenSource SessInterrupt = new();

public List<GenClaim> Claims = new();
public List<GenClaim> Claims = [];

/// <summary>Statistics about the generations currently waiting in this session.</summary>
public int WaitingGenerations = 0, LoadingModels = 0, WaitingBackends = 0, LiveGens = 0;
Expand Down
6 changes: 3 additions & 3 deletions src/Accounts/User.cs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ public class DatabaseEntry
public string ID { get; set; }

/// <summary>What presets this user has saved, matched to the preset database.</summary>
public List<string> Presets { get; set; } = new();
public List<string> Presets { get; set; } = [];

/// <summary>This user's stored settings data.</summary>
public string RawSettings { get; set; } = "";
Expand Down Expand Up @@ -85,7 +85,7 @@ public List<string> ListAllGenericData(string dataname)
catch (Exception ex)
{
Logs.Error($"Error loading generic-data for user {UserID}: {ex}");
return new();
return [];
}
}
}
Expand Down Expand Up @@ -137,7 +137,7 @@ public List<T2IPreset> GetAllPresets()
catch (Exception ex)
{
Logs.Error($"Error loading presets for user {UserID}: {ex}");
return new();
return [];
}
}
}
Expand Down
18 changes: 9 additions & 9 deletions src/Backends/BackendHandler.cs
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ public class BackendHandler
public LockObject CentralLock = new();

/// <summary>Map of backend type IDs to metadata about them.</summary>
public Dictionary<string, BackendType> BackendTypes = new();
public Dictionary<string, BackendType> BackendTypes = [];

/// <summary>Value to ensure unique IDs are given to new backends.</summary>
public int LastBackendID = 0;
Expand Down Expand Up @@ -516,7 +516,7 @@ public void Shutdown()
HasShutdown = true;
NewBackendInitSignal.Set();
CheckBackendsSignal.Set();
List<(T2IBackendData, Task)> tasks = new();
List<(T2IBackendData, Task)> tasks = [];
foreach (T2IBackendData backend in T2IBackends.Values)
{
tasks.Add((backend, Task.Run(async () =>
Expand Down Expand Up @@ -574,13 +574,13 @@ public class ModelRequestPressure
public volatile bool IsLoading;

/// <summary>Sessions that want the model.</summary>
public HashSet<Session> Sessions = new();
public HashSet<Session> Sessions = [];

/// <summary>Requests that want the model.</summary>
public List<T2IBackendRequest> Requests = new();
public List<T2IBackendRequest> Requests = [];

/// <summary>Set of backends that tried to satisfy this request but failed.</summary>
public HashSet<int> BadBackends = new();
public HashSet<int> BadBackends = [];

/// <summary>Async issue prevention lock.</summary>
public LockObject Locker = new();
Expand Down Expand Up @@ -653,7 +653,7 @@ public void Complete()

public void TryFind()
{
List<T2IBackendData> currentBackends = Handler.T2IBackends.Values.ToList();
List<T2IBackendData> currentBackends = [.. Handler.T2IBackends.Values];
List<T2IBackendData> possible = currentBackends.Where(b => b.Backend.IsEnabled && !b.Backend.ShutDownReserve && b.Backend.Reservations == 0 && b.Backend.Status == BackendStatus.RUNNING).ToList();
Logs.Verbose($"[BackendHandler] Backend request #{ID} searching for backend... have {possible.Count}/{currentBackends.Count} possible");
if (!possible.Any())
Expand All @@ -677,7 +677,7 @@ public void TryFind()
Failure = new InvalidOperationException($"No backends match the settings of the request given!{reason}");
return;
}
List<T2IBackendData> available = possible.Where(b => !b.CheckIsInUse).OrderBy(b => b.Usages).ToList();
List<T2IBackendData> available = [.. possible.Where(b => !b.CheckIsInUse).OrderBy(b => b.Usages)];
if (Logs.MinimumLevel <= Logs.LogLevel.Verbose)
{
Logs.Verbose($"Possible: {possible.Select(b => $"{b.ID}/{b.BackType.Name}").JoinString(", ")}, available {available.Select(b => $"{b.ID}/{b.BackType.Name}").JoinString(", ")}");
Expand Down Expand Up @@ -903,7 +903,7 @@ public void LoadHighestPressureNow(List<T2IBackendData> possible, List<T2IBacken
}
Logs.Verbose($"[BackendHandler] Will load highest pressure model...");
long timeRel = Environment.TickCount64;
List<ModelRequestPressure> pressures = ModelRequests.Values.Where(p => !p.IsLoading).OrderByDescending(p => p.Heuristic(timeRel)).ToList();
List<ModelRequestPressure> pressures = [.. ModelRequests.Values.Where(p => !p.IsLoading).OrderByDescending(p => p.Heuristic(timeRel))];
if (pressures.IsEmpty())
{
Logs.Verbose($"[BackendHandler] No model requests, skipping load.");
Expand Down Expand Up @@ -952,7 +952,7 @@ public void LoadHighestPressureNow(List<T2IBackendData> possible, List<T2IBacken
T2IBackendData availableBackend = valid.MinBy(a => a.TimeLastRelease);
Logs.Debug($"[BackendHandler] backend #{availableBackend.ID} will load a model: {highestPressure.Model.Name}, with {highestPressure.Count} requests waiting for {timeWait / 1000f:0.#} seconds");
highestPressure.IsLoading = true;
List<Session.GenClaim> claims = new();
List<Session.GenClaim> claims = [];
foreach (Session sess in highestPressure.Sessions)
{
claims.Add(sess.Claim(0, 1, 0, 0));
Expand Down
22 changes: 11 additions & 11 deletions src/Backends/SwarmSwarmBackend.cs
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ public class SwarmSwarmBackendSettings : AutoConfiguration
public ConcurrentDictionary<string, string> RemoteFeatureCombo = new();

/// <summary>A set of all backend-types the remote Swarm instance has.</summary>
public volatile HashSet<string> RemoteBackendTypes = new();
public volatile HashSet<string> RemoteBackendTypes = [];

/// <inheritdoc/>
public override IEnumerable<string> SupportedFeatures => RemoteFeatureCombo.Keys;
Expand All @@ -62,7 +62,7 @@ public class SwarmSwarmBackendSettings : AutoConfiguration

public async Task ValidateAndBuild()
{
JObject sessData = await HttpClient.PostJson($"{Address}/API/GetNewSession", new());
JObject sessData = await HttpClient.PostJson($"{Address}/API/GetNewSession", []);
Session = sessData["session_id"].ToString();
string id = sessData["server_id"]?.ToString();
BackendCount = sessData["count_running"].Value<int>();
Expand Down Expand Up @@ -109,10 +109,10 @@ public void TriggerRefresh()
Logs.Verbose($"Trigger refresh on remote swarm {Address}");
await HttpClient.PostJson($"{Address}/TriggerRefresh", new() { ["session_id"] = Session });
await ReviseRemoteDataList();
List<Task> tasks = new()
{
List<Task> tasks =
[
ReviseRemoteDataList()
};
];
foreach (BackendHandler.T2IBackendData backend in ControlledNonrealBackends.Values)
{
tasks.Add((backend.Backend as SwarmSwarmBackend).ReviseRemoteDataList());
Expand All @@ -129,28 +129,28 @@ await RunWithSession(async () =>
AutoThrowException(backendData);
if (IsReal)
{
List<Task> tasks = new();
RemoteModels ??= new();
List<Task> tasks = [];
RemoteModels ??= [];
foreach (string type in Program.T2IModelSets.Keys)
{
string runType = type;
tasks.Add(Task.Run(async () =>
{
JObject modelsData = await HttpClient.PostJson($"{Address}/API/ListModels", new() { ["session_id"] = Session, ["path"] = "", ["depth"] = 10, ["subtype"] = runType });
Dictionary<string, JObject> remoteModelsParsed = new();
Dictionary<string, JObject> remoteModelsParsed = [];
foreach (JToken x in modelsData["files"].ToList())
{
JObject data = x.DeepClone() as JObject;
data["local"] = false;
remoteModelsParsed[data["name"].ToString()] = data;
}
RemoteModels[runType] = remoteModelsParsed;
Models[runType] = remoteModelsParsed.Keys.ToList();
Models[runType] = [.. remoteModelsParsed.Keys];
}));
}
await Task.WhenAll(tasks);
}
HashSet<string> features = new(), types = new();
HashSet<string> features = [], types = [];
bool isLoading = false;
HashSet<int> ids = IsReal ? new(ControlledNonrealBackends.Keys) : null;
if (!IsReal)
Expand Down Expand Up @@ -245,7 +245,7 @@ public override async Task Init()
if (IsReal)
{
CanLoadModels = false;
Models = new();
Models = [];
}
if (string.IsNullOrWhiteSpace(Settings.Address))
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,14 @@ public class AutoWebUIBackendExtension : Extension
{

/// <summary>List of actions to run when generating an image, primarily to alter the input data.</summary>
public static List<Action<JObject, T2IParamInput>> OtherGenHandlers = new();
public static List<Action<JObject, T2IParamInput>> OtherGenHandlers = [];

/// <summary>Set of all feature-ids supported by Auto WebUI backends.</summary>
public static HashSet<string> FeaturesSupported = new() { "variation_seed", "autowebui" };
public static HashSet<string> FeaturesSupported = ["variation_seed", "autowebui"];

public static T2IRegisteredParam<string> SamplerParam;

public static List<string> Samplers = new() { "Euler a", "Euler" };
public static List<string> Samplers = ["Euler a", "Euler"];

public static LockObject ExtBackLock = new();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ public async Task LoadValueSet()
}
Logs.Verbose($"Comfy backend {BackendData.ID} loaded value set, parsing...");
RawObjectInfo = result;
Models ??= new();
Models ??= [];
string firstBackSlash = null;
void trackModels(string subtype, string node, string param)
{
Expand Down Expand Up @@ -430,8 +430,8 @@ private async Task<Image[]> GetAllImagesForHistory(JToken output, CancellationTo
{
Logs.Verbose($"ComfyUI history said: {output.ToDenseDebugString()}");
}
List<Image> outputs = new();
List<string> outputFailures = new();
List<Image> outputs = [];
List<string> outputFailures = [];
foreach (JToken outData in output["outputs"].Values())
{
if (outData is null)
Expand Down Expand Up @@ -498,21 +498,21 @@ async Task LoadImage(JObject outImage, Image.ImageType type)
Logs.Warning($"Comfy backend gave no valid output");
}
}
return outputs.ToArray();
return [.. outputs];
}

/// <inheritdoc/>
public override async Task<Image[]> Generate(T2IParamInput user_input)
{
List<Image> images = new();
List<Image> images = [];
await GenerateLive(user_input, "0", output =>
{
if (output is Image img)
{
images.Add(img);
}
});
return images.ToArray();
return [.. images];
}

public static string CreateWorkflow(T2IParamInput user_input, Func<string, string> initImageFixer, string ModelFolderFormat = null, HashSet<string> features = null)
Expand Down Expand Up @@ -610,7 +610,7 @@ long fixSeed(long input)
}
else
{
workflow = new WorkflowGenerator() { UserInput = user_input, ModelFolderFormat = ModelFolderFormat, Features = features ?? new() }.Generate().ToString();
workflow = new WorkflowGenerator() { UserInput = user_input, ModelFolderFormat = ModelFolderFormat, Features = features ?? [] }.Generate().ToString();
workflow = initImageFixer(workflow);
}
return workflow;
Expand All @@ -627,7 +627,7 @@ public virtual bool RemoveInputFile(string filename)
/// <inheritdoc/>
public override async Task GenerateLive(T2IParamInput user_input, string batchId, Action<object> takeOutput)
{
List<Action> completeSteps = new();
List<Action> completeSteps = [];
string initImageFixer(string workflow) // This is a hack, backup for if Swarm nodes are missing
{
void TryApply(string key, Image img, bool resize)
Expand Down
30 changes: 15 additions & 15 deletions src/BuiltinExtensions/ComfyUIBackend/ComfyUIBackendExtension.cs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ public class ComfyUIBackendExtension : Extension
public static Dictionary<string, string> Workflows;

/// <summary>Set of all feature-ids supported by ComfyUI backends.</summary>
public static HashSet<string> FeaturesSupported = new() { "comfyui", "refiners", "controlnet", "endstepsearly", "seamless", "video" };
public static HashSet<string> FeaturesSupported = ["comfyui", "refiners", "controlnet", "endstepsearly", "seamless", "video"];

/// <summary>Extensible map of ComfyUI Node IDs to supported feature IDs.</summary>
public static Dictionary<string, string> NodeToFeatureMap = new()
Expand Down Expand Up @@ -117,7 +117,7 @@ public record class ComfyCustomWorkflow(string Name, string Workflow, string Pro

public void LoadWorkflowFiles()
{
Workflows = new();
Workflows = [];
foreach (string workflow in Directory.EnumerateFiles($"{Folder}/Workflows", "*.json", new EnumerationOptions() { RecurseSubdirectories = true }).Order())
{
string fileName = workflow.Replace('\\', '/').After("/Workflows/");
Expand Down Expand Up @@ -170,14 +170,14 @@ public static ComfyCustomWorkflow GetWorkflowByName(string name)
public void Refresh()
{
LoadWorkflowFiles();
List<Task> tasks = new();
List<Task> tasks = [];
foreach (ComfyUIAPIAbstractBackend backend in RunningComfyBackends.ToArray())
{
tasks.Add(backend.LoadValueSet());
}
if (tasks.Any())
{
Task.WaitAll(tasks.ToArray(), Program.GlobalProgramCancel);
Task.WaitAll([.. tasks], Program.GlobalProgramCancel);
}
}

Expand Down Expand Up @@ -246,13 +246,13 @@ public static void AssignValuesFromRaw(JObject rawObjectInfo)

public static T2IRegisteredParam<int> RefinerHyperTile, VideoFrameInterpolationMultiplier;

public static List<string> UpscalerModels = new() { "latent-nearest-exact", "latent-bilinear", "latent-area", "latent-bicubic", "latent-bislerp", "pixel-nearest-exact", "pixel-bilinear", "pixel-area", "pixel-bicubic", "pixel-lanczos" },
Samplers = new() { "euler", "euler_ancestral", "heun", "dpm_2", "dpm_2_ancestral", "lms", "dpm_fast", "dpm_adaptive", "dpmpp_2s_ancestral", "dpmpp_sde", "dpmpp_2m", "dpmpp_2m_sde", "ddim", "uni_pc", "uni_pc_bh2" },
Schedulers = new() { "normal", "karras", "exponential", "simple", "ddim_uniform" };
public static List<string> UpscalerModels = ["latent-nearest-exact", "latent-bilinear", "latent-area", "latent-bicubic", "latent-bislerp", "pixel-nearest-exact", "pixel-bilinear", "pixel-area", "pixel-bicubic", "pixel-lanczos"],
Samplers = ["euler", "euler_ancestral", "heun", "dpm_2", "dpm_2_ancestral", "lms", "dpm_fast", "dpm_adaptive", "dpmpp_2s_ancestral", "dpmpp_sde", "dpmpp_2m", "dpmpp_2m_sde", "ddim", "uni_pc", "uni_pc_bh2"],
Schedulers = ["normal", "karras", "exponential", "simple", "ddim_uniform"];

public static List<string> IPAdapterModels = new() { "None" };
public static List<string> IPAdapterModels = ["None"];

public static List<string> GligenModels = new() { "None" };
public static List<string> GligenModels = ["None"];

public static ConcurrentDictionary<string, JToken> ControlNetPreprocessors = new() { ["None"] = null };

Expand Down Expand Up @@ -392,7 +392,7 @@ public class ComfyClientData

public AbstractT2IBackend Backend;

public static HashSet<string> ModelNameInputNames = new() { "ckpt_name", "vae_name", "lora_name", "clip_name", "control_net_name", "style_model_name", "model_path", "lora_names" };
public static HashSet<string> ModelNameInputNames = ["ckpt_name", "vae_name", "lora_name", "clip_name", "control_net_name", "style_model_name", "model_path", "lora_names"];

public void FixUpPrompt(JObject prompt)
{
Expand Down Expand Up @@ -448,11 +448,11 @@ public async Task ComfyBackendDirectHandler(HttpContext context)
bool shouldReserve = hasMulti && doMultiStr == "reserve";
if (!shouldReserve && (!hasMulti || doMultiStr != "true"))
{
allBackends = new() { new(webClient, address, backend) };
allBackends = [new(webClient, address, backend)];
}
string path = context.Request.Path.Value;
path = path.After("/ComfyBackendDirect");
if (path.StartsWith("/"))
if (path.StartsWith('/'))
{
path = path[1..];
}
Expand All @@ -464,7 +464,7 @@ public async Task ComfyBackendDirectHandler(HttpContext context)
{
Logs.Debug($"Comfy backend direct websocket request to {path}, have {allBackends.Count} backends available");
WebSocket socket = await context.WebSockets.AcceptWebSocketAsync();
List<Task> tasks = new();
List<Task> tasks = [];
ComfyUser user = new();
// Order all evens then all odds - eg 0, 2, 4, 6, 1, 3, 5, 7 (to reduce chance of overlap when sharing)
int[] vals = Enumerable.Range(0, allBackends.Count).ToArray();
Expand Down Expand Up @@ -725,7 +725,7 @@ void tryFindBackend()
}
else if (path == "queue" || path == "interrupt") // eg queue delete
{
List<Task<HttpResponseMessage>> tasks = new();
List<Task<HttpResponseMessage>> tasks = [];
MemoryStream inputCopy = new();
await context.Request.Body.CopyToAsync(inputCopy);
byte[] inputBytes = inputCopy.ToArray();
Expand Down Expand Up @@ -758,7 +758,7 @@ void tryFindBackend()
{
if (path.StartsWith("view?filename="))
{
List<Task<HttpResponseMessage>> requests = new();
List<Task<HttpResponseMessage>> requests = [];
foreach ((HttpClient clientLocal, string addressLocal, AbstractT2IBackend backendLocal) in allBackends)
{
requests.Add(clientLocal.SendAsync(new(new(context.Request.Method), $"{addressLocal}/{path}")));
Expand Down
10 changes: 2 additions & 8 deletions src/BuiltinExtensions/ComfyUIBackend/ComfyUISelfStartBackend.cs
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,7 @@ async Task EnsureNodeRepo(string url)
Process.Start(new ProcessStartInfo("git", "pull") { WorkingDirectory = Path.GetFullPath($"{nodePath}/{folderName}") }).WaitForExit();
}
}
List<Task> tasks = new()
{
Task.Run(async () => await EnsureNodeRepo("https://github.com/mcmonkeyprojects/sd-dynamic-thresholding"))
};
List<Task> tasks = [Task.Run(async () => await EnsureNodeRepo("https://github.com/mcmonkeyprojects/sd-dynamic-thresholding"))];
await Task.WhenAll(tasks);
}
catch (Exception ex)
Expand Down Expand Up @@ -174,10 +171,7 @@ public override async Task Init()
{
Logs.Warning($"ComfyUI start script is '{settings.StartScript}', which looks wrong - did you forget to append 'main.py' on the end?");
}
List<Task> tasks = new()
{
Task.Run(EnsureNodeRepos)
};
List<Task> tasks = [Task.Run(EnsureNodeRepos)];
if (settings.AutoUpdate && !string.IsNullOrWhiteSpace(settings.StartScript))
{
tasks.Add(Task.Run(async () =>
Expand Down
Loading

0 comments on commit f2a3d16

Please sign in to comment.