diff --git a/flake.lock b/flake.lock index 432f5292..0e8a5bde 100644 --- a/flake.lock +++ b/flake.lock @@ -212,17 +212,17 @@ "nixpkgs": "nixpkgs" }, "locked": { - "lastModified": 1714055636, - "narHash": "sha256-8LCyIPAK4/4ge03ohCIWpoJrMGgoCaOriALzt7gPxHE=", + "lastModified": 1719092179, + "narHash": "sha256-/4jxq5+pDkVMao5RzOm27C2AfANjisTuvsycA0pbBCg=", "owner": "PetarKirov", "repo": "dlang.nix", - "rev": "dab4c199ad644dc23b0b9481e2e5a063e9492b84", + "rev": "21b1b3b18b3b635a43b319612aff529d26b1863b", "type": "github" }, "original": { "owner": "PetarKirov", "repo": "dlang.nix", - "rev": "dab4c199ad644dc23b0b9481e2e5a063e9492b84", + "rev": "21b1b3b18b3b635a43b319612aff529d26b1863b", "type": "github" } }, @@ -814,11 +814,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1711703276, - "narHash": "sha256-iMUFArF0WCatKK6RzfUJknjem0H9m4KgorO/p3Dopkk=", + "lastModified": 1719506693, + "narHash": "sha256-C8e9S7RzshSdHB7L+v9I51af1gDM5unhJ2xO1ywxNH8=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "d8fe5e6c92d0d190646fb9f1056741a229980089", + "rev": "b2852eb9365c6de48ffb0dc2c9562591f652242a", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 0244aa27..683b12fe 100644 --- a/flake.nix +++ b/flake.nix @@ -197,7 +197,7 @@ }; dlang-nix = { - url = "github:PetarKirov/dlang.nix?branch=feat/build-dub-package&rev=dab4c199ad644dc23b0b9481e2e5a063e9492b84"; + url = "github:PetarKirov/dlang.nix?branch=feat/build-dub-package&rev=21b1b3b18b3b635a43b319612aff529d26b1863b"; inputs = { flake-compat.follows = "flake-compat"; flake-parts.follows = "flake-parts"; diff --git a/packages/mcl/dub.sdl b/packages/mcl/dub.sdl index 011a343f..e6b7f246 100644 --- a/packages/mcl/dub.sdl +++ b/packages/mcl/dub.sdl @@ -23,3 +23,5 @@ dflags "-mcpu=baseline" platform="dmd" dependency "mir-cpuid" version="~>1.2.11" dependency "silly" version="~>1.1.1" + +libs "curl" diff --git a/packages/mcl/dub.selections.json b/packages/mcl/dub.selections.json index 2977f10a..ccba14d3 100644 --- a/packages/mcl/dub.selections.json +++ b/packages/mcl/dub.selections.json @@ -1,8 +1,8 @@ { - "fileVersion": 1, - "versions": { - "mir-core": "1.7.1", - "mir-cpuid": "1.2.11", - "silly": "1.1.1" - } + "fileVersion": 1, + "versions": { + "mir-core": "1.7.1", + "mir-cpuid": "1.2.11", + "silly": "1.1.1" + } } diff --git a/packages/mcl/src/main.d b/packages/mcl/src/main.d index c8ec1e7f..e26e3a80 100644 --- a/packages/mcl/src/main.d +++ b/packages/mcl/src/main.d @@ -27,21 +27,21 @@ int main(string[] args) setLogLevel(logLevel); - try switch (args[1]) + try + switch (args[1]) { - default: - return wrongUsage("unknown command: `" ~ args[1] ~ "`"); + default: + return wrongUsage("unknown command: `" ~ args[1] ~ "`"); static foreach (cmd; supportedCommands) - case __traits(identifier, cmd): - { + case __traits(identifier, cmd): + { - info("Running ", __traits(identifier, cmd)); - cmd(); - info("Execution Succesfull"); - return 0; - - } + info("Running ", __traits(identifier, cmd)); + cmd(); + info("Execution Succesfull"); + return 0; + } } catch (Exception e) { @@ -53,8 +53,9 @@ int main(string[] args) void setLogLevel(LogLevel l) { import std.logger : globalLogLevel, sharedLog; + globalLogLevel = l; - (cast()sharedLog()).logLevel = l; + (cast() sharedLog()).logLevel = l; } int wrongUsage(string error) diff --git a/packages/mcl/src/src/mcl/commands/ci.d b/packages/mcl/src/src/mcl/commands/ci.d index acb0045e..344d2bbd 100644 --- a/packages/mcl/src/src/mcl/commands/ci.d +++ b/packages/mcl/src/src/mcl/commands/ci.d @@ -1,16 +1,16 @@ module 
mcl.commands.ci; import std.file : readText; -import std.json : parseJSON,JSONValue; -import std.stdio : writeln,write; -import std.algorithm : map; +import std.json : parseJSON, JSONValue; +import std.stdio : writeln, write; +import std.algorithm : map, each; import std.array : array, join; import std.conv : to; import std.process : ProcessPipes; import mcl.utils.env : optional, parseEnv; -import mcl.commands.ci_matrix: nixEvalJobs, SupportedSystem, Params; -import mcl.commands.shard_matrix: generateShardMatrix; +import mcl.commands.ci_matrix : nixEvalJobs, SupportedSystem, Params, Package; +import mcl.commands.shard_matrix : generateShardMatrix, Shard; import mcl.utils.path : rootDir, createResultDirs; import mcl.utils.process : execute; import mcl.utils.nix : nix; @@ -23,58 +23,59 @@ export void ci() params = parseEnv!Params; auto shardMatrix = generateShardMatrix(); - foreach (shard; shardMatrix.include) - { - writeln("Shard ", shard.prefix ~ " ", shard.postfix ~ " ", shard.digit); - params.flakePre = shard.prefix; - params.flakePost = shard.postfix; + shardMatrix.include.each!(handleShard); +} - if (params.flakePre == "") - { - params.flakePre = "checks"; - } - if (params.flakePost != "") - { - params.flakePost = "." ~ params.flakePost; - } - string cachixUrl = "https://" ~ params.cachixCache ~ ".cachix.org"; - version (AArch64) { - string arch = "aarch64"; - } - version (X86_64) { - string arch = "x86_64"; - } +static immutable(SupportedSystem) platform() +{ + version (AArch64) + static immutable string arch = "aarch64"; + else version (X86_64) + static immutable string arch = "x86_64"; - version (linux) { - string os = "linux"; - } - version (OSX) { - string os = "darwin"; - } + version (linux) + static immutable string os = "linux"; + else version (OSX) + static immutable string os = "darwin"; - auto matrix = nixEvalJobs(params, (arch ~ "_" ~ os).to!(SupportedSystem), cachixUrl, false); - foreach (pkg; matrix) - { - if (pkg.isCached) - { - writeln("Package ", pkg.name, " is cached"); - } - else - { - writeln("Package ", pkg.name, " is not cached; building..."); - ProcessPipes res = execute!ProcessPipes(["nix", "build", "--json", ".#" ~ pkg.attrPath]); + return (arch ~ "_" ~ os).to!(SupportedSystem); +} - foreach (line; res.stderr.byLine) - { - "\r".write; - line.write; - } - "".writeln; - auto json = parseJSON(res.stdout.byLine.join("\n").to!string); - auto path = json.array[0]["outputs"]["out"].str; - execute(["cachix", "push", params.cachixCache, path], false, true).writeln; - } - } +void handleShard(Shard shard) +{ + writeln("Shard ", shard.prefix ~ " ", shard.postfix ~ " ", shard.digit); + params.flakePre = shard.prefix; + params.flakePost = shard.postfix; + + if (params.flakePre == "") + params.flakePre = "checks"; + if (params.flakePost != "") + params.flakePost = "." 
~ params.flakePost; + string cachixUrl = "https://" ~ params.cachixCache ~ ".cachix.org"; + + auto matrix = nixEvalJobs(params, platform, cachixUrl, false); + matrix.each!(handlePackage); +} +void handlePackage(Package pkg) +{ + if (pkg.isCached) + writeln("Package ", pkg.name, " is cached"); + else + { + writeln("Package ", pkg.name, " is not cached; building..."); + ProcessPipes res = execute!ProcessPipes([ + "nix", "build", "--json", ".#" ~ pkg.attrPath + ]); + + foreach (line; res.stderr.byLine) + { + "\r".write; + line.write; + } + "".writeln; + auto json = parseJSON(res.stdout.byLine.join("\n").to!string); + auto path = json.array[0]["outputs"]["out"].str; + execute(["cachix", "push", params.cachixCache, path], false, true).writeln; } } diff --git a/packages/mcl/src/src/mcl/commands/ci_matrix.d b/packages/mcl/src/src/mcl/commands/ci_matrix.d index f0df9ef7..aaedc662 100755 --- a/packages/mcl/src/src/mcl/commands/ci_matrix.d +++ b/packages/mcl/src/src/mcl/commands/ci_matrix.d @@ -12,7 +12,7 @@ import std.regex : matchFirst; import core.cpuid : threadsPerCPU; import std.path : buildPath; import std.process : pipeProcess, wait, Redirect, kill; -import std.exception : enforce; +import std.exception : enforce, assumeUnique; import std.format : fmt = format; import std.logger : tracef, infof; @@ -25,6 +25,7 @@ import mcl.utils.process : execute; enum GitHubOS { @StringRepresentation("ubuntu-latest") ubuntuLatest, + @StringRepresentation("self-hosted") selfHosted, @StringRepresentation("macos-14") macos14 @@ -39,51 +40,77 @@ enum SupportedSystem @StringRepresentation("aarch64-darwin") aarch64_darwin } +immutable GitHubOS[string] osMap; + +shared static this() +{ + import std.exception : assumeUnique; + import std.conv : to; + + GitHubOS[string] temp = [ + "ubuntu-latest": GitHubOS.ubuntuLatest, + "self-hosted": GitHubOS.selfHosted, + "macos-14": GitHubOS.macos14 + ]; + temp.rehash; + + osMap = assumeUnique(temp); +} + GitHubOS getGHOS(string os) { - switch (os) - { - case "self-hosted": - return GitHubOS.selfHosted; - case "ubuntu-latest": - return GitHubOS.ubuntuLatest; - case "macos-14": - return GitHubOS.macos14; - default: - return GitHubOS.selfHosted; - } + return os in osMap ? osMap[os] : GitHubOS.selfHosted; +} + +void assertGHOS(string input, GitHubOS expected) +{ + auto actual = getGHOS(input); + assert(actual == expected, fmt("getGHOS(\"%s\") should return %s, but returned %s", input, expected, actual)); } @("getGHOS") unittest { - assert(getGHOS("ubuntu-latest") == GitHubOS.ubuntuLatest); - assert(getGHOS("macos-14") == GitHubOS.macos14); - assert(getGHOS("crazyos-inator-2000") == GitHubOS.selfHosted); + assertGHOS("ubuntu-latest", GitHubOS.ubuntuLatest); + assertGHOS("macos-14", GitHubOS.macos14); + assertGHOS("crazyos-inator-2000", GitHubOS.selfHosted); +} + +immutable SupportedSystem[string] systemMap; + +shared static this() +{ + import std.exception : assumeUnique; + import std.conv : to; + + SupportedSystem[string] temp = [ + "x86_64-linux": SupportedSystem.x86_64_linux, + "x86_64-darwin": SupportedSystem.x86_64_darwin, + "aarch64-darwin": SupportedSystem.aarch64_darwin + ]; + temp.rehash; + + systemMap = assumeUnique(temp); } SupportedSystem getSystem(string system) { - switch (system) - { - case "x86_64-linux": - return SupportedSystem.x86_64_linux; - case "x86_64-darwin": - return SupportedSystem.x86_64_darwin; - case "aarch64-darwin": - return SupportedSystem.aarch64_darwin; - default: - return SupportedSystem.x86_64_linux; - } + return system in systemMap ? 
systemMap[system] : SupportedSystem.x86_64_linux; +} + +void assertSystem(string input, SupportedSystem expected) +{ + auto actual = getSystem(input); + assert(actual == expected, fmt("getSystem(\"%s\") should return %s, but returned %s", input, expected, actual)); } @("getSystem") unittest { - assert(getSystem("x86_64-linux") == SupportedSystem.x86_64_linux); - assert(getSystem("x86_64-darwin") == SupportedSystem.x86_64_darwin); - assert(getSystem("aarch64-darwin") == SupportedSystem.aarch64_darwin); - assert(getSystem("bender-bending-rodriguez-os") == SupportedSystem.x86_64_linux); + assertSystem("x86_64-linux", SupportedSystem.x86_64_linux); + assertSystem("x86_64-darwin", SupportedSystem.x86_64_darwin); + assertSystem("aarch64-darwin", SupportedSystem.aarch64_darwin); + assertSystem("bender-bending-rodriguez-os", SupportedSystem.x86_64_linux); } struct Package @@ -130,12 +157,23 @@ struct SummaryTableEntry SummaryTableEntry_aarch64 aarch64; } +enum Status : string +{ + cached = "✅ cached", + notSupported = "🚫 not supported", + building = "⏳ building...", + buildFailed = "❌ build failed" +} + version (unittest) { static immutable SummaryTableEntry[] testSummaryTableEntryArray = [ - SummaryTableEntry("testPackage", SummaryTableEntry_x86_64("✅ cached", "✅ cached"), SummaryTableEntry_aarch64("🚫 not supported")), - SummaryTableEntry("testPackage2", SummaryTableEntry_x86_64("⏳ building...", "❌ build failed"), SummaryTableEntry_aarch64( - "⏳ building...")) + SummaryTableEntry("testPackage", + SummaryTableEntry_x86_64(Status.cached, Status.cached), + SummaryTableEntry_aarch64(Status.notSupported)), + SummaryTableEntry("testPackage2", + SummaryTableEntry_x86_64(Status.building, Status.buildFailed), + SummaryTableEntry_aarch64(Status.building)) ]; } @@ -159,13 +197,17 @@ Package[] checkCacheStatus(Package[] packages) foreach (ref pkg; packages.parallel) { pkg = checkPackage(pkg); - struct Output { string isCached, name, storePath; } + struct Output + { + string isCached, name, storePath; + } + auto res = appender!string; writeRecordAsTable( Output(pkg.isCached ? "✅" : "❌", pkg.name, pkg.output), res ); - tracef("%s", res.data[0..$-1]); + tracef("%s", res.data[0 .. $ - 1]); } return packages; } @@ -196,17 +238,21 @@ struct Params } } -GitHubOS systemToGHPlatform(SupportedSystem os) +GitHubOS systemToGHPlatform(SupportedSystem os) => + os == SupportedSystem.x86_64_linux ? GitHubOS.selfHosted : GitHubOS.macos14; + +void assertGHPlatform(SupportedSystem system, GitHubOS expected) { - return os == SupportedSystem.x86_64_linux ? 
GitHubOS.selfHosted : GitHubOS.macos14; + auto actual = systemToGHPlatform(system); + assert(actual == expected, fmt("`systemToGHPlatform(%s)` should return `%s`, but returned `%s`", system, expected, actual)); } @("systemToGHPlatform") unittest { - assert(systemToGHPlatform(SupportedSystem.x86_64_linux) == GitHubOS.selfHosted); - assert(systemToGHPlatform(SupportedSystem.x86_64_darwin) == GitHubOS.macos14); - assert(systemToGHPlatform(SupportedSystem.aarch64_darwin) == GitHubOS.macos14); + assertGHPlatform(SupportedSystem.x86_64_linux, GitHubOS.selfHosted); + assertGHPlatform(SupportedSystem.x86_64_darwin, GitHubOS.macos14); + assertGHPlatform(SupportedSystem.aarch64_darwin, GitHubOS.macos14); } static immutable string[] uselessWarnings = @@ -265,24 +311,27 @@ Package[] nixEvalJobs(Params params, SupportedSystem system, string cachixUrl, b cacheUrl: cachixUrl ~ "/" ~ json["outputs"]["out"].str.matchFirst( "^/nix/store/(?P[^-]+)-")["hash"] ~ ".narinfo" }; - if (doCheck) { + if (doCheck) + { pkg = pkg.checkPackage(); } result ~= pkg; - struct Output { + struct Output + { bool isCached; GitHubOS os; @MaxWidth(50) string attr; @MaxWidth(80) string output; } - Output( + Output output = { isCached: pkg.isCached, os: pkg.os, attr: pkg.attrPath, output: pkg.output - ).writeRecordAsTable(stderr.lockingTextWriter); + }; + output.writeRecordAsTable(stderr.lockingTextWriter); } } foreach (line; pipes.stderr.byLine) @@ -329,41 +378,41 @@ Package[] nixEvalForAllSystems() .array; } +static const MAX_WORKERS = 8; + int getNixEvalWorkerCount() { - return params.maxWorkers == 0 ? (threadsPerCPU() < 8 ? threadsPerCPU() : 8) : params.maxWorkers; + return params.maxWorkers == 0 ? (threadsPerCPU() < MAX_WORKERS ? threadsPerCPU() : MAX_WORKERS) + : params.maxWorkers; } @("getNixEvalWorkerCount") unittest { - assert(getNixEvalWorkerCount() == (threadsPerCPU() < 8 ? threadsPerCPU() : 8)); + auto actual = getNixEvalWorkerCount(); + assert(actual == (threadsPerCPU() < MAX_WORKERS ? threadsPerCPU() : MAX_WORKERS), + "getNixEvalWorkerCount() should return the number of threads per CPU if it is less than MAX_WORKERS, otherwise it should return MAX_WORKERS, but returned %s".fmt(actual)); } -int getAvailableMemoryMB() -{ +string[] meminfo; - // free="$(< /proc/meminfo grep MemFree | tr -s ' ' | cut -d ' ' -f 2)" - int free = "/proc/meminfo".readText - .splitLines - .find!(a => a.indexOf("MemFree") != -1) - .front - .split[1].to!int; - int cached = "/proc/meminfo".readText - .splitLines - .find!(a => a.indexOf("Cached") != -1 && a.indexOf("SwapCached") == -1) - .front - .split[1].to!int; - int buffers = "/proc/meminfo".readText - .splitLines - .find!(a => a.indexOf("Buffers") != -1) - .front - .split[1].to!int; - int shmem = "/proc/meminfo".readText - .splitLines - .find!(a => a.indexOf("Shmem:") != -1) +int getMemoryStat(string statName, string excludeName = "EXCLUDE") +{ + return meminfo + .find!(a => a.indexOf(statName) != -1 && a.indexOf(excludeName) == -1) .front .split[1].to!int; +} + +int getAvailableMemoryMB() +{ + meminfo = "/proc/meminfo".readText + .splitLines; + + int free = getMemoryStat("MemFree"); + int cached = getMemoryStat("Cached", "SwapCached"); + int buffers = getMemoryStat("Buffers"); + int shmem = getMemoryStat("Shmem:"); int maxMemoryMB = params.maxMemory == 0 ? 
((free + cached + buffers + shmem) / 1024) : params.maxMemory; return maxMemoryMB; @@ -372,15 +421,25 @@ int getAvailableMemoryMB() @("getAvailableMemoryMB") unittest { - assert(getAvailableMemoryMB() > 0); + // Test when params.maxMemory is 0 + params.maxMemory = 0; + auto actual = getAvailableMemoryMB(); + assert(actual > 0, "getAvailableMemoryMB() should return a value greater than 0, but returned %s".fmt(actual)); + + // Test when params.maxMemory is not 0 + params.maxMemory = 1024; + actual = getAvailableMemoryMB(); + assert(actual == 1024, "getAvailableMemoryMB() should return 1024, but returned %s".fmt(actual)); } void saveCachixDeploySpec(Package[] packages) { - auto agents = packages.filter!(pkg => pkg.isCached == false).map!(pkg => JSONValue([ - "package": pkg.name, - "out": pkg.output - ])).array; + auto agents = packages.filter!(pkg => pkg.isCached == false) + .map!(pkg => JSONValue([ + "package": pkg.name, + "out": pkg.output + ])) + .array; auto resPath = resultDir.buildPath("cachix-deploy-spec.json"); resPath.write(JSONValue(agents).toString(JSONOptions.doNotEscapeSlashes)); } @@ -393,15 +452,31 @@ unittest createResultDirs(); saveCachixDeploySpec(cast(Package[]) testPackageArray); JSONValue deploySpec = parseJSON(resultDir.buildPath("cachix-deploy-spec.json").readText); - assert(testPackageArray[1].name == deploySpec[0]["package"].str); - assert(testPackageArray[1].output == deploySpec[0]["out"].str); + string testPackageName = testPackageArray[1].name; + string deploySpecName = deploySpec[0]["package"].str; + string testPackageOutput = testPackageArray[1].output; + string deploySpecOutput = deploySpec[0]["out"].str; + assert(testPackageName == deploySpecName, + "The name of the package should be %s, but was %s".fmt(testPackageName, deploySpecName)); + assert(testPackageOutput == deploySpecOutput, + "The output of the package should be %s, but was %s".fmt(testPackageOutput, deploySpecOutput)); } void saveGHCIMatrix(Package[] packages) { - auto matrix = JSONValue([ + auto matrix = createMatrix(packages); + writeMatrix(matrix); +} + +JSONValue createMatrix(Package[] packages) +{ + return JSONValue([ "include": JSONValue(packages.map!(pkg => pkg.toJSON()).array) ]); +} + +void writeMatrix(JSONValue matrix) +{ string resPath = rootDir.buildPath(params.isInitial ? "matrix-pre.json" : "matrix-post.json"); resPath.write(JSONValue(matrix).toString(JSONOptions.doNotEscapeSlashes)); } @@ -417,13 +492,23 @@ unittest .buildPath(params.isInitial ? 
"matrix-pre.json" : "matrix-post.json") .readText .parseJSON; - assert(testPackageArray[0].name == matrix["include"][0]["name"].str); + foreach (i, pkg; testPackageArray) + { + string pkgName = pkg.name; + string matrixName = matrix["include"][i]["name"].str; + assert(pkgName == matrixName, + "The name of the package should be %s, but was %s".fmt(pkgName, matrixName)); + } } void saveGHCIComment(SummaryTableEntry[] tableSummaryJSON) { - import std.path : buildNormalizedPath, absolutePath; + string comment = createComment(tableSummaryJSON); + writeComment(comment); +} +string createComment(SummaryTableEntry[] tableSummaryJSON) +{ string comment = "Thanks for your Pull Request!"; comment ~= "\n\nBelow you will find a summary of the cachix status of each package, for each supported platform."; comment ~= "\n\n| package | `x86_64-linux` | `x86_64-darwin` | `aarch64-darwin` |"; @@ -431,6 +516,12 @@ void saveGHCIComment(SummaryTableEntry[] tableSummaryJSON) comment ~= tableSummaryJSON.map!( pkg => "\n| " ~ pkg.name ~ " | " ~ pkg.x86_64.linux ~ " | " ~ pkg.x86_64.darwin ~ " | " ~ pkg.aarch64.darwin ~ " |") .join(""); + return comment; +} + +void writeComment(string comment) +{ + import std.path : buildNormalizedPath, absolutePath; auto outputPath = rootDir.buildNormalizedPath("comment.md"); write(outputPath, comment); @@ -447,10 +538,14 @@ unittest string comment = rootDir.buildPath("comment.md").readText; foreach (pkg; testSummaryTableEntryArray) { - assert(comment.indexOf(pkg.name) != -1); - assert(comment.indexOf(pkg.x86_64.linux) != -1); - assert(comment.indexOf(pkg.x86_64.darwin) != -1); - assert(comment.indexOf(pkg.aarch64.darwin) != -1); + assert(comment.indexOf(pkg.name) != -1, + "The comment should contain the package name %s, the comment is:\n%s".fmt(pkg.name, comment)); + assert(comment.indexOf(pkg.x86_64.linux) != -1, + "The comment should contain the x86_64 linux status %s, the comment is:\n%s".fmt(pkg.x86_64.linux, comment)); + assert(comment.indexOf(pkg.x86_64.darwin) != -1, + "The comment should contain the x86_64 darwin status %s, the comment is:\n%s".fmt(pkg.x86_64.darwin, comment)); + assert(comment.indexOf(pkg.aarch64.darwin) != -1, + "The comment should contain the aarch64 darwin status %s, the comment is:\n%s".fmt(pkg.aarch64.darwin, comment)); } } @@ -460,76 +555,79 @@ string getStatus(JSONValue pkg, string key) { if (pkg[key]["isCached"].boolean) { - return "[✅ cached](" ~ pkg[key]["cacheUrl"].str ~ ")"; + return "[" ~ Status.cached ~ "](" ~ pkg[key]["cacheUrl"].str ~ ")"; } else if (params.isInitial) { - return "⏳ building..."; + return Status.building; } else { - return "❌ build failed"; + return Status.buildFailed; } } else { - return "🚫 not supported"; + return Status.notSupported; } } SummaryTableEntry[] convertNixEvalToTableSummary(Package[] packages) { - - SummaryTableEntry[] tableSummary = packages + return packages .chunkBy!((a, b) => a.name == b.name) - .map!((group) { - JSONValue pkg; - string name = group.array.front.name; - pkg["name"] = JSONValue(name); - foreach (item; group) - { - pkg[item.system.to!string] = item.toJSON(); - } - SummaryTableEntry entry = { - name, { - getStatus(pkg, "x86_64_linux"), getStatus(pkg, "x86_64_darwin") - }, { - getStatus(pkg, "aarch64_darwin") - } - }; - return entry; - }) + .map!(group => createSummaryTableEntry(group.array)) .array .sort!((a, b) => a.name < b.name) .release; - return tableSummary; +} + +SummaryTableEntry createSummaryTableEntry(Package[] group) +{ + JSONValue pkg; + string name = group.front.name; + 
pkg["name"] = JSONValue(name); + foreach (item; group) + { + pkg[item.system.to!string] = item.toJSON(); + } + SummaryTableEntry entry = { + name, {getStatus(pkg, "x86_64_linux"), getStatus(pkg, "x86_64_darwin")}, { + getStatus(pkg, "aarch64_darwin") + } + }; + return entry; +} + +void assertNixTable(SummaryTableEntry[] tableSummary, immutable(Package[]) testPackageArray, int index, + string expectedLinuxStatus = "[" ~ Status.cached ~ "](https://testPackage.com)", string expectedDarwinStatus = Status.notSupported, string expectedAarch64Status = Status.notSupported) +{ + string actualName = tableSummary[index].name; + string expectedName = testPackageArray[index].name; + assert(actualName == expectedName, fmt("Expected name to be %s, but got %s", expectedName, actualName)); + + string actualLinuxStatus = tableSummary[index].x86_64.linux; + assert(actualLinuxStatus == expectedLinuxStatus, fmt("Expected Linux status to be %s, but got %s", expectedLinuxStatus, actualLinuxStatus)); + + string actualDarwinStatus = tableSummary[index].x86_64.darwin; + assert(actualDarwinStatus == expectedDarwinStatus, fmt("Expected Darwin status to be %s, but got %s", expectedDarwinStatus, actualDarwinStatus)); + + string actualAarch64Status = tableSummary[index].aarch64.darwin; + assert(actualAarch64Status == expectedAarch64Status, fmt("Expected Aarch64 Darwin status to be %s, but got %s", expectedAarch64Status, actualAarch64Status)); } @("convertNixEvalToTableSummary/getStatus") unittest { auto tableSummary = convertNixEvalToTableSummary(cast(Package[]) testPackageArray); - assert(tableSummary[0].name == testPackageArray[0].name); - assert(tableSummary[0].x86_64.linux == "[✅ cached](https://testPackage.com)"); - assert(tableSummary[0].x86_64.darwin == "🚫 not supported"); - assert(tableSummary[0].aarch64.darwin == "🚫 not supported"); - assert(tableSummary[1].name == testPackageArray[1].name); - assert(tableSummary[1].x86_64.linux == "🚫 not supported"); - assert(tableSummary[1].x86_64.darwin == "🚫 not supported"); - assert(tableSummary[1].aarch64.darwin == "❌ build failed"); + assertNixTable(tableSummary, testPackageArray, 0); + assertNixTable(tableSummary, testPackageArray, 1, Status.notSupported, Status.notSupported, Status.buildFailed); params.isInitial = true; tableSummary = convertNixEvalToTableSummary(cast(Package[]) testPackageArray); params.isInitial = false; - assert(tableSummary[0].name == testPackageArray[0].name); - assert(tableSummary[0].x86_64.linux == "[✅ cached](https://testPackage.com)"); - assert(tableSummary[0].x86_64.darwin == "🚫 not supported"); - assert(tableSummary[0].aarch64.darwin == "🚫 not supported"); - assert(tableSummary[1].name == testPackageArray[1].name); - assert(tableSummary[1].x86_64.linux == "🚫 not supported"); - assert(tableSummary[1].x86_64.darwin == "🚫 not supported"); - assert(tableSummary[1].aarch64.darwin == "⏳ building..."); - + assertNixTable(tableSummary, testPackageArray, 0); + assertNixTable(tableSummary, testPackageArray, 1, Status.notSupported, Status.notSupported, Status.building); } void printTableForCacheStatus(Package[] packages) @@ -546,7 +644,7 @@ Package checkPackage(Package pkg) { import std.algorithm : canFind; import std.string : lineSplitter; - import std.net.curl : HTTP, httpGet = get, HTTPStatusException; + import std.net.curl : HTTP, httpGet = get, HTTPStatusException, CurlException; auto http = HTTP(); http.addRequestHeader("Authorization", "Bearer " ~ params.cachixAuthToken); @@ -561,6 +659,11 @@ Package checkPackage(Package pkg) { pkg.isCached = 
false; } + catch (CurlException e) + { + // Handle network errors + pkg.isCached = false; + } return pkg; } @@ -572,17 +675,16 @@ unittest const storePathHash = "mdb034kf7sq6g03ric56jxr4a7043l41"; const storePath = "/nix/store/" ~ storePathHash ~ "-hello-2.12.1"; - auto testPackage = Package( + Package testPackage = { output: storePath, cacheUrl: nixosCacheEndpoint ~ storePathHash ~ ".narinfo", - ); + }; - assert(!testPackage.isCached); - assert(checkPackage(testPackage).isCached); + assert(checkPackage(testPackage).isCached, "Package %s should be cached".fmt(testPackage.cacheUrl)); testPackage.cacheUrl = nixosCacheEndpoint ~ "nonexistent.narinfo"; - assert(!checkPackage(testPackage).isCached); + assert(!checkPackage(testPackage).isCached, "Package %s should not be cached".fmt(testPackage.cacheUrl)); } Package[] getPrecalcMatrix() @@ -601,8 +703,36 @@ Package[] getPrecalcMatrix() isCached: pkg["isCached"].boolean, os: getGHOS(pkg["os"].str), system: getSystem(pkg["system"].str), - output: pkg["output"].str}; - return result; - }).array; + output: pkg["output"].str + }; + return result; + }).array; - } +} + +@("getPrecalcMatrix") +unittest +{ + string precalcMatrixStr = "{\"include\": [{\"name\": \"test\", \"allowedToFail\": false, \"attrPath\": \"test\", \"cacheUrl\": \"url\", \"isCached\": true, \"os\": \"linux\", \"system\": \"x86_64-linux\", \"output\": \"output\"}]}"; + params.precalcMatrix = precalcMatrixStr; + auto packages = getPrecalcMatrix(); + Package testPackage = { + name: "test", + allowedToFail: false, + attrPath: "test", + cacheUrl: "url", + isCached: true, + os: GitHubOS.selfHosted, + system: SupportedSystem.x86_64_linux, + output: "output" + }; + assert(packages.length == 1); + assert(packages[0].name == testPackage.name, "Expected %s, got %s".fmt(testPackage.name, packages[0].name)); + assert(!packages[0].allowedToFail, "Expected %s, got %s".fmt(testPackage.allowedToFail, packages[0].allowedToFail)); + assert(packages[0].attrPath == testPackage.attrPath, "Expected %s, got %s".fmt(testPackage.attrPath, packages[0].attrPath)); + assert(packages[0].cacheUrl == testPackage.cacheUrl, "Expected %s, got %s".fmt(testPackage.cacheUrl, packages[0].cacheUrl)); + assert(packages[0].isCached); + assert(packages[0].os == testPackage.os, "Expected %s, got %s".fmt(testPackage.os, packages[0].os)); + assert(packages[0].system == testPackage.system, "Expected %s, got %s".fmt(testPackage.system, packages[0].system)); + assert(packages[0].output == testPackage.output, "Expected %s, got %s".fmt(testPackage.output, packages[0].output)); +} diff --git a/packages/mcl/src/src/mcl/commands/deploy_spec.d b/packages/mcl/src/src/mcl/commands/deploy_spec.d index f9cb91b0..9eb44c1f 100644 --- a/packages/mcl/src/src/mcl/commands/deploy_spec.d +++ b/packages/mcl/src/src/mcl/commands/deploy_spec.d @@ -26,8 +26,8 @@ export void deploy_spec() } spawnProcessInline([ - "cachix", "deploy", "activate", deploySpecFile - ]); + "cachix", "deploy", "activate", deploySpecFile + ]); } void createMachineDeploySpec() @@ -47,8 +47,8 @@ void createMachineDeploySpec() auto result = [ "agents": packages - .map!(pkg => tuple(pkg.name, pkg.output)) - .assocArray + .map!(pkg => tuple(pkg.name, pkg.output)) + .assocArray ].JSONValue; writeFile(deploySpecFile, result.toString()); diff --git a/packages/mcl/src/src/mcl/commands/get_fstab.d b/packages/mcl/src/src/mcl/commands/get_fstab.d index 032f79f3..aaa781ae 100755 --- a/packages/mcl/src/src/mcl/commands/get_fstab.d +++ b/packages/mcl/src/src/mcl/commands/get_fstab.d @@ -37,7 
+37,6 @@ struct Params void setup() { - cachixStoreUrl = cachixNixStoreUrl(cachixCache); if (!cachixDeployWorkspace) cachixDeployWorkspace = cachixCache; diff --git a/packages/mcl/src/src/mcl/commands/host_info.d b/packages/mcl/src/src/mcl/commands/host_info.d index 66bce461..de314932 100644 --- a/packages/mcl/src/src/mcl/commands/host_info.d +++ b/packages/mcl/src/src/mcl/commands/host_info.d @@ -7,69 +7,44 @@ import std.system; import std.stdio : writeln; import std.conv : to; -import std.string : strip, indexOf, isNumeric; -import std.array : split, join, array, replace; -import std.algorithm : map, filter, startsWith, joiner, any, sum; +import std.string : strip, indexOf, isNumeric, splitLines; +import std.array : split, join, array, replace, assocArray; +import std.algorithm : map, filter, startsWith, joiner, any, sum, canFind, all; import std.file : exists, write, readText, readLink, dirEntries, SpanMode; import std.path : baseName; import std.json; import std.process : ProcessPipes, environment; -import core.stdc.string: strlen; +import std.typecons : tuple; +import core.stdc.string : strlen; import mcl.utils.env : parseEnv, optional; -import mcl.utils.json : toJSON; +import mcl.utils.json : toJSON, getStrOrDefault; import mcl.utils.process : execute, isRoot; import mcl.utils.number : humanReadableSize; import mcl.utils.array : uniqIfSame; import mcl.utils.nix : Literal; -// enum InfoFormat -// { -// JSON, -// CSV, -// TSV -// } - -struct Params -{ - // @optional() - // InfoFormat format = InfoFormat.JSON; - void setup() - { - } -} - string[string] cpuinfo; string[string] meminfo; string[string] getProcInfo(string fileOrData, bool file = true) { - string[string] r; - foreach (line; file ? fileOrData.readText().split( - "\n").map!(strip).array : fileOrData.split("\n").map!(strip).array) - { - if (line.indexOf(":") == -1 || line.strip == "edid-decode (hex):") - { - continue; - } - auto parts = line.split(":"); - if (parts.length >= 2 && parts[0].strip != "") - { - r[parts[0].strip] = parts[1].strip; - } - } - return r; + auto lines = file ? 
fileOrData.readText().splitLines : fileOrData.splitLines; + return lines + .map!(strip) + .filter!(line => line.canFind(":") && line.strip != "edid-decode (hex):") + .map!(line => line.split(":")) + .filter!(parts => parts.length >= 2 && parts[0].strip != "") + .map!(parts => tuple(parts[0].strip, parts[1].strip)) + .assocArray; } export void host_info() { - const params = parseEnv!Params; - Info info = getInfo(); writeln(info.toJSON(true).toPrettyString(JSONOptions.doNotEscapeSlashes)); - } Info getInfo() @@ -148,31 +123,18 @@ string getOpMode() case ISA.msp430: opMode = [16]; break; - case ISA.x86_64: - case ISA.aarch64: - case ISA.ppc64: - case ISA.mips64: - case ISA.nvptx64: - case ISA.riscv64: - case ISA.sparc64: - case ISA.hppa64: + case ISA.x86_64, ISA.aarch64, ISA.ppc64: + case ISA.mips64, ISA.nvptx64, ISA.riscv64: + case ISA.sparc64, ISA.hppa64: opMode = [32, 64]; break; - case ISA.x86: - case ISA.arm: - case ISA.ppc: - case ISA.mips32: - case ISA.nvptx: - case ISA.riscv32: - case ISA.sparc: - case ISA.s390: - case ISA.hppa: - case ISA.sh: - case ISA.webAssembly: + case ISA.x86, ISA.arm, ISA.ppc: + case ISA.mips32, ISA.nvptx, ISA.riscv32: + case ISA.sparc, ISA.s390, ISA.hppa: + case ISA.sh, ISA.webAssembly: opMode = [32]; break; - case ISA.ia64: - case ISA.alpha: + case ISA.ia64, ISA.alpha: opMode = [64]; break; case ISA.systemZ: @@ -205,7 +167,7 @@ ProcessorInfo getProcessorInfo() r.vendor = cpuid.x86_any.vendor; char[48] modelCharArr; cpuid.x86_any.brand(modelCharArr); - r.model = modelCharArr.idup[0..(strlen(modelCharArr.ptr)-1)]; + r.model = modelCharArr.idup[0 .. (strlen(modelCharArr.ptr) - 1)]; r.cpus = cpuid.unified.cpus(); r.cores = [r.cpus * cpuid.unified.cores()]; r.threads = [cpuid.unified.threads()]; @@ -213,20 +175,29 @@ ProcessorInfo getProcessorInfo() if (isRoot) { - auto dmi = execute("dmidecode -t 4", false).split("\n"); - r.voltage = dmi.getFromDmi("Voltage:").map!(a => a.split(" ")[0]).array.uniqIfSame[0]; - r.frequency = dmi.getFromDmi("Current Speed:") - .map!(a => a.split(" ")[0].to!size_t).array.uniqIfSame; - r.maxFrequency = dmi.getFromDmi("Max Speed:") - .map!(a => a.split(" ")[0].to!size_t).array.uniqIfSame; - r.cpus = dmi.getFromDmi("Processor Information").length; - r.cores = dmi.getFromDmi("Core Count").map!(a => a.to!size_t).array.uniqIfSame; - r.threads = dmi.getFromDmi("Thread Count").map!(a => a.to!size_t).array.uniqIfSame; + auto dmi = execute("dmidecode -t 4", false).splitLines; + r.voltage = dmi.parseDmiDataUniq("Voltage:", a => a.split(" ")[0])[0]; + r.frequency = dmi.parseDmiDataUniq("Current Speed:", a => a.split(" ")[0].to!size_t); + r.maxFrequency = dmi.parseDmiDataUniq("Max Speed:", a => a.split(" ")[0].to!size_t); + r.cpus = dmi.parseDmiData("Processor Information").length; + r.cores = dmi.parseDmiDataUniq!size_t("Core Count"); + r.threads = dmi.parseDmiDataUniq!size_t("Thread Count"); } return r; } +T[] parseDmiData(T = string)(string[] dmi, string key, T delegate(string) transform = a => a.to!T) +{ + return dmi.getFromDmi(key).map!(transform).array; +} + +T[] parseDmiDataUniq(T = string)(string[] dmi, string key, T delegate(string) transform = a => a + .to!T) +{ + return dmi.parseDmiData!T(key, transform).uniqIfSame; +} + struct MotherboardInfo { string vendor; @@ -307,21 +278,14 @@ string getDistribution() auto distribution = execute("uname -o", false); if (exists("/etc/os-release")) { - foreach (line; execute([ - "awk", "-F", "=", "/^NAME=/ {print $2}", "/etc/os-release" - ], false).split("\n")) - { - distribution = line; - } + 
distribution = execute([ + "awk", "-F", "=", "/^NAME=/ {print $2}", "/etc/os-release" + ], false).strip; } else if (distribution == "Darwin") - { distribution = execute("sw_vers", false); - } else if (exists("/etc/lsb-release")) - { distribution = execute("lsb_release -i", false); - } return distribution; } @@ -330,12 +294,9 @@ string getDistributionVersion() auto distributionVersion = execute("uname -r", false); if (exists("/etc/os-release")) { - foreach (line; execute([ - "awk", "-F", "=", "/^VERSION=/ {print $2}", "/etc/os-release" - ], false).split("\n")) - { - distributionVersion = line.strip("\""); - } + distributionVersion = execute([ + "awk", "-F", "=", "/^VERSION=/ {print $2}", "/etc/os-release" + ], false).strip("\""); } else if (execute("uname -o") == "Darwin") { @@ -343,9 +304,7 @@ string getDistributionVersion() "sw_vers -buildVersion", false) ~ " )"; } else if (exists("/etc/lsb-release")) - { distributionVersion = execute("lsb_release -r", false); - } return distributionVersion; } @@ -363,11 +322,15 @@ struct MemoryInfo string serial = "ROOT PERMISSIONS REQUIRED"; } +static immutable ExcludedStrings = [ + "Unknown", "No Module Installed", "Not Provided", "None" +]; + string[] getFromDmi(string[] dmi, string key) { return dmi.filter!(a => a.strip.startsWith(key)) - .map!(x => x.indexOf(":") != -1 ? x.split(":")[1].strip : x) - .filter!(a => a != "Unknown" && a != "No Module Installed" && a != "Not Provided" && a != "None") + .map!(x => x.canFind(":") ? x.split(":")[1].strip : x) + .filter!(a => ExcludedStrings.all!(s => a != s)) .array; } @@ -379,23 +342,30 @@ MemoryInfo getMemoryInfo() r.totalGB = r.total.split(" ")[0].to!int; if (isRoot) { - string[] dmi = execute("dmidecode -t memory", false).split("\n"); - r.type = dmi.getFromDmi("Type:").uniqIfSame.join("/"); - r.count = dmi.getFromDmi("Type:").length; - r.slots = dmi.getFromDmi("Memory Device") + string[] dmi = execute("dmidecode -t memory", false).splitLines; + r.type = dmi.parseDmiDataUniq("Type:").join("/"); + r.count = dmi.parseDmiData("Type:").length; + r.slots = dmi.parseDmiData("Memory Device") .filter!(a => a.indexOf("DMI type 17") != -1).array.length; - r.totalGB = dmi.getFromDmi("Size:").map!(a => a.split(" ")[0]).array.filter!(isNumeric).array.map!(to!int).array.sum(); - r.total =r.totalGB.to!string ~ " GB (" ~ dmi.getFromDmi("Size:").map!(a => a.split(" ")[0]).join("/") ~ ")"; - auto totalWidth = dmi.getFromDmi("Total Width"); - auto dataWidth = dmi.getFromDmi("Data Width"); + r.totalGB = dmi.parseDmiData("Size:", a => a.split(" ")[0]) + .array + .filter!(isNumeric) + .array + .map!(to!int) + .array + .sum(); + r.total = r.totalGB.to!string ~ " GB (" ~ dmi.parseDmiData("Size:", a => a.split(" ")[0]).join( + "/") ~ ")"; + auto totalWidth = dmi.parseDmiData("Total Width"); + auto dataWidth = dmi.parseDmiData("Data Width"); foreach (i, width; totalWidth) { r.ecc ~= dataWidth[i] != width; } - r.speed = dmi.getFromDmi("Speed:").uniqIfSame.join("/"); - r.vendor = dmi.getFromDmi("Manufacturer:").uniqIfSame.join("/"); - r.partNumber = dmi.getFromDmi("Part Number:").uniqIfSame.join("/"); - r.serial = dmi.getFromDmi("Serial Number:").uniqIfSame.join("/"); + r.speed = dmi.parseDmiDataUniq("Speed:").join("/"); + r.vendor = dmi.parseDmiDataUniq("Manufacturer:").join("/"); + r.partNumber = dmi.parseDmiDataUniq("Part Number:").join("/"); + r.serial = dmi.parseDmiDataUniq("Serial Number:").join("/"); } @@ -445,41 +415,34 @@ StorageInfo getStorageInfo() foreach (JSONValue dev; lsblk["blockdevices"].array) { if 
(dev["id"].isNull) - { continue; - } Device d; - d.dev = dev["kname"].isNull ? "" : dev["kname"].str; - d.uuid = dev["id"].isNull ? "" : dev["id"].str; - d.type = dev["type"].isNull ? "" : dev["type"].str; - d.size = dev["size"].isNull ? "" : dev["size"].str; - d.model = dev["model"].isNull ? "Unknown Model" : dev["model"].str; - d.serial = dev["serial"].isNull ? "Missing Serial Number" : dev["serial"].str; - d.vendor = dev["vendor"].isNull ? "Unknown Vendor" : dev["vendor"].str; - d.state = dev["state"].isNull ? "" : dev["state"].str; - d.partitionTableType = dev["pttype"].isNull ? "" : dev["pttype"].str; - d.partitionTableUUID = dev["ptuuid"].isNull ? "" : dev["ptuuid"].str; - - switch (d.size[$ - 1]) - { - case 'B': - total += d.size[0 .. $ - 1].to!real; - break; - case 'K': - total += d.size[0 .. $ - 1].to!real * 1024; - break; - case 'M': - total += d.size[0 .. $ - 1].to!real * 1024 * 1024; - break; - case 'G': - total += d.size[0 .. $ - 1].to!real * 1024 * 1024 * 1024; - break; - case 'T': - total += d.size[0 .. $ - 1].to!real * 1024 * 1024 * 1024 * 1024; - break; - default: - assert(0, "Unknown size unit" ~ d.size[$ - 1]); - } + d.dev = getStrOrDefault(dev["kname"]); + d.uuid = getStrOrDefault(dev["id"]); + d.type = getStrOrDefault(dev["type"]); + d.size = getStrOrDefault(dev["size"]); + d.model = getStrOrDefault(dev["model"], "Unknown Model"); + d.serial = getStrOrDefault(dev["serial"], "Missing Serial Number"); + d.vendor = getStrOrDefault(dev["vendor"], "Unknown Vendor"); + d.state = getStrOrDefault(dev["state"]); + d.partitionTableType = getStrOrDefault(dev["pttype"]); + d.partitionTableUUID = getStrOrDefault(dev["ptuuid"]); + + int[char] sizeUnits = [ + 'B': 1, + 'K': 1024, + 'M': 1024 * 1024, + 'G': 1024 * 1024 * 1024, + 'T': 1024 * 1024 * 1024 * 1024 + ]; + + auto size = d.size[0 .. $ - 1].to!real; + auto unit = d.size[$ - 1]; + + if (unit in sizeUnits) + total += size * sizeUnits[unit]; + else + assert(0, "Unknown size unit: " ~ unit); if (isRoot) { @@ -488,17 +451,15 @@ StorageInfo getStorageInfo() foreach (JSONValue part; partData.array) { if (part["partuuid"].isNull) - { continue; - } Partition p; - p.dev = part["kname"].isNull ? "" : part["kname"].str; - p.fslabel = part["label"].isNull ? "" : part["label"].str; - p.partlabel = part["partlabel"].isNull ? "" : part["partlabel"].str; - p.size = part["size"].isNull ? "" : part["size"].str; - p.type = part["fstype"].isNull ? "" : part["fstype"].str; - p.mount = part["mountpoint"].isNull ? "Not Mounted" : part["mountpoint"].str; - p.id = part["partuuid"].isNull ? 
"" : part["partuuid"].str; + p.dev = getStrOrDefault(part["kname"]); + p.fslabel = getStrOrDefault(part["label"]); + p.partlabel = getStrOrDefault(part["partlabel"]); + p.size = getStrOrDefault(part["size"]); + p.type = getStrOrDefault(part["fstype"]); + p.mount = getStrOrDefault(part["mountpoint"], "Not Mounted"); + p.id = getStrOrDefault(part["partuuid"]); d.partitions ~= p; } } @@ -531,56 +492,57 @@ struct DisplayInfo size_t count; } +Display getDeviceData(JSONValue device) +{ + Display d; + + d.name = device["device_name"].str; + d.connected = device["is_connected"].boolean; + d.primary = device["is_primary"].boolean; + d.resolution = device["resolution_width"].integer.to!string ~ "x" ~ device["resolution_height"] + .integer.to!string; + foreach (JSONValue mode; device["modes"].array) + { + foreach (JSONValue freq; mode["frequencies"].array) + { + d.modes ~= mode["resolution_width"].integer.to!string ~ "x" ~ mode["resolution_height"].integer + .to!string ~ "@" ~ freq["frequency"].floating.to!string ~ "Hz"; + if (freq["is_current"].boolean) + d.refreshRate = freq["frequency"].floating.to!string ~ "Hz"; + } + } + if ("/sys/class/rm/card0-" ~ d.name.replace("HDMI-", "HDMI-A-") ~ "/edid".exists) + { + auto edidTmp = execute("edid-decode /sys/class/drm/card0-" ~ d.name.replace("HDMI-", "HDMI-A-") ~ "/edid", false); + auto edidData = getProcInfo(edidTmp, false); + d.vendor = ("Manufacturer" in edidData) ? edidData["Manufacturer"] : "Unknown"; + d.model = ("Model" in edidData) ? edidData["Model"] : "Unknown"; + d.serial = ("Serial Number" in edidData) ? edidData["Serial Number"] : "Unknown"; + d.manufactureDate = ("Made in" in edidData) ? edidData["Made in"] : "Unknown"; + d.size = ("Maximum image size" in edidData) ? edidData["Maximum image size"] : "Unknown"; + } + return d; +} + DisplayInfo getDisplayInfo() { DisplayInfo r; if ("DISPLAY" !in environment) return r; + JSONValue[] xrandr; try { - auto xrandr = execute!JSONValue("jc xrandr --properties", false)["screens"].array; - foreach (JSONValue screen; xrandr) - { - foreach (JSONValue device; screen["devices"].array) - { - Display d; - - d.name = device["device_name"].str; - d.connected = device["is_connected"].boolean; - d.primary = device["is_primary"].boolean; - d.resolution = device["resolution_width"].integer.to!string ~ "x" ~ device["resolution_height"] - .integer.to!string; - foreach (JSONValue mode; device["modes"].array) - { - foreach (JSONValue freq; mode["frequencies"].array) - { - d.modes ~= mode["resolution_width"].integer.to!string ~ "x" ~ mode["resolution_height"].integer - .to!string ~ "@" ~ freq["frequency"].floating.to!string ~ "Hz"; - if (freq["is_current"].boolean) - { - d.refreshRate = freq["frequency"].floating.to!string ~ "Hz"; - } - } - } - if ("/sys/class/rm/card0-" ~ d.name.replace("HDMI-", "HDMI-A-") ~ "/edid".exists) - { - auto edidTmp = execute("edid-decode /sys/class/drm/card0-" ~ d.name.replace("HDMI-", "HDMI-A-") ~ "/edid", false); - auto edidData = getProcInfo(edidTmp, false); - d.vendor = ("Manufacturer" in edidData) ? edidData["Manufacturer"] : "Unknown"; - d.model = ("Model" in edidData) ? edidData["Model"] : "Unknown"; - d.serial = ("Serial Number" in edidData) ? edidData["Serial Number"] : "Unknown"; - d.manufactureDate = ("Made in" in edidData) ? edidData["Made in"] : "Unknown"; - d.size = ("Maximum image size" in edidData) ? 
edidData["Maximum image size"] : "Unknown"; - } - - r.displays ~= d; - r.count++; - } - } + xrandr = execute!JSONValue("jc xrandr --properties", false)["screens"].array; } catch (Exception e) - { return r; + foreach (JSONValue screen; xrandr) + { + foreach (JSONValue device; screen["devices"].array) + { + r.displays ~= getDeviceData(device); + r.count++; + } } return r; } @@ -593,22 +555,28 @@ struct GraphicsProcessorInfo string vram; } +string getGlxInfoValue(ref string[string] glxinfo, string key) +{ + return (key in glxinfo) ? glxinfo[key] : "Unknown"; +} + GraphicsProcessorInfo getGraphicsProcessorInfo() { GraphicsProcessorInfo r; - if ("DISPLAY" !in environment) return r; - try { - auto glxinfo = getProcInfo(execute("glxinfo", false), false); - r.vendor = ("OpenGL vendor string" in glxinfo) ? glxinfo["OpenGL vendor string"] : "Unknown"; - r.model = ("OpenGL renderer string" in glxinfo) ? glxinfo["OpenGL renderer string"] : "Unknown"; - r.coreProfile = ("OpenGL core profile version string" in glxinfo) ? glxinfo["OpenGL core profile version string"] : "Unknown"; - r.vram = ("Video memory" in glxinfo) ? glxinfo["Video memory"] : "Unknown"; + if ("DISPLAY" !in environment) + return r; + string[string] glxinfo; + try + { + glxinfo = getProcInfo(execute("glxinfo", false), false); } catch (Exception e) - { return r; - } + r.vendor = getGlxInfoValue(glxinfo, "OpenGL vendor string"); + r.model = getGlxInfoValue(glxinfo, "OpenGL renderer string"); + r.coreProfile = getGlxInfoValue(glxinfo, "OpenGL core profile version string"); + r.vram = getGlxInfoValue(glxinfo, "Video memory"); return r; } @@ -637,10 +605,8 @@ struct MachineConfigInfo string[] videoDrivers; } -MachineConfigInfo getMachineConfigInfo() +void getPCIDeviceInfo(ref MachineConfigInfo info) { - MachineConfigInfo r; - // PCI devices foreach (path; dirEntries("/sys/bus/pci/devices", SpanMode.shallow).map!(a => a.name).array) @@ -650,9 +616,7 @@ MachineConfigInfo getMachineConfigInfo() string _class = readText(path ~ "/class").strip; string _module; if (exists(path ~ "/driver/module")) - { _module = readLink(path ~ "/driver/module").baseName; - } if (_module != "" && ( // Mass-storage controller. Definitely important. @@ -663,7 +627,7 @@ MachineConfigInfo getMachineConfigInfo() // keyboard when things go wrong in the initrd. _class.startsWith("0x0c03"))) { - r.availableKernelModules ~= _module; + info.availableKernelModules ~= _module; } // broadcom STA driver (wl.ko) @@ -678,9 +642,9 @@ MachineConfigInfo getMachineConfigInfo() "0x4331", "0x43a0", "0x43b1" ].any!(a => device.startsWith(a))) { - r.extraModulePackages ~= Literal( + info.extraModulePackages ~= Literal( "config.boot.kernelPackages.broadcom_sta"); - r.kernelModules ~= "wl"; + info.kernelModules ~= "wl"; } // broadcom FullMac driver @@ -695,15 +659,13 @@ MachineConfigInfo getMachineConfigInfo() "0x43c5" ].any!(a => device.startsWith(a))) { - r.imports ~= Literal( + info.imports ~= Literal( "(modulesPath + \"/hardware/network/broadcom-43xx.nix\")"); } // In case this is a virtio scsi device, we need to explicitly make this available. if (vendor.startsWith("0x1af4") && ["0x1004", "0x1048"].any!(a => device.startsWith(a))) - { - r.availableKernelModules ~= "virtio_scsi"; - } + info.availableKernelModules ~= "virtio_scsi"; // Can't rely on $module here, since the module may not be loaded // due to missing firmware. 
Ideally we would check modules.pcimap @@ -713,12 +675,12 @@ MachineConfigInfo getMachineConfigInfo() if (["0x1043", "0x104f", "0x4220", "0x4221", "0x4223", "0x4224"].any!( a => device.startsWith(a))) { - r.literalAttrs ~= Literal( + info.literalAttrs ~= Literal( "networking.enableIntel2200BGFirmware = true;"); } else if (["0x4229", "0x4230", "0x4222", "0x4227"].any!(a => device.startsWith(a))) { - r.literalAttrs ~= Literal( + info.literalAttrs ~= Literal( "networking.enableIntel3945ABGFirmware = true;"); } } @@ -728,85 +690,109 @@ MachineConfigInfo getMachineConfigInfo() // FIXME: do we want to enable an unfree driver here? if (vendor.startsWith("0x10de") && _class.startsWith("0x03")) { - r.videoDrivers ~= "nvidia"; - r.blacklistedKernelModules ~= "nouveau"; + info.videoDrivers ~= "nvidia"; + info.blacklistedKernelModules ~= "nouveau"; } } +} +void getUSBDeviceInfo(ref MachineConfigInfo info) +{ // USB devices foreach (path; dirEntries("/sys/bus/usb/devices", SpanMode.shallow).map!(a => a.name).array) { if (!exists(path ~ "/bInterfaceClass")) - { continue; - } string _class = readText(path ~ "/bInterfaceClass").strip; string subClass = readText(path ~ "/bInterfaceSubClass").strip; string protocol = readText(path ~ "/bInterfaceProtocol").strip; string _module; if (exists(path ~ "/driver/module")) - { _module = readLink(path ~ "/driver/module").baseName; - } if (_module != "" && - // Mass-storage controller. Definitely important. - _class.startsWith("0x08") || - // Keyboard. Needed if we want to use the keyboard when things go wrong in the initrd. - (subClass.startsWith("0x03") || protocol.startsWith("0x01"))) + // Mass-storage controller. Definitely important. + _class.startsWith("0x08") || + // Keyboard. Needed if we want to use the keyboard when things go wrong in the initrd. + (subClass.startsWith("0x03") || protocol.startsWith("0x01"))) { - r.availableKernelModules ~= _module; + info.availableKernelModules ~= _module; } } +} +void getBlockDeviceInfo(ref MachineConfigInfo info) +{ // Block and MMC devices foreach (path; ( - (exists("/sys/class/block") ? dirEntries("/sys/class/block", SpanMode.shallow).array : []) ~ - (exists("/sys/class/mmc_host") ? dirEntries("/sys/class/mmc_host", SpanMode.shallow).array : [])) + (exists("/sys/class/block") ? dirEntries("/sys/class/block", SpanMode.shallow) + .array : []) ~ + (exists("/sys/class/mmc_host") ? dirEntries("/sys/class/mmc_host", SpanMode.shallow).array + : [])) .map!(a => a.name).array) { - if (exists(path ~ "/device/driver/module")) { + if (exists(path ~ "/device/driver/module")) + { string _module = readLink(path ~ "/device/driver/module").baseName; - r.availableKernelModules ~= _module; + info.availableKernelModules ~= _module; } } +} + +void getBCacheInfo(ref MachineConfigInfo info) +{ // Bcache - auto bcacheDevices = dirEntries("/dev", SpanMode.shallow).map!(a => a.name).array.filter!(a => a.startsWith("bcache")).array; + auto bcacheDevices = dirEntries("/dev", SpanMode.shallow).map!(a => a.name) + .array + .filter!(a => a.startsWith("bcache")) + .array; bcacheDevices = bcacheDevices.filter!(device => device.indexOf("dev/bcachefs") == -1).array; - if (bcacheDevices.length > 0) { - r.availableKernelModules ~= "bcache"; - } + if (bcacheDevices.length > 0) + info.availableKernelModules ~= "bcache"; +} + +void getVMInfo(ref MachineConfigInfo info) +{ //Prevent unbootable systems if LVM snapshots are present at boot time. 
if (execute("lsblk -o TYPE", false).indexOf("lvm") != -1) - { - r.kernelModules ~= "dm-snapshot"; - } + info.kernelModules ~= "dm-snapshot"; // Check if we're in a VirtualBox guest. If so, enable the guest additions. auto virt = execute!ProcessPipes("systemd-detect-virt", false).stdout.readln.strip; - switch (virt) { - case "oracle": - r.literalAttrs ~= Literal("virtualisation.virtualbox.guest.enable = true;"); - break; - case "parallels": - r.literalAttrs ~= Literal("hardware.parallels.enable = true;"); - r.literalAttrs ~= Literal("nixpkgs.config.allowUnfreePredicate = pkg: builtins.elem (lib.getName pkg) [ \"prl-tools\" ];"); - break; - case "qemu": - case "kvm": - case "bochs": - r.imports ~= Literal("(modulesPath + \"/profiles/qemu-guest.nix\")"); - break; - case "microsoft": - r.literalAttrs ~= Literal("virtualization.hypervGuest.enable = true;"); - break; - case "systemd-nspawn": - r.literalAttrs ~= Literal("boot.isContainer;"); - break; - default: - break; + switch (virt) + { + case "oracle": + info.literalAttrs ~= Literal("virtualisation.virtualbox.guest.enable = true;"); + break; + case "parallels": + info.literalAttrs ~= Literal("hardware.parallels.enable = true;"); + info.literalAttrs ~= Literal( + "nixpkgs.config.allowUnfreePredicate = pkg: builtins.elem (lib.getName pkg) [ \"prl-tools\" ];"); + break; + case "qemu", "kvm", "bochs": + info.imports ~= Literal("(modulesPath + \"/profiles/qemu-guest.nix\")"); + break; + case "microsoft": + info.literalAttrs ~= Literal("virtualization.hypervGuest.enable = true;"); + break; + case "systemd-nspawn": + info.literalAttrs ~= Literal("boot.isContainer;"); + break; + default: + break; } +} + +MachineConfigInfo getMachineConfigInfo() +{ + MachineConfigInfo r; + + r.getPCIDeviceInfo(); + r.getUSBDeviceInfo(); + r.getBlockDeviceInfo(); + r.getBCacheInfo(); + r.getVMInfo(); return r; } diff --git a/packages/mcl/src/src/mcl/commands/machine_create.d b/packages/mcl/src/src/mcl/commands/machine_create.d index 37a7e6f1..964cd60b 100755 --- a/packages/mcl/src/src/mcl/commands/machine_create.d +++ b/packages/mcl/src/src/mcl/commands/machine_create.d @@ -7,7 +7,7 @@ import mcl.utils.nix : nix, toNix, Literal, mkDefault; import mcl.utils.json : toJSON, fromJSON; import mcl.utils.env : optional, parseEnv; import mcl.utils.array : uniqArrays; -import mcl.commands.host_info: Info, getInfo; +import mcl.commands.host_info : Info, getInfo; enum MachineType { @@ -25,7 +25,8 @@ enum Group blocksense } -struct User { +struct User +{ string userName; UserInfo userInfo; EmailInfo emailInfo; @@ -54,199 +55,252 @@ string[] getExistingUsers() User getUser(string userName) { - auto userJson = nix.eval!JSONValue("users/" ~ userName ~ "/user-info.nix", ["--file"]); + auto userJson = nix.eval!JSONValue("users/" ~ userName ~ "/user-info.nix", [ + "--file" + ]); User user; user.userName = userName; user.userInfo.isNormalUser = userJson["userInfo"]["isNormalUser"].boolean; user.userInfo.description = userJson["userInfo"]["description"].str; user.userInfo.extraGroups = userJson["userInfo"]["extraGroups"].array.map!(a => a.str).array; user.userInfo.hashedPassword = userJson["userInfo"]["hashedPassword"].str; - user.userInfo.sshKeys = userJson["userInfo"]["openssh"]["authorizedKeys"]["keys"].array.map!(a => a.str).array; - user.emailInfo.personalEmail = ("personalEmail" in userJson["emailInfo"].object) ? userJson["emailInfo"]["personalEmail"].str : ""; - user.emailInfo.workEmail = ("workEmail" in userJson["emailInfo"].object) ? 
userJson["emailInfo"]["workEmail"].str : user.emailInfo.personalEmail; - user.emailInfo.gitlabUsername = ("gitlabUsername" in userJson["emailInfo"].object) ? userJson["emailInfo"]["gitlabUsername"].str : ""; + user.userInfo.sshKeys = userJson["userInfo"]["openssh"]["authorizedKeys"]["keys"].array.map!( + a => a.str).array; + user.emailInfo.personalEmail = ("personalEmail" in userJson["emailInfo"].object) ? userJson["emailInfo"]["personalEmail"] + .str : ""; + user.emailInfo.workEmail = ("workEmail" in userJson["emailInfo"].object) ? userJson["emailInfo"]["workEmail"].str + : user.emailInfo.personalEmail; + user.emailInfo.gitlabUsername = ("gitlabUsername" in userJson["emailInfo"].object) ? userJson["emailInfo"]["gitlabUsername"] + .str : ""; return user; } +void writeFile(string filePath, string fileContent, string[] command = []) +{ + mkdirRecurse(filePath.dirName); + std.file.write(format("%s/%s", filePath), fileContent); + if (command.length > 0) + { + execute(command, false); + } +} + void createUserDir(User user) { - mkdirRecurse("users/" ~ user.userName); - string userNix = user.toNix; - std.file.write("users/" ~ user.userName ~ "/user-info.nix", userNix); - execute(["alejandra", "users/" ~ user.userName ~ "/user-info.nix"], false); - string gitConfig = generateGitConfig(user); - std.file.write("users/" ~ user.userName ~ "/.gitconfig", gitConfig); - - mkdirRecurse("users/" ~ user.userName ~ "/home-desktop"); - string homeDesktop = generateHomeDesktop(); - std.file.write("users/" ~ user.userName ~ "/home-desktop/default.nix", homeDesktop); - execute(["alejandra", "users/" ~ user.userName ~ "/home-desktop/default.nix"], false); - mkdirRecurse("users/" ~ user.userName ~ "/home-server"); - string homeServer = generateHomeServer(); - std.file.write("users/" ~ user.userName ~ "/home-server/default.nix", homeServer); - execute(["alejandra", "users/" ~ user.userName ~ "/home-server/default.nix"], false); + string userDir = format("users/%s/", user.userName); + string userInfoNix = format("%s/user-info.nix", userDir); + string userGitConfig = format("%s/.gitconfig", userDir); + writeFile(userInfoNix, user.toNix, ["alejandra", userInfoNix]); + writeFile(userGitConfig, generateGitConfig(user)); + + string homeDesktopDir = format("%s/home-desktop/", userDir); + string homeDesktopNix = format("%s/default.nix", homeDesktopDir); + writeFile(homeDesktopNix, generateHomeDesktop(), [ + "alejandra", homeDesktopNix + ]); + + string homeServerDir = format("%s/home-server/", userDir); + string homeServerNix = format("%s/default.nix", homeServerDir); + writeFile(homeServerNix, generateHomeServer(), ["alejandra", homeServerNix]); } -string generateHomeServer() { - string homeServer = "{pkgs, ...}: {\n"; - homeServer ~= " home.packages = with pkgs; [\n"; - homeServer ~= " ];\n"; - homeServer ~= "}\n"; +string generateHomeServer() +{ + string homeServer = "{pkgs, ...}: {\n + home.packages = with pkgs; [\n + ];\n + }\n"; return homeServer; } -string generateHomeDesktop() { - string homeDesktop = "{pkgs, ...}: {\n"; - homeDesktop ~= " imports = [\n"; - homeDesktop ~= " ../home-server\n"; - homeDesktop ~= " ];\n"; - homeDesktop ~= " home.packages = with pkgs; [\n"; - homeDesktop ~= " ];\n"; - homeDesktop ~= "}\n"; +string generateHomeDesktop() +{ + string homeDesktop = "{pkgs, ...}: {\n + imports = [\n + ../home-server\n + ];\n + home.packages = with pkgs; [\n + ];\n + }\n"; return homeDesktop; } -string generateGitConfig(User user) { - string gitConfig = "[user]\n"; - gitConfig ~= " email = " ~ 
(user.emailInfo.workEmail != "" ? user.emailInfo.workEmail : user.emailInfo.personalEmail) ~ "\n"; - gitConfig ~= " name = " ~ user.userInfo.description ~ "\n"; - gitConfig ~= "[fetch]\n"; - gitConfig ~= " prune = true\n"; - gitConfig ~= "[rebase]\n"; - gitConfig ~= " updateRefs = true\n"; - gitConfig ~= "[pull]\n"; - gitConfig ~= " ff = true\n"; - gitConfig ~= " rebase = false\n"; - gitConfig ~= "[merge]\n"; - gitConfig ~= " ff = only\n"; - gitConfig ~= "[core]\n"; - gitConfig ~= " editor = nvim\n"; - gitConfig ~= "[include]\n"; - gitConfig ~= " path = git/aliases.gitconfig\n"; - gitConfig ~= " path = git/delta.gitconfig\n"; - gitConfig ~= "[difftool \"diffpdf\"]\n"; - gitConfig ~= " cmd = diffpdf \\\"$LOCAL\\\" \\\"$REMOTE\\\"\n"; - gitConfig ~= "[difftool \"nvimdiff\"]\n"; - gitConfig ~= " cmd = nvim -d \\\"$LOCAL\\\" \\\"$REMOTE\\\"\n"; - gitConfig ~= "[diff]\n"; - gitConfig ~= " colorMoved = dimmed-zebra\n"; +string generateGitConfig(User user) +{ + string gitConfig = format("[user]\n + email = %s\n + name = %s\n + [fetch]\n + prune = true\n + [rebase]\n + updateRefs = true\n + [pull]\n + ff = true\n + rebase = false\n + [merge]\n + ff = only\n + [core]\n + editor = nvim\n + [include]\n + path = git/aliases.gitconfig\n + path = git/delta.gitconfig\n + [difftool \"diffpdf\"]\n + cmd = diffpdf \\\"$LOCAL\\\" \\\"$REMOTE\\\"\n + [difftool \"nvimdiff\"]\n + cmd = nvim -d \\\"$LOCAL\\\" \\\"$REMOTE\\\"\n + [diff]\n + colorMoved = dimmed-zebra\n", (user.emailInfo.workEmail != "" ? user.emailInfo.workEmail + : user.emailInfo.personalEmail), user.userInfo.description); return gitConfig; } - void checkifNixosMachineConfigRepo() { + static immutable string repoUrl = "metacraft-labs/nixos-machine-config"; if (execute(["git", "config", "--get", "remote.origin.url"], false) - .indexOf("metacraft-labs/nixos-machine-config") == -1) + .indexOf(repoUrl) == -1) { - assert(0, "This is not the repo metacraft-labs/nixos-machine-config"); + assert(0, format("This is not the repo %s", repoUrl)); } } +string[] getGroupsFromFile(DirEntry input) +{ + string name = input.name ~ "/user-info.nix"; + if (!std.file.exists(name)) + return ["metacraft"]; + auto userInfoFile = nix.eval!JSONValue(name, ["--file"]); + if ("userInfo" !in userInfoFile || userInfoFile["userInfo"].isNull + || "extraGroups" !in userInfoFile["userInfo"] || userInfoFile["userInfo"]["extraGroups"] + .isNull) + return ["metacraft"]; + return userInfoFile["userInfo"]["extraGroups"].array.map!(a => a.str).array; +} + string[] getGroups() { - string[] groups = dirEntries("users", SpanMode.shallow).map!(a => a.name ~ "/user-info.nix").array.map!((a) { - if (!std.file.exists(a)) - { - return JSONValue(["metacraft"]).array; - } - auto userInfoFile = nix.eval!JSONValue(a, ["--file"]); - if ("userInfo" !in userInfoFile || userInfoFile["userInfo"].isNull) - { - return JSONValue(["metacraft"]).array; - } - if ("extraGroups" !in userInfoFile["userInfo"] || userInfoFile["userInfo"]["extraGroups"].isNull) - { - return JSONValue(["metacraft"]).array; - } - return userInfoFile["userInfo"]["extraGroups"].array; - }).array + string[] groups = dirEntries("users", SpanMode.shallow) + .map!getGroupsFromFile .joiner .array - .map!(a => a.str) - .array .sort - .array .uniq .array; return groups; } -User createUser() { +User createUser() +{ + auto createUser = params.createUser || prompt!bool("Create new user"); + if (!createUser) + { + string[] existingUsers = getExistingUsers(); + string userName = params.userName != "" ? 
params.userName + : prompt!string("Select an existing username", existingUsers); + return getUser(userName); + } - auto createUser = params.createUser || prompt!bool("Create new user"); - if (!createUser) - { - string[] existingUsers = getExistingUsers(); - string userName = params.userName != "" ? params.userName : prompt!string("Select an existing username", existingUsers); - return getUser(userName); - } - else - { - User user; - user.userName = params.userName != "" ? params.userName : prompt!string("Enter the new username"); - user.userInfo.description = params.description != "" ? params.description : prompt!string("Enter the user's description/full name"); - user.userInfo.isNormalUser = params.isNormalUser || prompt!bool("Is this a normal or root user"); - user.userInfo.extraGroups = (params.extraGroups != "" ? params.extraGroups : prompt!string("Enter the user's extra groups (comma delimited)", getGroups())).split(",").map!(strip).array; - createUserDir(user); - return user; - } + User user; + user.userName = params.userName != "" ? params.userName + : prompt!string("Enter the new username"); + user.userInfo.description = params.description != "" ? params.description + : prompt!string("Enter the user's description/full name"); + user.userInfo.isNormalUser = params.isNormalUser || prompt!bool( + "Is this a normal or root user"); + user.userInfo.extraGroups = (params.extraGroups != "" ? params.extraGroups + : prompt!string("Enter the user's extra groups (comma delimited)", getGroups())).split(",") + .map!(strip).array; + createUserDir(user); + return user; } struct MachineConfiguration { - struct Networking { + struct Networking + { string hostId; } + Networking networking; - struct MachineUserInfo { - struct MCL { + struct MachineUserInfo + { + struct MCL + { string[] includedUsers; } + MCL mcl; - struct UserData { + struct UserData + { string[] extraGroups; } + UserData[string] users; } - struct MCL { - struct HostInfo { + + struct MCL + { + struct HostInfo + { string sshKey; } + HostInfo host_info; } + MCL mcl; MachineUserInfo users; } -void createMachine(MachineType machineType, string machineName, User user) { - auto infoJSON = execute(["ssh", params.sshPath, "sudo nix --experimental-features \\'nix-command flakes\\' --refresh --accept-flake-config run github:metacraft-labs/nixos-modules/feat/machine_create#mcl host_info"],false, false); - auto infoJSONParsed = infoJSON.parseJSON; - Info info = infoJSONParsed.fromJSON!Info; - +MachineConfiguration getMachineConfiguration(User user, Info info) +{ MachineConfiguration machineConfiguration; - machineConfiguration.users.users[user.userName] = MachineConfiguration.MachineUserInfo.UserData([user.userName] ~ "wheel"); + machineConfiguration.users.users[user.userName] = MachineConfiguration + .MachineUserInfo.UserData([user.userName] ~ "wheel"); machineConfiguration.users.mcl.includedUsers = [user.userName]; - machineConfiguration.networking.hostId = executeShell("tr -dc 0-9a-f < /dev/urandom | head -c 8").output; + machineConfiguration.networking.hostId = executeShell( + "tr -dc 0-9a-f < /dev/urandom | head -c 8").output; machineConfiguration.mcl.host_info.sshKey = info.softwareInfo.opensshInfo.publicKey; - string machineNix = machineConfiguration.toNix(["config", "dots"]).replace("host_info", "host-info"); - mkdirRecurse("machines/" ~ machineType.to!string ~ "/" ~ machineName); - std.file.write("machines/" ~ machineType.to!string ~ "/" ~ machineName ~ "/" ~ "configuration.nix", machineNix); - // 
writeln(info.toJSON(true).toPrettyString()); + return machineConfiguration; +} +void saveMachineConfiguration(MachineType machineType, string machineName, Info info, User user) +{ + MachineConfiguration machineConfiguration = getMachineConfiguration(user, info); + string machineNix = machineConfiguration.toNix(["config", "dots"]) + .replace("host_info", "host-info"); + string filePath = format("machines/%s/%s/configuration.nix", machineType.to!string, machineName); + writeFile(filePath, machineNix, ["alejandra", filePath]); + +} + +Info getInfoOverSSH() +{ + auto infoJSON = execute([ + "ssh", params.sshPath, + "sudo nix --experimental-features \\'nix-command flakes\\' --refresh --accept-flake-config run github:metacraft-labs/nixos-modules/feat/machine_create#mcl host_info" + ], false, false); + auto infoJSONParsed = infoJSON.parseJSON; + Info info = infoJSONParsed.fromJSON!Info; + return info; +} + +HardwareConfiguration initHardwareConfiguration(Info info) +{ HardwareConfiguration hardwareConfiguration; hardwareConfiguration.hardware.cpu["intel"] = HardwareConfiguration.Hardware.Cpu(); - switch (info.hardwareInfo.processorInfo.vendor) { - case "GenuineIntel": - hardwareConfiguration.hardware.cpu["intel"] = HardwareConfiguration.Hardware.Cpu(); - break; - case "AuthenticAMD": - hardwareConfiguration.hardware.cpu["amd"] = HardwareConfiguration.Hardware.Cpu(); - break; - default: - assert(0, "Unknown processor vendor " ~ info.hardwareInfo.processorInfo.vendor); + switch (info.hardwareInfo.processorInfo.vendor) + { + case "GenuineIntel": + hardwareConfiguration.hardware.cpu["intel"] = HardwareConfiguration.Hardware.Cpu(); + break; + case "AuthenticAMD": + hardwareConfiguration.hardware.cpu["amd"] = HardwareConfiguration.Hardware.Cpu(); + break; + default: + assert(0, "Unknown processor vendor " ~ info.hardwareInfo.processorInfo.vendor); } info.hardwareInfo.processorInfo.architectureInfo.flags.split(", ").each!((a) { @@ -260,119 +314,193 @@ void createMachine(MachineType machineType, string machineName, User user) { } }); hardwareConfiguration.boot.kernelModules ~= info.softwareInfo.machineConfigInfo.kernelModules; - hardwareConfiguration.boot.initrd.kernelModules ~= info.softwareInfo.machineConfigInfo.kernelModules; - hardwareConfiguration.boot.initrd.availableKernelModules ~= info.softwareInfo.machineConfigInfo.availableKernelModules; - hardwareConfiguration.boot.extraModulePackages ~= info.softwareInfo.machineConfigInfo.extraModulePackages.map!(Literal).array; - hardwareConfiguration._literalAttrs ~= info.softwareInfo.machineConfigInfo.literalAttrs.map!(Literal).array; - hardwareConfiguration.imports ~= info.softwareInfo.machineConfigInfo.imports.map!(Literal).array; - hardwareConfiguration.services.xserver.videoDrivers ~= info.softwareInfo.machineConfigInfo.videoDrivers; + hardwareConfiguration.boot.initrd.kernelModules ~= info.softwareInfo + .machineConfigInfo.kernelModules; + hardwareConfiguration.boot.initrd.availableKernelModules ~= info + .softwareInfo.machineConfigInfo.availableKernelModules; + hardwareConfiguration.boot.extraModulePackages ~= info.softwareInfo + .machineConfigInfo.extraModulePackages.map!(Literal).array; + hardwareConfiguration._literalAttrs ~= info.softwareInfo.machineConfigInfo + .literalAttrs.map!(Literal).array; + hardwareConfiguration.imports ~= info.softwareInfo.machineConfigInfo.imports.map!(Literal) + .array; + hardwareConfiguration.services.xserver.videoDrivers ~= info.softwareInfo + .machineConfigInfo.videoDrivers; // Misc Kernel Modules - 
hardwareConfiguration.boot.initrd.availableKernelModules ~= ["nvme", "xhci_pci", "usbhid", "usb_storage", "sd_mod"]; + hardwareConfiguration.boot.initrd.availableKernelModules ~= [ + "nvme", "xhci_pci", "usbhid", "usb_storage", "sd_mod" + ]; + return hardwareConfiguration; +} +void initHardwareConfigurationDisko(HardwareConfiguration hardwareConfiguration, Info info) +{ // Disks - hardwareConfiguration.disko.DISKO.makeZfsPartitions.swapSizeGB = (info.hardwareInfo.memoryInfo.totalGB.to!double*1.5).to!int; - auto nvmeDevices = info.hardwareInfo.storageInfo.devices.filter!(a => a.dev.indexOf("nvme") != -1 || a.model.indexOf("SSD") != -1).array.map!(a => a.model.replace(" ", "_") ~ "_" ~ a.serial).array; - string[] disks = (nvmeDevices.length == 1 ? nvmeDevices[0] : (params.disks != "" ? params.disks : prompt!string("Enter the disks to use (comma delimited)", nvmeDevices))).split(",").map!(strip).array.map!(a => "/dev/disk/by-id/nvme-" ~ a).array; + hardwareConfiguration.disko.DISKO.makeZfsPartitions.swapSizeGB = ( + info.hardwareInfo.memoryInfo.totalGB.to!double * 1.5).to!int; + auto nvmeDevices = info.hardwareInfo + .storageInfo + .devices + .filter!(a => a.dev.indexOf("nvme") != -1 || a.model.indexOf("SSD") != -1) + .array + .map!(a => a.model.replace(" ", "_") ~ "_" ~ a.serial) + .array; + string[] disks = (nvmeDevices.length == 1 ? nvmeDevices[0] : (params.disks != "" ? params.disks + : prompt!string("Enter the disks to use (comma delimited)", nvmeDevices))).split(",") + .map!(strip) + .array + .map!(a => "/dev/disk/by-id/nvme-" ~ a) + .array; hardwareConfiguration.disko.DISKO.makeZfsPartitions.disks = disks; +} - hardwareConfiguration = hardwareConfiguration.uniqArrays; - - - string hardwareNix = hardwareConfiguration.toNix(["config", "lib", "pkgs", "modulesPath", "dirs", "dots"]) +void processHardwareConfigNix(HardwareConfiguration hardwareConfiguration, MachineType machineType, string machineName) +{ + string hardwareNix = hardwareConfiguration.toNix([ + "config", "lib", "pkgs", "modulesPath", "dirs", "dots" + ]) .replace("DISKO", "(import \"${dirs.lib}/disko.nix\")") - .replace ("makeZfsPartitions = ", "makeZfsPartitions ") + .replace("makeZfsPartitions = ", "makeZfsPartitions ") .replace("SYSTEMDBOOT", "systemd-boot") .replace("mcl.host-info.sshKey", "# mcl.host-info.sshKey"); - std.file.write("machines/" ~ machineType.to!string ~ "/" ~ machineName ~ "/" ~ "hw-config.nix", hardwareNix); - execute(["alejandra", "machines/" ~ machineType.to!string ~ "/" ~ machineName ~ "/" ~ "configuration.nix"], false); - execute(["alejandra", "machines/" ~ machineType.to!string ~ "/" ~ machineName ~ "/" ~ "hw-config..nix"], false); + string filePath = format("machines/%s/%s/hw-config.nix", machineType.to!string, machineName); + writeFile(filePath, hardwareNix, ["alejandra", filePath]); } -struct HardwareConfiguration { +void createMachine(MachineType machineType, string machineName, User user) +{ + Info info = getInfoOverSSH(); + + saveMachineConfiguration(machineType, machineName, info, user); + + HardwareConfiguration hardwareConfiguration = initHardwareConfiguration(info); + + initHardwareConfigurationDisko(hardwareConfiguration, info); + + hardwareConfiguration = hardwareConfiguration.uniqArrays; + + processHardwareConfigNix(hardwareConfiguration, machineType, machineName); +} + +struct HardwareConfiguration +{ Literal[] _literalAttrs; - Literal[] imports = []; - struct Disko { - struct INNER_DISKO { - struct MakeZfsPartition { + Literal[] imports = []; + struct Disko + { + struct INNER_DISKO + { 
+ struct MakeZfsPartition + { string[] disks; int swapSizeGB; int espSizeGB = 4; Literal _literalConfig = "inherit config;"; } + MakeZfsPartition makeZfsPartitions; } + INNER_DISKO DISKO; } + Disko disko; - struct Boot { - struct Initrd { + struct Boot + { + struct Initrd + { string[] kernelModules; string[] availableKernelModules; } + Initrd initrd; string[] kernelModules; Literal[] extraModulePackages; - struct Loader { - struct SystemdBoot { + struct Loader + { + struct SystemdBoot + { bool enable = true; } + SystemdBoot SYSTEMDBOOT; - struct Grub { + struct Grub + { bool enable = false; bool efiSupport = true; Literal devices = Literal("builtins.attrNames config.disko.devices.disk"); bool copyKernels = true; } + Grub grub; - struct EFI { - Literal canTouchEfiVariables = mkDefault(true);; + struct EFI + { + Literal canTouchEfiVariables = mkDefault(true); + ; } + EFI efi; } + Loader loader; string[] blacklistedKernelModules; } + Boot boot; - struct Networking { + struct Networking + { // struct UseDHCP { // Literal useDHCP = mkDefault(true); // } // UseDHCP[string] interfaces; Literal useDHCP = mkDefault(true); } + Networking networking; - struct PowerManagement { + struct PowerManagement + { Literal cpuFreqGovernor = mkDefault("performance"); } + PowerManagement powerManagement; - struct Hardware { - struct Cpu { + struct Hardware + { + struct Cpu + { bool updateMicrocode = true; } + Cpu[string] cpu; bool enableAllFirmware = true; Literal enableRedistributableFirmware = mkDefault(true); } + Hardware hardware; - struct Services { - struct Xserver { + struct Services + { + struct Xserver + { bool enable = true; string[] videoDrivers; } + Xserver xserver; } + Services services; } void createMachineConfiguration() { checkifNixosMachineConfigRepo(); - auto machineType = cast(int)params.machineType != 0 ? params.machineType : prompt!MachineType("Machine type"); - auto machineName = params.machineName != "" ? params.machineName : prompt!string("Enter the name of the machine"); + auto machineType = cast(int) params.machineType != 0 ? params.machineType + : prompt!MachineType("Machine type"); + auto machineName = params.machineName != "" ? 
params.machineName + : prompt!string("Enter the name of the machine"); User user; user = createUser(); - machineType.createMachine( machineName, user); + machineType.createMachine(machineName, user); } Params params; @@ -382,6 +510,7 @@ export void machine_create() params = parseEnv!Params; createMachineConfiguration(); } + struct Params { string sshPath; @@ -391,7 +520,7 @@ struct Params @optional() string description; @optional() bool isNormalUser; @optional() string extraGroups; - @optional() MachineType machineType = cast(MachineType)0; + @optional() MachineType machineType = cast(MachineType) 0; @optional() string disks; void setup() diff --git a/packages/mcl/src/src/mcl/commands/shard_matrix.d b/packages/mcl/src/src/mcl/commands/shard_matrix.d index 4462194a..522382ec 100644 --- a/packages/mcl/src/src/mcl/commands/shard_matrix.d +++ b/packages/mcl/src/src/mcl/commands/shard_matrix.d @@ -1,6 +1,5 @@ module mcl.commands.shard_matrix; - import std.algorithm : map; import std.array : array; import std.conv : to, parse; @@ -11,6 +10,10 @@ import std.range : iota; import std.regex : matchFirst, regex; import std.stdio : writeln; import std.string : strip; +import std.regex : matchFirst, regex; +import std.format : format; +import std.algorithm : each; +import std.parallelism : parallel; import mcl.utils.env : parseEnv, optional; import mcl.utils.json : toJSON; @@ -50,15 +53,16 @@ ShardMatrix generateShardMatrix(string flakeRef = ".") { import std.path : isValidPath, absolutePath, buildNormalizedPath; - if (flakeRef.isValidPath) { + if (flakeRef.isValidPath) + { flakeRef = flakeRef.absolutePath.buildNormalizedPath; } const shardCountOutput = nix.eval("", [ - "--impure", - "--expr", - `(builtins.getFlake "` ~ flakeRef ~ `").outputs.legacyPackages.x86_64-linux.mcl.matrix.shardCount or 0` - ]); + "--impure", + "--expr", + `(builtins.getFlake "` ~ flakeRef ~ `").outputs.legacyPackages.x86_64-linux.mcl.matrix.shardCount or 0` + ]); infof("shardCount: '%s'", shardCountOutput); @@ -86,27 +90,37 @@ unittest else { import mcl.utils.path : rootDir; - auto flakeRef = rootDir.buildPath("packages/mcl/src/src/mcl/utils/test/nix/shard-matrix-ok"); + + auto flakeRef = rootDir.buildPath( + "packages/mcl/src/src/mcl/utils/test/nix/shard-matrix-ok"); } auto shards = generateShardMatrix(flakeRef); assert(shards.include.length == 11); - assert(shards.include[0].prefix == "legacyPackages"); - assert(shards.include[0].postfix == "shards.0"); - assert(shards.include[0].digit == 0); + foreach(i, shard; shards.include.parallel) + assertShard(shard, i.to!int); +} + +void assertShard(Shard shard, int index) { + string expectedPrefix = index == -1 ? "" : "legacyPackages"; + string expectedPostfix = index == -1 ? "" : ("shards." 
~ index.to!string); + assert(shard.prefix == expectedPrefix, "Expected shard %s to have prefix '%s', but got %s".format(index, expectedPrefix, shard.prefix)); + assert(shard.postfix == expectedPostfix, "Expected shard %s to have postfix '%s', but got %s".format(index, expectedPostfix, shard.postfix)); + assert(shard.digit == index, "Expected shard %s to have digit %s, but got %s".format(index, index, shard.digit)); } + @("generateShardMatrix.fail") unittest { import mcl.utils.path : rootDir; - auto flakeRef = rootDir.buildPath("packages/mcl/src/src/mcl/utils/test/nix/shard-matrix-no-shards"); + + auto flakeRef = rootDir.buildPath( + "packages/mcl/src/src/mcl/utils/test/nix/shard-matrix-no-shards"); auto shards = generateShardMatrix(flakeRef); - assert(shards.include.length == 1); - assert(shards.include[0].prefix == ""); - assert(shards.include[0].postfix == ""); - assert(shards.include[0].digit == -1); + assert(shards.include.length == 1, "generateShardMatrix should return 1 shard, but got %s".format(shards.include.length)); + assertShard(shards.include[0], -1); } ShardMatrix splitToShards(int shardCount) @@ -124,16 +138,9 @@ ShardMatrix splitToShards(int shardCount) unittest { auto shards = splitToShards(3); - assert(shards.include.length == 3); - assert(shards.include[0].prefix == "legacyPackages"); - assert(shards.include[0].postfix == "shards.0"); - assert(shards.include[0].digit == 0); - assert(shards.include[1].prefix == "legacyPackages"); - assert(shards.include[1].postfix == "shards.1"); - assert(shards.include[1].digit == 1); - assert(shards.include[2].prefix == "legacyPackages"); - assert(shards.include[2].postfix == "shards.2"); - assert(shards.include[2].digit == 2); + assert(shards.include.length == 3, "Expectes splitToShards(3) to return 3 shards, but got %s".format(shards.include.length)); + foreach(i, shard; shards.include.parallel) + assertShard(shard, i.to!int); } @@ -144,9 +151,7 @@ void saveShardMatrix(ShardMatrix matrix, Params params) infof("Shard matrix: %s", matrixJson.toPrettyString); const envLine = "gen_matrix=" ~ matrixString; if (params.githubOutput != "") - { params.githubOutput.append(envLine); - } else { createResultDirs(); diff --git a/packages/mcl/src/src/mcl/utils/array.d b/packages/mcl/src/src/mcl/utils/array.d index 7534e649..8d69ab73 100644 --- a/packages/mcl/src/src/mcl/utils/array.d +++ b/packages/mcl/src/src/mcl/utils/array.d @@ -8,32 +8,52 @@ import std.stdio : writeln; T[] uniqIfSame(T)(T[] arr) { if (arr.length == 0) - { return arr; - } - else if (arr.all!(a => a == arr[0])) { + else if (arr.all!(a => a == arr[0])) return [arr[0]]; - } - else { + else return arr; - } } -T uniqArrays(T)(T s){ - static if (isSomeString!T){ - return s; - } - else static if (isArray!T){ - return s.sort.uniq.array.to!T; - } - else static if (is(T == struct)){ - static foreach (idx, field; T.tupleof){ +@("uniqIfSame") +unittest +{ + assert(uniqIfSame([1, 1, 1, 1]) == [1], + "uniqIfSame should return [1] for [1, 1, 1, 1], but got " ~ uniqIfSame([1, 1, 1, 1]).to!string); + assert(uniqIfSame([1, 2, 3, 4]) == [1, 2, 3, 4], + "uniqIfSame should return [1, 2, 3, 4] for [1, 2, 3, 4], but got " ~ uniqIfSame([1, 2, 3, 4]).to!string); + assert(uniqIfSame(["a", "a", "a", "a"]) == ["a"], + "uniqIfSame should return [\"a\"] for [\"a\", \"a\", \"a\", \"a\"], but got " ~ uniqIfSame(["a", "a", "a", "a"]).to!string); + assert(uniqIfSame(["a", "b", "c", "d"]) == ["a", "b", "c", "d"], + "uniqIfSame should return [\"a\", \"b\", \"c\", \"d\"] for [\"a\", \"b\", \"c\", \"d\"], but got " ~ 
uniqIfSame(["a", "b", "c", "d"]).to!string); +} + +T uniqArrays(T)(T s) +{ + static if (isArray!T && !isSomeString!T) + s = s.sort.uniq.array.to!T; + else static if (is(T == struct)) + static foreach (idx, field; T.tupleof) s.tupleof[idx] = s.tupleof[idx].uniqArrays; - } - return s; - } - else{ - return s; + + return s; +} + +@("uniqArrays") +unittest +{ + assert(uniqArrays([1, 2, 3, 4, 1, 2, 3, 4]) == [1, 2, 3, 4], + "uniqArrays should return [1, 2, 3, 4] for [1, 2, 3, 4, 1, 2, 3, 4], but got " ~ uniqArrays([1, 2, 3, 4, 1, 2, 3, 4]).to!string); + assert(uniqArrays("aabbccdd") == "aabbccdd", + "uniqArrays should return \"aabbccdd\" for \"aabbccdd\", but got " ~ uniqArrays("aabbccdd").to!string); + assert(uniqArrays(5) == 5, "uniqArrays should return 5 for 5, but got " ~ uniqArrays(5).to!string); + struct TestStruct + { + int[] a; + string b; } + + assert(uniqArrays(TestStruct([1, 2, 3, 4, 1, 2, 3, 4], "aabbccdd")) == TestStruct([1, 2, 3, 4], "aabbccdd"), + "uniqArrays should return TestStruct([1, 2, 3, 4], \"aabbccdd\") for TestStruct([1, 2, 3, 4, 1, 2, 3, 4], \"aabbccdd\"), but got " ~ uniqArrays(TestStruct([1, 2, 3, 4, 1, 2, 3, 4], "aabbccdd")).to!string); } diff --git a/packages/mcl/src/src/mcl/utils/cachix.d b/packages/mcl/src/src/mcl/utils/cachix.d index 3345ebfe..9662ad15 100644 --- a/packages/mcl/src/src/mcl/utils/cachix.d +++ b/packages/mcl/src/src/mcl/utils/cachix.d @@ -12,7 +12,9 @@ in (workspace && machine && deploymentId) => unittest { assert(getCachixDeploymentApiUrl("my-workspace", "my-machine", 123) == - "https://app.cachix.org/api/v1/deploy/deployment/my-workspace/my-machine/123"); + "https://app.cachix.org/api/v1/deploy/deployment/my-workspace/my-machine/123", + "getCachixDeploymentApiUrl(\"my-workspace\", \"my-machine\", 123) should return \"https://app.cachix.org/api/v1/deploy/deployment/my-workspace/my-machine/123\", but returned %s" + .fmt(getCachixDeploymentApiUrl("my-workspace", "my-machine", 123))); } diff --git a/packages/mcl/src/src/mcl/utils/coda.d b/packages/mcl/src/src/mcl/utils/coda.d index b2809216..7d1fdcd7 100644 --- a/packages/mcl/src/src/mcl/utils/coda.d +++ b/packages/mcl/src/src/mcl/utils/coda.d @@ -112,7 +112,7 @@ struct CodaApiClient auto apiToken = environment.get("CODA_API_TOKEN"); auto coda = CodaApiClient(apiToken); auto resp = coda.getDocument("6vM0kjfQP6"); - assert(resp.id == "6vM0kjfQP6"); + assert(resp.id == "6vM0kjfQP6", "Expected document ID to be 6vM0kjfQP6, but got %s".format(resp.id)); } Document[] listDocuments() @@ -127,7 +127,7 @@ struct CodaApiClient auto apiToken = environment.get("CODA_API_TOKEN"); auto coda = CodaApiClient(apiToken); auto resp = coda.listDocuments(); - assert(resp.length > 0); + assert(resp.length > 0, "Expected at least one document, but got 0"); } struct InitialPage @@ -157,10 +157,10 @@ struct CodaApiClient string url = "/docs"; JSONValue req = JSONValue( [ - "title": JSONValue(title), - "initialPage": initialPage.toJSON, - "timezone": JSONValue(timezone) - ]); + "title": JSONValue(title), + "initialPage": initialPage.toJSON, + "timezone": JSONValue(timezone) + ]); if (sourceDoc != "") req["sourceDoc"] = JSONValue(sourceDoc); if (folderID != "") @@ -189,9 +189,9 @@ struct CodaApiClient InitialPage.PageContent("canvas", InitialPage.PageContent.CanvasContent("html", "
<p>This is rich text</p>
"))); auto resp = coda.createDocument("Test Document", "", "Europe/Sofia", "", initialPage); - assert(resp.name == "Test Document"); + assert(resp.name == "Test Document", "Expected document name to be \"Test Document\", but got %s".format(resp.name)); coda.deleteDocument(resp.id); - assertThrown!(HTTPStatusException)(coda.getDocument(resp.id)); + assertThrown!(HTTPStatusException)(coda.getDocument(resp.id), "Expected coda.getDocument to throw an exception, but it didn't"); } Document patchDocument(string documentId, string title = "", string iconName = "") @@ -217,26 +217,30 @@ struct CodaApiClient auto resp = coda.createDocument("Test Document", "", "Europe/Sofia", "", initialPage); coda.patchDocument(resp.id, "Patched Document", ""); auto patched = coda.getDocument(resp.id); - assert(patched.name == "Patched Document"); + assert(patched.name == "Patched Document", "Expected document name to be \"Patched Document\", but got %s".format(patched.name)); coda.deleteDocument(patched.id); } - struct Table { + struct Table + { string id; string type; string tableType; string href; string browserLink; string name; - struct Parent { + struct Parent + { string id; string type; string href; string browserLink; string name; } + Parent parent; - struct ParentTable { + struct ParentTable + { string id; string type; string tableType; @@ -245,32 +249,41 @@ struct CodaApiClient string name; Parent parent; } + ParentTable parentTable; - struct DisplayColumn { + struct DisplayColumn + { string id; string type; string href; } + DisplayColumn displayColumn; int rowCount; - struct Sort { + struct Sort + { string direction; - struct Column { + struct Column + { string id; string type; string href; } + Column column; } + Sort[] sorts; string layout; - struct Filter { + struct Filter + { bool valid; bool isVolatile; bool hasUserFormula; bool hasTodayFormula; bool hasNowFormula; } + Filter filter; SysTime createdAt; SysTime updatedAt; @@ -288,7 +301,7 @@ struct CodaApiClient auto apiToken = environment.get("CODA_API_TOKEN"); auto coda = CodaApiClient(apiToken); auto resp = coda.listTables("6vM0kjfQP6"); - assert(resp.length > 0); + assert(resp.length > 0, "Expected at least one table, but got 0"); } Table getTable(string documentId, string tableId) @@ -304,10 +317,11 @@ struct CodaApiClient auto coda = CodaApiClient(apiToken); auto tables = coda.listTables("6vM0kjfQP6"); auto resp = coda.getTable("6vM0kjfQP6", tables[0].id); - assert(resp.id == tables[0].id); + assert(resp.id == tables[0].id, "Expected table ID to be %s, but got %s".format(tables[0].id, resp.id)); } - struct Column { + struct Column + { string id; string type; string href; @@ -316,30 +330,36 @@ struct CodaApiClient bool calculated; string formula; string defaultValue; - struct Format { + struct Format + { string type; bool isArray; string label; string disableIf; string action; } + Format format; - struct Parent { + struct Parent + { string id; string type; string tableType; string href; string browserLink; string name; - struct ParentParent { + struct ParentParent + { string id; string type; string href; string browserLink; string name; } + ParentParent parent; } + Parent parent; } @@ -356,7 +376,7 @@ struct CodaApiClient auto coda = CodaApiClient(apiToken); auto tables = coda.listTables("6vM0kjfQP6"); auto resp = coda.listColumns("6vM0kjfQP6", tables[0].id); - assert(resp.length > 0); + assert(resp.length > 0, "Expected at least one column, but got 0"); } Column getColumn(string documentId, string tableId, string columnId) @@ -373,12 +393,13 @@ 
struct CodaApiClient auto tables = coda.listTables("6vM0kjfQP6"); auto columns = coda.listColumns("6vM0kjfQP6", tables[0].id); auto resp = coda.getColumn("6vM0kjfQP6", tables[0].id, columns[0].id); - assert(resp.id == columns[0].id); + assert(resp.id == columns[0].id, "Expected column ID to be %s, but got %s".format(columns[0].id, resp.id)); } alias RowValue = SumType!(string, int, bool, string[], int[], bool[]); - struct Row { + struct Row + { string id; string type; string href; @@ -388,20 +409,23 @@ struct CodaApiClient SysTime createdAt; SysTime updatedAt; RowValue[string] values; - struct Parent { + struct Parent + { string id; string type; string tableType; string href; string browserLink; string name; - struct ParentParent { + struct ParentParent + { string id; string type; string href; string browserLink; string name; } + ParentParent parent; } } @@ -419,15 +443,17 @@ struct CodaApiClient auto coda = CodaApiClient(apiToken); auto tables = coda.listTables("6vM0kjfQP6"); auto resp = coda.listRows("6vM0kjfQP6", tables[0].id); - assert(resp.length > 0); + assert(resp.length > 0, "Expected at least one row, but got 0"); } - struct InsertRowsReturn { + struct InsertRowsReturn + { string requestId; string[] addedRowIds; } - string[] insertRows(string documentId, string tableId, RowValues[] rows, string[] keyColumns = []) + string[] insertRows(string documentId, string tableId, RowValues[] rows, string[] keyColumns = [ + ]) { string url = "/docs/%s/tables/%s/rows".format(documentId, tableId); JSONValue req = JSONValue( @@ -438,8 +464,8 @@ struct CodaApiClient req["keyColumns"] = JSONValue(keyColumns.toJSON); return post!InsertRowsReturn(url, req, false).addedRowIds; } - alias upsertRows = insertRows; + alias upsertRows = insertRows; // Can't be implemented because of the lack of support for a body in DELETE requests // void deleteRows(string documentId, string tableId, string[] rowIds) @@ -466,13 +492,13 @@ struct CodaApiClient auto tables = coda.listTables("dEJJPwdxcw"); RowValues[] rows = [ RowValues([ - CodaCell("c-p6Yjm8zaEH", "Test Name"), - ]) + CodaCell("c-p6Yjm8zaEH", "Test Name"), + ]) ]; auto resp = coda.insertRows("dEJJPwdxcw", tables[0].id, rows); - assert(resp.length > 0); + assert(resp.length > 0, "Expected at least one row after inserting rows, but got 0"); coda.deleteRow("dEJJPwdxcw", tables[0].id, resp[0]); - assertThrown!(HTTPStatusException)(coda.getRow("dEJJPwdxcw", tables[0].id, resp[0])); + assertThrown!(HTTPStatusException)(coda.getRow("dEJJPwdxcw", tables[0].id, resp[0]), "Expected coda.getRow to throw an exception, but it didn't"); } Row getRow(string documentId, string tableId, string rowId) @@ -482,16 +508,18 @@ struct CodaApiClient } @("coda.getRow") - unittest { + unittest + { auto apiToken = environment.get("CODA_API_TOKEN"); auto coda = CodaApiClient(apiToken); auto tables = coda.listTables("dEJJPwdxcw"); auto rows = coda.listRows("dEJJPwdxcw", tables[0].id); auto resp = coda.getRow("dEJJPwdxcw", tables[0].id, rows[0].id); - assert(resp.id == rows[0].id); + assert(resp.id == rows[0].id, "Expected row ID to be %s, but got %s".format(rows[0].id, resp.id)); } - struct UpdateRowReturn { + struct UpdateRowReturn + { string requestId; string id; } @@ -507,14 +535,15 @@ struct CodaApiClient } @("coda.updateRow") - unittest { + unittest + { auto apiToken = environment.get("CODA_API_TOKEN"); auto coda = CodaApiClient(apiToken); auto tables = coda.listTables("dEJJPwdxcw"); RowValues[] rows = [ RowValues([ - CodaCell("c-p6Yjm8zaEH", "Test Name"), - ]) + 
CodaCell("c-p6Yjm8zaEH", "Test Name"), + ]) ]; auto resp = coda.insertRows("dEJJPwdxcw", tables[0].id, rows); RowValues newRow = RowValues([ @@ -523,39 +552,43 @@ struct CodaApiClient auto updated = coda.updateRow("dEJJPwdxcw", tables[0].id, resp[0], newRow); - assert(updated == resp[0]); + assert(updated == resp[0], "Expected updated row ID to be %s, but got %s".format(resp[0], updated)); coda.deleteRow("dEJJPwdxcw", tables[0].id, resp[0]); } - struct PushButtonResponse { + struct PushButtonResponse + { string requestId; string rowId; string columnId; } - PushButtonResponse pushButton(string documentId, string tableId, string rowId, string columnId) { + PushButtonResponse pushButton(string documentId, string tableId, string rowId, string columnId) + { string url = "/docs/%s/tables/%s/rows/%s/buttons/%s".format(documentId, tableId, rowId, columnId); return post!PushButtonResponse(url); } @("coda.pushButton") - unittest { + unittest + { auto apiToken = environment.get("CODA_API_TOKEN"); auto coda = CodaApiClient(apiToken); auto tables = coda.listTables("dEJJPwdxcw"); RowValues[] rows = [ RowValues([ - CodaCell("c-p6Yjm8zaEH", "Test Name"), - ]) + CodaCell("c-p6Yjm8zaEH", "Test Name"), + ]) ]; auto buttonColumn = "c-9MA3HmNByK"; auto rowId = "i-HV8Hsf2O8H"; auto buttonResp = coda.pushButton("dEJJPwdxcw", tables[0].id, rowId, buttonColumn); - assert(buttonResp.rowId == rowId); - assert(buttonResp.columnId == buttonColumn); + assert(buttonResp.rowId == rowId, "Expected row ID to be %s, but got %s".format(rowId, buttonResp.rowId)); + assert(buttonResp.columnId == buttonColumn, "Expected column ID to be %s, but got %s".format(buttonColumn, buttonResp.columnId)); } - struct Category { + struct Category + { string name; } @@ -566,11 +599,12 @@ struct CodaApiClient } @("coda.listCategories") - unittest { + unittest + { auto apiToken = environment.get("CODA_API_TOKEN"); auto coda = CodaApiClient(apiToken); auto resp = coda.listCategories(); - assert(resp.length > 0); + assert(resp.length > 0, "Expected at least one category, but got 0"); } void triggerAutomation(string documentId, string automationId, JSONValue req) @@ -580,7 +614,8 @@ struct CodaApiClient } @("coda.triggerAutomation") - unittest { + unittest + { auto apiToken = environment.get("CODA_API_TOKEN"); auto coda = CodaApiClient(apiToken); auto tables = coda.listTables("dEJJPwdxcw"); @@ -592,7 +627,10 @@ struct CodaApiClient coda.triggerAutomation("dEJJPwdxcw", automationId, req); } - static foreach (method; [HTTP.Method.get, HTTP.Method.post, HTTP.Method.del, HTTP.Method.patch, HTTP.Method.put]) + static foreach (method; [ + HTTP.Method.get, HTTP.Method.post, HTTP.Method.del, HTTP.Method.patch, + HTTP.Method.put + ]) { mixin(q{ Response %s(Response)(string endpoint, JSONValue req = JSONValue(null), @@ -643,40 +681,32 @@ struct CodaApiClient http.addRequestHeader("Content-Type", "application/json"); http.addRequestHeader("Authorization", "Bearer " ~ this.apiToken); + JSONValue ret = parseJSON("{}"); + auto reqBody = req.toString(JSONOptions.doNotEscapeSlashes); + reqBody = (reqBody == "null") ? "" : reqBody; + static if (method == HTTP.Method.get) { auto resp = httpGet(baseEndpoint ~ endpoint, http); - return parseJSON(resp); + ret = parseJSON(resp); } else static if (method == HTTP.Method.post) { - auto reqBody = req.toString(JSONOptions.doNotEscapeSlashes); - reqBody = (reqBody == "null") ? 
"" : reqBody; auto resp = httpPost(baseEndpoint ~ endpoint, reqBody, http); - return parseJSON(resp); + ret = parseJSON(resp); } else static if (method == HTTP.Method.put) { - auto reqBody = req.toString(JSONOptions.doNotEscapeSlashes); - reqBody = (reqBody == "null") ? "" : reqBody; auto resp = httpPut(baseEndpoint ~ endpoint, reqBody, http); - return parseJSON(resp); + ret = parseJSON(resp); } else static if (method == HTTP.Method.del) - { - auto reqBody = req.toString(JSONOptions.doNotEscapeSlashes); - reqBody = (reqBody == "null") ? "" : reqBody; httpDelete(baseEndpoint ~ endpoint, http); - return parseJSON("{}"); - } else static if (method == HTTP.Method.patch) - { - auto reqBody = req.toString(JSONOptions.doNotEscapeSlashes); - reqBody = (reqBody == "null") ? "" : reqBody; httpPatch(baseEndpoint ~ endpoint, reqBody, http); - return parseJSON("{}"); - } else static assert(0, "Please implement " ~ method); + + return ret; } } diff --git a/packages/mcl/src/src/mcl/utils/env.d b/packages/mcl/src/src/mcl/utils/env.d index d7a9e1e8..1b379ca4 100644 --- a/packages/mcl/src/src/mcl/utils/env.d +++ b/packages/mcl/src/src/mcl/utils/env.d @@ -72,6 +72,7 @@ unittest { import std.process : environment; import std.exception : assertThrown; + import std.conv : to; environment["A"] = "1"; environment["B"] = "2"; @@ -79,14 +80,14 @@ unittest auto config = parseEnv!Config; - assert(config.a == 1); - assert(config.b == "2"); - assert(config.c == 1.0); - assert(config.opt is null); + assert(config.a == 1, "config.a should be 1, but got " ~ config.a.to!string); + assert(config.b == "2", "config.b should be \"2\", but got " ~ config.b); + assert(config.c == 1.0, "config.c should be 1.0, but got " ~ config.c.to!string); + assert(config.opt is null, "config.opt should be null, but got " ~ config.opt); environment["OPT"] = "3"; config = parseEnv!Config; - assert(config.opt == "3"); + assert(config.opt == "3", "config.opt should be \"3\", but got " ~ config.opt); environment.remove("A"); assertThrown(config = parseEnv!Config, "missing environment variables:\nA\n"); diff --git a/packages/mcl/src/src/mcl/utils/fetch.d b/packages/mcl/src/src/mcl/utils/fetch.d index 99226b14..b1697a56 100644 --- a/packages/mcl/src/src/mcl/utils/fetch.d +++ b/packages/mcl/src/src/mcl/utils/fetch.d @@ -2,6 +2,7 @@ module mcl.utils.fetch; import mcl.utils.test; import std.json : JSONValue; +import std.format : fmt = format; JSONValue fetchJson(string url, string authToken = "") { @@ -25,7 +26,12 @@ JSONValue fetchJson(string url, string authToken = "") unittest { auto json = fetchJson("https://v2.jokeapi.dev/joke/Programming?type=single&idRange=40"); - assert(json["category"].str == "Programming"); - assert(json["type"].str == "single"); - assert(json["joke"].str == "Debugging: Removing the needles from the haystack."); + string actualCategory = json["category"].str; + assert(actualCategory == "Programming", "Expected category to be 'Programming', but got '%s'".fmt(actualCategory)); + + string actualType = json["type"].str; + assert(actualType == "single", "Expected type to be 'single', but got '%s'".fmt(actualType)); + + string actualJoke = json["joke"].str; + assert(actualJoke == "Debugging: Removing the needles from the haystack.", "Expected joke to be 'Debugging: Removing the needles from the haystack.', but got '%s'".fmt(actualJoke)); } diff --git a/packages/mcl/src/src/mcl/utils/json.d b/packages/mcl/src/src/mcl/utils/json.d index 42479674..117c1fd8 100644 --- a/packages/mcl/src/src/mcl/utils/json.d +++ 
b/packages/mcl/src/src/mcl/utils/json.d @@ -1,161 +1,189 @@ module mcl.utils.json; import mcl.utils.test; import mcl.utils.string; -import std.traits: isNumeric, isArray, isSomeChar, ForeachType, isBoolean, isAssociativeArray; -import std.json: JSONValue, JSONOptions, JSONType; -import std.conv: to; -import std.string: strip; -import std.range: front; -import std.stdio: writeln; -import std.algorithm: map; -import std.array: join, array, replace, split; -import std.datetime: SysTime; -import std.sumtype: SumType, isSumType; -import core.stdc.string: strlen; +import std.traits : isNumeric, isArray, isSomeChar, ForeachType, isBoolean, isAssociativeArray; +import std.json : JSONValue, JSONOptions, JSONType; +import std.conv : to; +import std.string : strip; +import std.range : front; +import std.stdio : writeln; +import std.algorithm : map; +import std.array : join, array, replace, split; +import std.datetime : SysTime; +import std.sumtype : SumType, isSumType; +import std.format : format; +import core.stdc.string : strlen; + +string getStrOrDefault(JSONValue value, string defaultValue = "") +{ + return value.isNull ? defaultValue : value.str; +} + +string jsonValueToString(in JSONValue value) +{ + return value.toString(JSONOptions.doNotEscapeSlashes).strip("\""); +} bool tryDeserializeJson(T)(in JSONValue value, out T result) { - try { + try + { result = value.fromJSON!T; return true; - } catch (Exception e) { + } + catch (Exception e) + { return false; } } -T fromJSON(T)(in JSONValue value) { - if (value.isNull) { - return T.init; - } - static if (is(T == JSONValue)) { - return value; - } - else static if (is(T == bool) || is(T == string) || isSomeChar!T || isNumeric!T || is(T == enum)) { - return value.toString(JSONOptions.doNotEscapeSlashes).strip("\"").to!T; - } - else static if (isSumType!T) { - static foreach (SumTypeVariant; T.Types) - {{ - SumTypeVariant result; - if (tryDeserializeJson!SumTypeVariant(value, result)) { - return T(result); - } - }} +T fromJSON(T)(in JSONValue value) +{ + T result; + if (value.isNull) + result = T.init; - throw new Exception("Failed to deserialize JSON value"); - } - else static if (isArray!T) { - static if ( isBoolean!(ForeachType!T)) { - if (value.type == JSONType.string && isBoolean!(ForeachType!T)) { - return value.str.map!(a => a == '1').array; + static if (is(T == JSONValue)) + result = value; + else static if (is(T == bool) || is(T == string) || isSomeChar!T || isNumeric!T || is(T == enum)) + result = jsonValueToString(value).to!T; + else static if (isSumType!T) + { + bool sumTypeDecoded = false; + static foreach (SumTypeVariant; T.Types) + { + { + SumTypeVariant sumTypeResult; + if (tryDeserializeJson!SumTypeVariant(value, sumTypeResult)) + { + sumTypeDecoded = true; + result = sumTypeResult; + } } } - - if (value.type != JSONType.array) { - return [value.fromJSON!(ForeachType!T)]; + if (!sumTypeDecoded) + throw new Exception("Failed to deserialize JSON value"); + } + else static if (isArray!T) + { + static if (isBoolean!(ForeachType!T)) + { + if (value.type == JSONType.string && isBoolean!(ForeachType!T)) + result = value.str.map!(a => a == '1').array; } + if (value.type != JSONType.array) + result = [value.fromJSON!(ForeachType!T)]; + else + result = value.array.map!(a => a.fromJSON!(ForeachType!T)).array; - return value.array.map!(a => a.fromJSON!(ForeachType!T)).array; } - else static if (is(T == SysTime)) { - return SysTime.fromISOExtString(value.toString(JSONOptions.doNotEscapeSlashes).strip("\"")); + else static if (is(T == SysTime)) + { 
+ result = SysTime.fromISOExtString(jsonValueToString(value)); } - else static if (is(T == struct)) { - T result; - static foreach (idx, field; T.tupleof) { - if ((__traits(identifier, field).replace("_", "") in value.object) && !value[__traits(identifier, field).replace("_", "")].isNull) { - result.tupleof[idx] = value[__traits(identifier, field).replace("_", "")].fromJSON!(typeof(field)); + else static if (is(T == struct)) + { + static foreach (idx, field; T.tupleof) + { + if ((__traits(identifier, field).replace("_", "") in value.object) && !value[__traits(identifier, field) + .replace("_", "")].isNull) + { + result.tupleof[idx] = value[__traits(identifier, field) + .replace("_", "")].fromJSON!(typeof(field)); } } - return result; } - else static if (isAssociativeArray!T) { - T result; - foreach (key, val; value.object) { - if (key in result) { + else static if (isAssociativeArray!T) + { + foreach (key, val; value.object) + { + if (key in result) result[key] = val.fromJSON!(typeof(result[key])); - } } - return result; - } - else { - static assert(false, "Unsupported type: `", T, "` ", isSumType!T); } + else + static assert(false, "Unsupported type: `", T, "` ", isSumType!T); + return result; } @("fromJSON") -unittest { +unittest +{ auto x = fromJSON!(SumType!(int, string))(JSONValue("1")); auto y = fromJSON!(SumType!(int, string))(JSONValue(1)); } JSONValue toJSON(T)(in T value, bool simplify = false) { + JSONValue result; static if (is(T == enum)) - { - return JSONValue(value.enumToString); - } + result = JSONValue(value.enumToString); else static if (is(T == bool) || is(T == string) || isSomeChar!T || isNumeric!T) - return JSONValue(value); - else static if ((isArray!T && isSomeChar!(ForeachType!T)) ) { - return JSONValue(value.idup[0..(strlen(value.ptr)-1)]); - } + result = JSONValue(value); + else static if ((isArray!T && isSomeChar!(ForeachType!T))) + result = JSONValue(value.idup[0 .. (strlen(value.ptr) - 1)]); else static if (isArray!T) { if (simplify && value.length == 1) - return value.front.toJSON(simplify); - else if (simplify && isBoolean!(ForeachType!T) ) { - static if (isBoolean!(ForeachType!T)) { - return JSONValue((value.map!(a => a ? '1' : '0').array).to!string); - } - else {assert(0);} + result = value.front.toJSON(simplify); + else if (simplify && isBoolean!(ForeachType!T)) + { + static if (isBoolean!(ForeachType!T)) + result = JSONValue((value.map!(a => a ? 
'1' : '0').array).to!string); + else + assert(0); } - else { - JSONValue[] result; + else + { + JSONValue[] arrayResult; foreach (elem; value) - result ~= elem.toJSON(simplify); - return JSONValue(result); + arrayResult ~= elem.toJSON(simplify); + result = JSONValue(arrayResult); } } - else static if (is(T == SysTime)) { - return JSONValue(value.toISOExtString()); - } + else static if (is(T == SysTime)) + result = JSONValue(value.toISOExtString()); else static if (is(T == struct)) { - JSONValue[string] result; + JSONValue[string] structResult; auto name = ""; static foreach (idx, field; T.tupleof) { name = __traits(identifier, field).strip("_"); - result[name] = value.tupleof[idx].toJSON(simplify); + structResult[name] = value.tupleof[idx].toJSON(simplify); } - return JSONValue(result); + result = JSONValue(structResult); } else static assert(false, "Unsupported type: `" ~ __traits(identifier, T) ~ "`"); + + return result; + } -version(unittest) +version (unittest) { enum TestEnum { - @StringRepresentation("supercalifragilisticexpialidocious") - a, + @StringRepresentation("supercalifragilisticexpialidocious") a, b, c } + struct TestStruct { int a; string b; bool c; } + struct TestStruct2 { int a; TestStruct b; } + struct TestStruct3 { int a; @@ -166,18 +194,50 @@ version(unittest) @("toJSON") unittest { - import std.stdio: writeln; - assert(1.toJSON == JSONValue(1)); - assert(true.toJSON == JSONValue(true)); - assert("test".toJSON == JSONValue("test")); - assert([1, 2, 3].toJSON == JSONValue([1, 2, 3])); - assert(["a", "b", "c"].toJSON == JSONValue(["a", "b", "c"])); - assert([TestEnum.a, TestEnum.b, TestEnum.c].toJSON == JSONValue(["supercalifragilisticexpialidocious", "b", "c"])); - TestStruct testStruct = { 1, "test", true }; - assert(testStruct.toJSON == JSONValue(["a": JSONValue(1), "b": JSONValue("test"), "c": JSONValue(true)])); - TestStruct2 testStruct2 = { 1, testStruct }; - assert(testStruct2.toJSON == JSONValue(["a": JSONValue(1), "b": JSONValue(["a": JSONValue(1), "b": JSONValue("test"), "c": JSONValue(true)])])); - TestStruct3 testStruct3 = { 1, testStruct2 }; - assert(testStruct3.toJSON == JSONValue(["a": JSONValue(1), "b": JSONValue(["a": JSONValue(1), "b": JSONValue(["a": JSONValue(1), "b": JSONValue("test"), "c": JSONValue(true)])])])); -} + void assertEqual(JSONValue actual, JSONValue expected) { + assert(actual == expected, format("Expected %s, but got %s", expected, actual)); + } + + assertEqual(1.toJSON, JSONValue(1)); + assertEqual(true.toJSON, JSONValue(true)); + assertEqual("test".toJSON, JSONValue("test")); + assertEqual([1, 2, 3].toJSON, JSONValue([1, 2, 3])); + assertEqual(["a", "b", "c"].toJSON, JSONValue(["a", "b", "c"])); + assertEqual([TestEnum.a, TestEnum.b, TestEnum.c].toJSON, JSONValue(["supercalifragilisticexpialidocious", "b", "c"])); + TestStruct testStruct = {1, "test", true}; + auto actual = testStruct.toJSON; + auto expected = JSONValue([ + "a": JSONValue(1), + "b": JSONValue("test"), + "c": JSONValue(true) + ]); + assertEqual(actual, expected); + + TestStruct2 testStruct2 = {1, testStruct}; + actual = testStruct2.toJSON; + expected = JSONValue([ + "a": JSONValue(1), + "b": JSONValue([ + "a": JSONValue(1), + "b": JSONValue("test"), + "c": JSONValue(true) + ]) + ]); + assertEqual(actual, expected); + + TestStruct3 testStruct3 = {1, testStruct2}; + actual = testStruct3.toJSON; + expected = JSONValue([ + "a": JSONValue(1), + "b": JSONValue([ + "a": JSONValue(1), + "b": JSONValue([ + "a": JSONValue(1), + "b": JSONValue("test"), + "c": JSONValue(true) + 
]) + ]) + ]); + assertEqual(actual, expected); +} diff --git a/packages/mcl/src/src/mcl/utils/nix.d b/packages/mcl/src/src/mcl/utils/nix.d index e40e410e..4eecdc11 100644 --- a/packages/mcl/src/src/mcl/utils/nix.d +++ b/packages/mcl/src/src/mcl/utils/nix.d @@ -75,7 +75,6 @@ struct NixCommand "`" ~ commandName ~ "` is not a valid Nix command." ); - static if (is(T == JSONValue)) args = ["--json"] ~ args; @@ -94,104 +93,136 @@ struct NixCommand } } -struct Literal { +struct Literal +{ string value; alias value this; - ref Literal opAssign(string value) { this.value = value; return this; } - this(string value) { this.value = value; } + ref Literal opAssign(string value) + { + this.value = value; + return this; + } + + this(string value) + { + this.value = value; + } } -Literal mkDefault(T)(T value) { -import std.traits : isNumeric, isSomeString; -import std.json : JSONValue, JSONOptions; +Literal mkDefault(T)(T value) +{ + import std.traits : isNumeric, isSomeString; + import std.json : JSONValue, JSONOptions; + string ret = "lib.mkDefault "; static if (is(T == Literal)) ret ~= value; else static if (is(T == bool) || isSomeString!T || isNumeric!T) - ret ~=JSONValue(value).toString(JSONOptions.doNotEscapeSlashes); + ret ~= JSONValue(value).toString(JSONOptions.doNotEscapeSlashes); else static assert(false, "Unsupported type: `" ~ T.stringof ~ "`"); return Literal(ret); } -string toNix(T)(in T value, string[] inputs = [], bool topLevel = true, int depth = -1) { -import std.traits : isNumeric, isAssociativeArray, isSomeString, isArray, ForeachType, hasUDA; -import std.json : JSONValue,JSONOptions; -import std.string : join; -import std.array : replicate; -import std.algorithm : map, startsWith, all; -import std.ascii : isUpper; +string toNix(T)(in T value, string[] inputs = [], bool topLevel = true, int depth = -1) +{ + import std.traits : isNumeric, isAssociativeArray, isSomeString, isArray, ForeachType, hasUDA; + import std.json : JSONValue, JSONOptions; + import std.string : join; + import std.array : replicate; + import std.algorithm : map, startsWith, all; + import std.ascii : isUpper; depth++; string res; if (inputs.length) - res ~= "{" ~ inputs.map!(a => a == "dots" ? "..." : a ).join(", ") ~ "}: "; + res ~= "{" ~ inputs.map!(a => a == "dots" ? "..." 
: a).join(", ") ~ "}: "; static if (is(T == Literal)) res ~= value; else static if (is(T == bool) || isSomeString!T || isNumeric!T) - res ~=JSONValue(value).toString(JSONOptions.doNotEscapeSlashes); + res ~= JSONValue(value).toString(JSONOptions.doNotEscapeSlashes); else static if (is(T == struct)) { string[] result; string tempResult; static foreach (idx, field; T.tupleof) { - tempResult = "\t".replicate(depth+1); - static if (is(typeof(field) == Literal)) { + tempResult = "\t".replicate(depth + 1); + static if (is(typeof(field) == Literal)) + { static if (__traits(identifier, field).startsWith("_literal")) tempResult ~= value.tupleof[idx]; else tempResult ~= __traits(identifier, field) ~ " = " ~ value.tupleof[idx] ~ ";"; } - else static if (isArray!(typeof(field)) && is(ForeachType!(typeof(field)) == Literal) && __traits(identifier, field).startsWith("_literal")) + else static if (isArray!(typeof(field)) && is(ForeachType!(typeof(field)) == Literal) && __traits( + identifier, field).startsWith("_literal")) res ~= value.tupleof[idx].map!(a => a.toNix([], false, 0)).join("\n"); - else static if (is(typeof(field) == struct) && field.tupleof.length == 1 && !__traits(identifier, field.tupleof[0]).all!(isUpper)) { - tempResult ~= __traits(identifier, field) ~ "." ~ value.tupleof[idx].toNix([], false, -2) ~ ";"; + else static if (is(typeof(field) == struct) && field.tupleof.length == 1 && !__traits(identifier, field + .tupleof[0]).all!(isUpper)) + { + tempResult ~= __traits(identifier, field) ~ "." ~ value.tupleof[idx].toNix([ + ], false, -2) ~ ";"; } - else static if (isAssociativeArray!(typeof(field))) { - if (value.tupleof[idx].array.length == 1) { - tempResult ~= __traits(identifier, field) ~ "." ~ value.tupleof[idx].toNix([], false, -2) ~ ";"; + else static if (isAssociativeArray!(typeof(field))) + { + if (value.tupleof[idx].array.length == 1) + { + tempResult ~= __traits(identifier, field) ~ "." ~ value.tupleof[idx].toNix([ + ], false, -2) ~ ";"; } - else { - tempResult ~= __traits(identifier, field) ~ " = " ~ value.tupleof[idx].toNix([], false, depth) ~ ";"; + else + { + tempResult ~= __traits(identifier, field) ~ " = " ~ value.tupleof[idx].toNix([ + ], false, depth) ~ ";"; } } - else { - tempResult ~= __traits(identifier, field) ~ " = " ~ value.tupleof[idx].toNix([], false, depth) ~ ";"; + else + { + tempResult ~= __traits(identifier, field) ~ " = " ~ value.tupleof[idx].toNix([ + ], false, depth) ~ ";"; } result ~= tempResult; } static if (T.tupleof.length != 1) - res ~= ("{\n" ~ result.join("\n") ~"\n" ~ "\t".replicate(depth) ~ "}" ~ (topLevel ? "" : ";") ); + res ~= ("{\n" ~ result.join("\n") ~ "\n" ~ "\t".replicate(depth) ~ "}" ~ (topLevel ? "" + : ";")); else res ~= result[0]; } - else static if (isAssociativeArray!T) { + else static if (isAssociativeArray!T) + { string[] result; string tempResult; foreach (key, val; value) { - tempResult = "\t".replicate(depth+1) ~ key.to!string; - static if (is(typeof(val) == struct) && val.tupleof.length == 1) { + tempResult = "\t".replicate(depth + 1) ~ key.to!string; + static if (is(typeof(val) == struct) && val.tupleof.length == 1) + { tempResult ~= "." ~ val.toNix([], false, -2) ~ ";"; } - else static if (isAssociativeArray!(typeof(val))) { - if (value.tupleof[idx].array.length == 1) { + else static if (isAssociativeArray!(typeof(val))) + { + if (value.tupleof[idx].array.length == 1) + { tempResult ~= "." 
~ val.toNix([], false, -2) ~ ";"; } - else { + else + { tempResult ~= " = " ~ val.toNix([], false, depth) ~ ";"; } } - else { + else + { tempResult ~= " = " ~ val.toNix([], false, depth) ~ ";"; } result ~= tempResult; } if (value.length != 1) - res ~= ("{\n" ~ result.join("\n") ~ "\n" ~ "\t".replicate(depth) ~ "}" ~ (topLevel ? "" : ";")); + res ~= ("{\n" ~ result.join("\n") ~ "\n" ~ "\t".replicate(depth) ~ "}" ~ (topLevel ? "" + : ";")); else res ~= result[0]; @@ -199,19 +230,23 @@ import std.ascii : isUpper; else static if (is(T == U[], U)) { string[] result; - if (value.length > 1) { + if (value.length > 1) + { result ~= "["; - foreach (elem; value){ - result ~= "\t".replicate(depth+1) ~ elem.toNix([], false, depth); + foreach (elem; value) + { + result ~= "\t".replicate(depth + 1) ~ elem.toNix([], false, depth); } result ~= "\t".replicate(depth) ~ "];"; res ~= result.join("\n"); } - else if (value.length == 1) { + else if (value.length == 1) + { res ~= "[" ~ value[0].toNix([], false, depth) ~ "]"; } - else { + else + { res ~= "[]"; } } @@ -226,23 +261,31 @@ import std.ascii : isUpper; @("toNix") unittest { + void assertToNix(T)(T input, string expected) + { + auto actual = toNix(input); + assert(actual == expected, fmt("For input '%s', expected '%s', but got '%s'", input, expected, actual)); + } + struct TestStruct { int a; string b; bool c; } - assert(toNix(TestStruct(1, "hello", true)) == "{\n\ta = 1;\n\tb = \"hello\";\n\tc = true;\n}"); - assert(toNix(true) == "true"); - assert(toNix("hello") == "\"hello\""); - assert(toNix(1) == "1"); + + assertToNix(TestStruct(1, "hello", true), "{\n\ta = 1;\n\tb = \"hello\";\n\tc = true;\n}"); + assertToNix(true, "true"); + assertToNix("hello", "\"hello\""); + assertToNix(1, "1"); struct TestStruct2 { int a; TestStruct b; } - assert(toNix(TestStruct2(1, TestStruct(2, "hello", false))) == "{\n\ta = 1;\n\tb = {\n\t\ta = 2;\n\t\tb = \"hello\";\n\t\tc = false;\n\t};\n}"); + + assertToNix(TestStruct2(1, TestStruct(2, "hello", false)), "{\n\ta = 1;\n\tb = {\n\t\ta = 2;\n\t\tb = \"hello\";\n\t\tc = false;\n\t};\n}"); } @("nix.run") @@ -255,8 +298,8 @@ unittest auto p = __FILE__.absolutePath.dirName; - string output = nix().run(p ~ "/test/test.nix", [ "--file"]); - assert(output == "Hello World", "Expected 'Hello World', got: " ~ output); + string output = nix().run(p ~ "/test/test.nix", ["--file"]); + assert(output == "Hello World", "Expected 'Hello World', but got '" ~ output ~ "'"); } @("nix.build!JSONValue") @@ -273,7 +316,9 @@ unittest JSONValue output = nix().build!JSONValue(p ~ "/test/test.nix", ["--file"]); assert(output.type == JSONType.array, "Expected an array, got: " ~ output.type.to!string); output = output.array.front; - assert(execute([output["outputs"]["out"].str ~ "/bin/helloWorld"]).strip == "Hello World"); + auto actual = execute([output["outputs"]["out"].str ~ "/bin/helloWorld"]).strip; + auto expected = "Hello World"; + assert(actual == expected, fmt("Expected %s, but got %s", expected, actual)); } @("nix.eval!JSONValue") @@ -287,5 +332,6 @@ unittest auto expectedOutputFile = inputFile.setExtension("json"); auto output = nix().eval!JSONValue(inputFile, ["--file"]); - assert(output == expectedOutputFile.readText.parseJSON); + assert(output == expectedOutputFile.readText.parseJSON, "Expected " ~ expectedOutputFile.readText ~ ", but got " ~ output + .toString()); } diff --git a/packages/mcl/src/src/mcl/utils/number.d b/packages/mcl/src/src/mcl/utils/number.d index 178864e7..61b18ce6 100644 --- a/packages/mcl/src/src/mcl/utils/number.d 
diff --git a/packages/mcl/src/src/mcl/utils/path.d b/packages/mcl/src/src/mcl/utils/path.d
index 89b5bb4e..29d53ae7 100644
--- a/packages/mcl/src/src/mcl/utils/path.d
+++ b/packages/mcl/src/src/mcl/utils/path.d
@@ -3,6 +3,7 @@ module mcl.utils.path;
import std.process : execute;
import std.string : strip;
import std.file : mkdirRecurse, rmdir, exists;
+import std.format : format;
import std.path : buildNormalizedPath, absolutePath;
immutable string rootDir, resultDir, gcRootsDir;
@@ -32,33 +33,32 @@ string getTopLevel()
@("rootDir")
unittest
{
- assert(rootDir == getTopLevel());
+ assert(rootDir == getTopLevel, "Expected rootDir to return %s, got %s".format(getTopLevel(), rootDir));
}
@("resultDir")
unittest
{
- assert(resultDir == rootDir.buildNormalizedPath(".result"));
+ auto expected = rootDir.buildNormalizedPath(".result");
+ assert(resultDir == expected, "Expected resultDir to return %s, got %s".format(expected, resultDir));
}
@("gcRootsDir")
unittest
{
- assert(gcRootsDir == resultDir.buildNormalizedPath("gc-roots"));
+ auto expected = resultDir.buildNormalizedPath("gc-roots/");
+ assert(gcRootsDir == expected, "Expected gcRootsDir to return %s, got %s".format(expected, gcRootsDir));
}
-void createResultDirs()
-{
- mkdirRecurse(gcRootsDir);
-}
+void createResultDirs() => mkdirRecurse(gcRootsDir);
@("createResultDirs")
unittest
{
createResultDirs();
- assert(gcRootsDir.exists);
+ assert(gcRootsDir.exists, "Expected gcRootsDir to exist, but it doesn't");
// rmdir(gcRootsDir());
// rmdir(resultDir());
- // assert(!gcRootsDir.exists);
+ // assert(!gcRootsDir.exists, "Expected gcRootsDir to not exist, but it does");
}
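The createResultDirs change above folds the body into D's shortened function syntax. The two definitions below are equivalent; the names are illustrative and the short form needs a compiler recent enough to accept expression-bodied functions (current dmd/ldc releases do):

    import std.file : mkdirRecurse;

    // Conventional form:
    void ensureDirVerbose(string dir)
    {
        mkdirRecurse(dir);
    }

    // Shortened form, as now used for createResultDirs above:
    void ensureDirShort(string dir) => mkdirRecurse(dir);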
diff --git a/packages/mcl/src/src/mcl/utils/process.d b/packages/mcl/src/src/mcl/utils/process.d
index 9d54987d..3910c9bc 100644
--- a/packages/mcl/src/src/mcl/utils/process.d
+++ b/packages/mcl/src/src/mcl/utils/process.d
@@ -4,16 +4,20 @@ import std.process : ProcessPipes;
import std.string : split, strip;
import core.sys.posix.unistd : geteuid;
import std.json : JSONValue, parseJSON;
+import std.format : format;
bool isRoot() => geteuid() == 0;
string bold(string s) => "\033[1m" ~ s ~ "\033[0m";
-T execute(T = string)(string args, bool printCommand = true, bool returnErr = false) if (is(T == string) || is(T == ProcessPipes) || is(T == JSONValue))
+T execute(T = string)(string args, bool printCommand = true, bool returnErr = false)
+ if (is(T == string) || is(T == ProcessPipes) || is(T == JSONValue))
{
return execute!T(args.split(" "), printCommand, returnErr);
}
-T execute(T = string)(string[] args, bool printCommand = true, bool returnErr = false) if (is(T == string) || is(T == ProcessPipes) || is(T == JSONValue))
+
+T execute(T = string)(string[] args, bool printCommand = true, bool returnErr = false)
+ if (is(T == string) || is(T == ProcessPipes) || is(T == JSONValue))
{
import std.exception : enforce;
import std.format : format;
@@ -55,7 +59,8 @@ T execute(T = string)(string[] args, bool printCommand = true, bool returnErr =
output = stderr;
}
- static if (is(T == string)) {
+ static if (is(T == string))
+ {
return output.strip;
}
else
@@ -70,8 +75,11 @@ unittest
{
import std.exception : assertThrown;
- assert(execute(["echo", "hello"]) == "hello");
- assert(execute(["true"]) == "");
+ auto actual = execute(["echo", "hello"]);
+ assert(actual == "hello", format("Expected '%s', but got '%s'", "hello", actual));
+
+ actual = execute(["true"]);
+ assert(actual == "", format("Expected '%s', but got '%s'", "", actual));
// assertThrown(execute(["false"]), "Command `false` failed with status 1");
}
@@ -84,7 +92,6 @@ void spawnProcessInline(string[] args)
const bold = "\033[1m";
const normal = "\033[0m";
- tracef("$ %s%-(%s %)%s", bold, args, normal);
auto pid = spawnProcess(args);
diff --git a/packages/mcl/src/src/mcl/utils/string.d b/packages/mcl/src/src/mcl/utils/string.d
index 31fb50df..6e8e60cc 100644
--- a/packages/mcl/src/src/mcl/utils/string.d
+++ b/packages/mcl/src/src/mcl/utils/string.d
@@ -3,6 +3,7 @@ module mcl.utils.string;
import mcl.utils.test;
import std.conv : to;
import std.exception : assertThrown;
+import std.format : format;
string lowerCaseFirst(string r)
{
@@ -30,22 +31,25 @@ string camelCaseToCapitalCase(string camelCase)
@("camelCaseToCapitalCase")
unittest
{
- assert(camelCaseToCapitalCase("camelCase") == "CAMEL_CASE");
-
- assert(camelCaseToCapitalCase("") == "");
- assert(camelCaseToCapitalCase("_") == "_");
- assert(camelCaseToCapitalCase("a") == "A");
- assert(camelCaseToCapitalCase("ab") == "AB");
- assert(camelCaseToCapitalCase("aB") == "A_B");
- assert(camelCaseToCapitalCase("aBc") == "A_BC");
- assert(camelCaseToCapitalCase("aBC") == "A_BC");
- assert(camelCaseToCapitalCase("aBCD") == "A_BCD");
- assert(camelCaseToCapitalCase("aBcD") == "A_BC_D");
-
- assert(camelCaseToCapitalCase("rpcUrl") == "RPC_URL");
- assert(camelCaseToCapitalCase("parsedJSON") == "PARSED_JSON");
- assert(camelCaseToCapitalCase("fromXmlToJson") == "FROM_XML_TO_JSON");
- assert(camelCaseToCapitalCase("fromXML2JSON") == "FROM_XML2JSON");
+ void assertCCToCC(string input, string expected) {
+ auto actual = camelCaseToCapitalCase(input);
+ assert(actual == expected, format("For input '%s', expected '%s', but got '%s'", input, expected, actual));
+ }
+
+ assertCCToCC("camelCase", "CAMEL_CASE");
+ assertCCToCC("", "");
+ assertCCToCC("_", "_");
+ assertCCToCC("a", "A");
+ assertCCToCC("ab", "AB");
+ assertCCToCC("aB", "A_B");
+ assertCCToCC("aBc", "A_BC");
+ assertCCToCC("aBC", "A_BC");
+ assertCCToCC("aBCD", "A_BCD");
+ assertCCToCC("aBcD", "A_BC_D");
+ assertCCToCC("rpcUrl", "RPC_URL");
+ assertCCToCC("parsedJSON", "PARSED_JSON");
+ assertCCToCC("fromXmlToJson", "FROM_XML_TO_JSON");
+ assertCCToCC("fromXML2JSON", "FROM_XML2JSON");
}
string kebabCaseToCamelCase(string kebabCase)
@@ -65,23 +69,30 @@ string kebabCaseToCamelCase(string kebabCase)
@("kebabCaseToCamelCase")
unittest
{
- assert(kebabCaseToCamelCase("kebab-case") == "kebabCase");
- assert(kebabCaseToCamelCase("kebab-case-") == "kebabCase");
- assert(kebabCaseToCamelCase("kebab-case--") == "kebabCase");
- assert(kebabCaseToCamelCase("kebab-case--a") == "kebabCaseA");
- assert(kebabCaseToCamelCase("kebab-case--a-") == "kebabCaseA");
-
- assert(kebabCaseToCamelCase(
- "once-upon-a-midnight-dreary-while-i-pondered-weak-and-weary" ~
- "-over-many-a-quaint-and-curious-volume-of-forgotten-lore" ~
- "-while-i-nodded-nearly-napping-suddenly-there-came-a-tapping" ~
- "-as-of-someone-gently-rapping-rapping-at-my-chamber-door" ~
- "-tis-some-visitor-i-muttered-tapping-at-my-chamber-door" ~
- "-only-this-and-nothing-more") == "onceUponAMidnightDrearyWhileIPonderedWeakAndWeary" ~
- "OverManyAQuaintAndCuriousVolumeOfForgottenLore" ~ "WhileINoddedNearlyNappingSuddenlyThereCameATapping" ~
- "AsOfSomeoneGentlyRappingRappingAtMyChamberDoor" ~
- "TisSomeVisitorIMutteredTappingAtMyChamberDoor" ~
- "OnlyThisAndNothingMore");
+ void assertKCToCC(string input, string expected) {
+ auto actual = kebabCaseToCamelCase(input);
+ assert(actual == expected, format("For input '%s', expected '%s', but got '%s'", input, expected, actual));
+ }
+
+ assertKCToCC("kebab-case", "kebabCase");
+ assertKCToCC("kebab-case-", "kebabCase");
+ assertKCToCC("kebab-case--", "kebabCase");
+ assertKCToCC("kebab-case--a", "kebabCaseA");
+ assertKCToCC("kebab-case--a-", "kebabCaseA");
+
+ assertKCToCC(
+ "once-upon-a-midnight-dreary-while-i-pondered-weak-and-weary" ~
+ "-over-many-a-quaint-and-curious-volume-of-forgotten-lore" ~
+ "-while-i-nodded-nearly-napping-suddenly-there-came-a-tapping" ~
+ "-as-of-someone-gently-rapping-rapping-at-my-chamber-door" ~
+ "-tis-some-visitor-i-muttered-tapping-at-my-chamber-door" ~
+ "-only-this-and-nothing-more",
+ "onceUponAMidnightDrearyWhileIPonderedWeakAndWeary" ~
+ "OverManyAQuaintAndCuriousVolumeOfForgottenLore" ~ "WhileINoddedNearlyNappingSuddenlyThereCameATapping" ~
+ "AsOfSomeoneGentlyRappingRappingAtMyChamberDoor" ~
+ "TisSomeVisitorIMutteredTappingAtMyChamberDoor" ~
+ "OnlyThisAndNothingMore"
+ );
}
@@ -98,7 +109,7 @@ string enumToString(E)(in E value) if (is(E == enum))
{
static foreach (enumMember; EnumMembers!E)
{
- case enumMember:
+ case enumMember:
{
static if (!hasUDA!(enumMember, StringRepresentation))
{
@@ -118,6 +129,11 @@ string enumToString(E)(in E value) if (is(E == enum))
@("enumToString")
unittest
{
+ void assertEnumToString(T)(T input, string expected) if (is(T == enum)) {
+ auto actual = enumToString(input);
+ assert(actual == expected, format("For input '%s', expected '%s', but got '%s'", input, expected, actual));
+ }
+
enum TestEnum
{
a1,
@@ -125,9 +141,9 @@ unittest
c3
}
- assert(enumToString(TestEnum.a1) == "a1");
- assert(enumToString(TestEnum.b2) == "b2");
- assert(enumToString(TestEnum.c3) == "c3");
+ assertEnumToString(TestEnum.a1, "a1");
+ assertEnumToString(TestEnum.b2, "b2");
+ assertEnumToString(TestEnum.c3, "c3");
enum TestEnumWithRepr
{
@@ -136,13 +152,12 @@ unittest
@StringRepresentation("field-3") c
}
- assert(enumToString(TestEnumWithRepr.a) == "field1");
- assert(enumToString(TestEnumWithRepr.b) == "field_2");
- assert(enumToString(TestEnumWithRepr.c) == "field-3");
+ assertEnumToString(TestEnumWithRepr.a, "field1");
+ assertEnumToString(TestEnumWithRepr.b, "field_2");
+ assertEnumToString(TestEnumWithRepr.c, "field-3");
}
-enum size_t getMaxEnumMemberNameLength(E) = ()
-{
+enum size_t getMaxEnumMemberNameLength(E) = () {
import std.traits : EnumMembers;
size_t max = 0;
@@ -165,7 +180,7 @@ unittest
@StringRepresentation("c123") c
}
- static assert(getMaxEnumMemberNameLength!EnumLen == 4);
+ static assert(getMaxEnumMemberNameLength!EnumLen == 4, "getMaxEnumMemberNameLength should return 4 for EnumLen");
}
struct MaxWidth
{
@@ -184,28 +199,30 @@ void writeRecordAsTable(bool ansiColors = true, T, Writer)(in T obj, auto ref Wr
w.formattedWrite("│");
static foreach (idx, field; T.tupleof)
- {{
- // If the field is an enum, get the maximum length of the enum member names
- static if (is(typeof(field) == enum))
- const width = getMaxEnumMemberNameLength!(typeof(field));
-
- // If the field is a bool, set the width to "false".length
- else static if (is(typeof(field) : bool))
- const width = 5;
-
- // If the field has a UDA MaxWidth, set the width to the value of the UDA
- else static if (hasUDA!(field, MaxWidth))
- const width = getUDAs!(field, MaxWidth)[0].value;
-
- else
- const width = 0;
-
- w.formattedWrite(
- " %s%s%s: %s%*-s%s │",
- gray, __traits(identifier, field), normal,
- bold, width, obj.tupleof[idx], normal
- );
- }}
+ {
+ {
+ // If the field is an enum, get the maximum length of the enum member names
+ static if (is(typeof(field) == enum))
+ const width = getMaxEnumMemberNameLength!(typeof(field));
+
+ // If the field is a bool, set the width to "false".length
+ else static if (is(typeof(field) : bool))
+ const width = 5;
+
+ // If the field has a UDA MaxWidth, set the width to the value of the UDA
+ else static if (hasUDA!(field, MaxWidth))
+ const width = getUDAs!(field, MaxWidth)[0].value;
+
+ else
+ const width = 0;
+
+ w.formattedWrite(
+ " %s%s%s: %s%*-s%s │",
+ gray, __traits(identifier, field), normal,
+ bold, width, obj.tupleof[idx], normal
+ );
+ }
+ }
w.formattedWrite("\n");
}
@@ -226,5 +243,6 @@ unittest
const t = TestStruct(1, 20, true, false, "test");
auto result = appender!string;
t.writeRecordAsTable!false(result);
- assert(result.data == "│ num: 1 │ otherNum: 20 │ bool1: true │ bool2: false │ someString: test │\n");
+ assert(
+ result.data == "│ num: 1 │ otherNum: 20 │ bool1: true │ bool2: false │ someString: test │\n");
}
diff --git a/packages/mcl/src/src/mcl/utils/test/disable_logging.d b/packages/mcl/src/src/mcl/utils/test/disable_logging.d
index 6863cffa..f06edbfd 100644
--- a/packages/mcl/src/src/mcl/utils/test/disable_logging.d
+++ b/packages/mcl/src/src/mcl/utils/test/disable_logging.d
@@ -5,6 +5,7 @@ version (unittest)
shared static this()
{
import std.logger : sharedLog, LogLevel, NullLogger;
+
sharedLog = cast(shared NullLogger) new NullLogger(LogLevel.all);
}
}
diff --git a/packages/mcl/src/src/mcl/utils/test/eval.json b/packages/mcl/src/src/mcl/utils/test/eval.json
index 7c07f315..7bd4301c 100644
--- a/packages/mcl/src/src/mcl/utils/test/eval.json
+++ b/packages/mcl/src/src/mcl/utils/test/eval.json
@@ -1,33 +1,34 @@
{
- "$schema": "../../books-schema.json",
- "sources": [
- "https://middle-earth.xenite.org/what-are-the-various-books-named-in-the-lord-of-the-rings/"
- ],
- "books": [{
- "title": "The Red Book of Westmarch",
- "author": "Bilbo/Frodo/Sam",
- "category": []
- },
- {
- "title": "Herblore of the Shire ",
- "author": "Meriadoc Brandybuck, Master of Buckland",
- "category": []
- },
- {
- "title": "Reckoning of Years",
- "author": "Meriadoc Brandybuck, Master of Buckland",
- "category": [
- "Scholarly"
- ]
- },
- {
- "title": "The Tale of Years",
- "category": []
- },
- {
- "title": "The Book of Mazarbul",
- "author": "Ori",
- "category": []
- }
- ]
-}
\ No newline at end of file
+ "$schema": "../../books-schema.json",
+ "sources": [
+ "https://middle-earth.xenite.org/what-are-the-various-books-named-in-the-lord-of-the-rings/"
+ ],
+ "books": [
+ {
+ "title": "The Red Book of Westmarch",
+ "author": "Bilbo/Frodo/Sam",
+ "category": []
+ },
+ {
+ "title": "Herblore of the Shire ",
+ "author": "Meriadoc Brandybuck, Master of Buckland",
+ "category": []
+ },
+ {
+ "title": "Reckoning of Years",
+ "author": "Meriadoc Brandybuck, Master of Buckland",
+ "category": [
+ "Scholarly"
+ ]
+ },
+ {
+ "title": "The Tale of Years",
+ "category": []
+ },
+ {
+ "title": "The Book of Mazarbul",
+ "author": "Ori",
+ "category": []
+ }
+ ]
+}
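For readers unfamiliar with the disable_logging module touched above: a shared static this module constructor runs before any unit test, so installing a NullLogger there silences std.logger output for the entire test binary. A standalone sketch of the same mechanism (the module name is illustrative):

    module tests.silence_logging;

    version (unittest) shared static this()
    {
        import std.logger : sharedLog, LogLevel, NullLogger;

        // sharedLog expects a shared Logger, hence the cast on the fresh instance.
        sharedLog = cast(shared NullLogger) new NullLogger(LogLevel.all);
    }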
diff --git a/packages/mcl/src/src/mcl/utils/test/nix/shard-matrix-no-shards/flake.nix b/packages/mcl/src/src/mcl/utils/test/nix/shard-matrix-no-shards/flake.nix
index 14739010..f8cc757a 100644
--- a/packages/mcl/src/src/mcl/utils/test/nix/shard-matrix-no-shards/flake.nix
+++ b/packages/mcl/src/src/mcl/utils/test/nix/shard-matrix-no-shards/flake.nix
@@ -1,7 +1,4 @@
{
- outputs =
- { ... }:
- {
-
- };
+ outputs = {...}: {
+ };
}
diff --git a/packages/mcl/src/src/mcl/utils/user_info.d b/packages/mcl/src/src/mcl/utils/user_info.d
index c3e50cbc..d9c51b3e 100644
--- a/packages/mcl/src/src/mcl/utils/user_info.d
+++ b/packages/mcl/src/src/mcl/utils/user_info.d
@@ -2,9 +2,7 @@ module mcl.utils.user_info;
import std.typecons : Nullable, nullable;
-version (Posix):
-
-import core.sys.posix.sys.types : gid_t;
+version (Posix) : import core.sys.posix.sys.types : gid_t;
import core.sys.posix.unistd : getgid;
import core.sys.posix.grp : getgrnam;
@@ -36,7 +34,6 @@ unittest
}
}
-
Nullable!gid_t getGroupId(in char[] groupName)
{
import std.internal.cstring : tempCString;
diff --git a/packages/mcl/test.sh b/packages/mcl/test.sh
new file mode 100755
index 00000000..2781f23a
--- /dev/null
+++ b/packages/mcl/test.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env sh
+#export LD_DEBUG=all
+export LD_LIBRARY_PATH=$(nix eval --raw nixpkgs#curl.out.outPath)/lib:$LD_LIBRARY_PATH
+dub test --build="unittest" --compiler ldc2 -- -e 'coda|fetchJson'
diff --git a/shells/default.nix b/shells/default.nix
index 92346449..6bbf8320 100644
--- a/shells/default.nix
+++ b/shells/default.nix
@@ -26,6 +26,8 @@ in
inputs'.dlang-nix.packages.dmd
inputs'.dlang-nix.packages.dub
act
+ curl
+ pkg-config
];
shellHook = ''