koboldai: init #3

Merged (4 commits), Feb 25, 2023
4 changes: 4 additions & 0 deletions AUTHORS
@@ -0,0 +1,4 @@
MAINTAINERS:

Matthew Croughan <[email protected]>
Max Headroom <[email protected]>
21 changes: 9 additions & 12 deletions README.md
@@ -1,22 +1,19 @@
 # nixified.ai
 
 The goal of nixified.ai is to simplify and make available a large repository of
-AI executable code, that would otherwise be impractical to run yourself, due to
-package management issues.
+AI executable code that would otherwise be impractical to run yourself, due to
+package management and complexity issues.
 
 The main outputs of the `flake.nix` at the moment are as follows:
 
-###### Linux
+These outputs will run on Windows via [NixOS-WSL](https://github.com/nix-community/NixOS-WSL). It is able to utilize the GPU of the Windows host automatically, as our wrapper script sets `LD_LIBRARY_PATH` to make use of the host drivers.
 
-- `.#invokeai-amd`
-- `.#invokeai-nvidia`
+##### KoboldAI ( A WebUI for GPT Writing )
 
-###### Windows
+- `nix run .#koboldai-amd`
+- `nix run .#koboldai-nvidia`
 
-These outputs will run on Windows via [NixOS-WSL](https://github.com/nix-community/NixOS-WSL)
+##### InvokeAI ( A Stable Diffusion WebUI )
 
-- `.#invokeai-amd-wsl`
-- `.#invokeai-nvidia-wsl`
-
-They can be ran using `nix run`, such as `nix run .#invokeai-nvidia-wsl` on a
-Windows machine via the WSL, or `nix run .#invokeai-nvidia` on a Linux host.
+- `nix run .#invokeai-amd`
+- `nix run .#invokeai-nvidia`
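
As a quick sanity check of the new README instructions, a minimal usage sketch (the checkout location is an assumption; on Windows this presumes NixOS-WSL is already installed):

```sh
# From a checkout of this flake, on a Linux host or inside NixOS-WSL:
nix run .#koboldai-nvidia    # or .#koboldai-amd, .#invokeai-nvidia, .#invokeai-amd

# No separate *-wsl output is needed: the wrapper scripts check for
# /usr/lib/wsl/lib at run time and export LD_LIBRARY_PATH when it exists.
```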
18 changes: 18 additions & 0 deletions flake.lock

Some generated files are not rendered by default.

5 changes: 5 additions & 0 deletions flake.nix
@@ -14,6 +14,10 @@
       url = "github:invoke-ai/InvokeAI/v2.2.5";
       flake = false;
     };
+    koboldai-src = {
+      url = "github:koboldai/koboldai-client/1.19.2";
+      flake = false;
+    };
     flake-parts = {
       url = "github:hercules-ci/flake-parts";
       inputs.nixpkgs-lib.follows = "nixpkgs";
@@ -28,6 +32,7 @@
         ./modules/dependency-sets
         ./modules/aipython3
         ./projects/invokeai
+        ./projects/koboldai
       ];
     };
 }
1 change: 1 addition & 0 deletions modules/aipython3/overlays.nix
@@ -21,6 +21,7 @@ pkgs: {
     callPackage = final.callPackage;
     rmCallPackage = path: args: rm (callPackage path args);
   in {
+    apispec-webframeworks = callPackage ../../packages/apispec-webframeworks { };
     pydeprecate = callPackage ../../packages/pydeprecate { };
     taming-transformers-rom1504 =
       callPackage ../../packages/taming-transformers-rom1504 { };
36 changes: 36 additions & 0 deletions packages/apispec-webframeworks/default.nix
@@ -0,0 +1,36 @@
{ lib, python3Packages }:

python3Packages.buildPythonPackage rec {
  pname = "apispec-webframeworks";
  version = "0.5.2";
  disabled = python3Packages.pythonOlder "3.6";

  src = python3Packages.fetchPypi {
    inherit pname version;
    hash = "sha256-DbNbJnkUs/jFYqygJhlX28tBdvJV6swiUgJ3AQgY3PM=";
  };

  propagatedBuildInputs = with python3Packages; [
    apispec
    packaging
  ];

  nativeCheckInputs = with python3Packages; [
    pytestCheckHook
    mock
    flask
    tornado
    bottle
  ];

  doCheck = false;

  pythonImportsCheck = [ "apispec_webframeworks" ];

  meta = with lib; {
    description = "apispec plugin for integrating with various web frameworks";
    homepage = "https://github.com/marshmallow-code/apispec-webframeworks";
    license = licenses.mit;
    maintainers = [ maintainers.sikmir ];
  };
}
8 changes: 0 additions & 8 deletions projects/invokeai/default.nix
@@ -15,14 +15,6 @@
     invokeai-nvidia = mkInvokeAIVariant {
       aipython3 = aipython3-nvidia;
     };
-    invokeai-amd-wsl = mkInvokeAIVariant {
-      aipython3 = aipython3-amd;
-      wsl = true;
-    };
-    invokeai-nvidia-wsl = mkInvokeAIVariant {
-      aipython3 = aipython3-nvidia;
-      wsl = true;
-    };
   };
 };
 }
15 changes: 12 additions & 3 deletions projects/invokeai/package.nix
@@ -4,8 +4,6 @@
 # misc
 , lib
 , src
-# configuration
-, wsl ? false
 }:
 
 let
@@ -60,8 +58,19 @@ aipython3.buildPythonPackage {
   nativeBuildInputs = [ aipython3.pythonRelaxDepsHook ];
   pythonRemoveDeps = [ "clip" "pyreadline3" "flaskwebgui" ];
   pythonRelaxDeps = [ "protobuf" ];
+  makeWrapperArgs = [
+    '' --run '
+      if [ -d "/usr/lib/wsl/lib" ]
+      then
+        echo "Running via WSL (Windows Subsystem for Linux), setting LD_LIBRARY_PATH=/usr/lib/wsl/lib"
+        set -x
+        export LD_LIBRARY_PATH="/usr/lib/wsl/lib"
+        set +x
+      fi
+    '
+  ''
+  ];
   postFixup = ''
-    ${lib.optionalString wsl "makeWrapperArgs+=( --set LD_LIBRARY_PATH '/usr/lib/wsl/lib' )"}
     chmod +x $out/bin/*
     wrapPythonPrograms
   '';
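
The effect of this change is that WSL support moves from a build-time flag (`wsl ? false`) to a runtime check in the wrapper, so one output now serves both environments. A hedged before/after sketch:

```sh
# Before this PR: separate outputs selected WSL support at build time.
nix run .#invokeai-nvidia-wsl   # output removed by this PR

# After this PR: a single output; the wrapper detects /usr/lib/wsl/lib at launch.
nix run .#invokeai-nvidia
```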
20 changes: 20 additions & 0 deletions projects/koboldai/default.nix
@@ -0,0 +1,20 @@
{ inputs, lib, ... }:

{
  perSystem = { config, pkgs, ... }: let
    inherit (config.dependencySets) aipython3-amd aipython3-nvidia;

    src = inputs.koboldai-src;

    mkKoboldAIVariant = args: pkgs.callPackage ./package.nix ({ inherit src; } // args);
  in {
    packages = {
      koboldai-nvidia = mkKoboldAIVariant {
        aipython3 = aipython3-nvidia;
      };
      koboldai-amd = mkKoboldAIVariant {
        aipython3 = aipython3-amd;
      };
    };
  };
}
109 changes: 109 additions & 0 deletions projects/koboldai/package.nix
@@ -0,0 +1,109 @@
{ aipython3
, lib
, src
, wsl ? false
, fetchFromGitHub
, writeShellScriptBin
, runCommand
, tmpDir ? "/tmp/nix-koboldai"
, stateDir ? "$HOME/.koboldai/state"
}:
let
  overrides = {
    transformers = aipython3.transformers.overrideAttrs (old: rec {
      propagatedBuildInputs = old.propagatedBuildInputs ++ [ aipython3.huggingface-hub ];
      pname = "transformers";
      version = "4.24.0";
      src = fetchFromGitHub {
        owner = "huggingface";
        repo = pname;
        rev = "refs/tags/v${version}";
        hash = "sha256-aGtTey+QK12URZcGNaRAlcaOphON4ViZOGdigtXU1g0=";
      };
    });
    bleach = aipython3.bleach.overrideAttrs (old: rec {
      pname = "bleach";
      version = "4.1.0";
      src = fetchFromGitHub {
        owner = "mozilla";
        repo = pname;
        rev = "refs/tags/v${version}";
        hash = "sha256-YuvH8FvZBqSYRt7ScKfuTZMsljJQlhFR+3tg7kABF0Y=";
      };
    });
  };
  # The original kobold-ai program wants to write models, settings, and user
  # scripts to the current working directory, but it tries to write to the
  # /nix/store erroneously due to mismanagement of the current working
  # directory in its source code. The patching below replicates the original
  # functionality of the program by making symlinks in the source code
  # directory that point to ${tmpDir}.
  #
  # The wrapper script we have made for the program will then create another
  # symlink that points to ${stateDir}, so the default symlink trail ultimately
  # looks like the following:
  #
  # /nix/store/kobold-ai/models -> /tmp/nix-koboldai -> ~/.koboldai/state
  patchedSrc = runCommand "koboldAi-patchedSrc" {} ''
    cp -r --no-preserve=mode ${src} ./src
    cd src
    rm -rf models settings userscripts
    cd -
    substituteInPlace ./src/aiserver.py --replace 'os.system("")' 'STATE_DIR = os.path.expandvars("${stateDir}")'
    substituteInPlace ./src/aiserver.py --replace 'cache_dir="cache"' "cache_dir=os.path.join(STATE_DIR, 'cache')"
    substituteInPlace ./src/aiserver.py --replace 'shutil.rmtree("cache/")' 'shutil.rmtree(os.path.join(STATE_DIR, "cache"))'
    substituteInPlace ./src/aiserver.py --replace "app.config['SESSION_TYPE'] = 'filesystem'" "app.config['SESSION_TYPE'] = 'memcached'"
    mv ./src $out
    ln -s ${tmpDir}/models/ $out/models
    ln -s ${tmpDir}/settings/ $out/settings
    ln -s ${tmpDir}/userscripts/ $out/userscripts
  '';
  koboldPython = aipython3.python.withPackages (_: with aipython3; [
    overrides.bleach
    overrides.transformers
    colorama
    flask
    flask-socketio
    flask-session
    eventlet
    dnspython
    markdown
    sentencepiece
    protobuf
    marshmallow
    loguru
    termcolor
    psutil
    torch-bin
    torchvision-bin
    apispec
    apispec-webframeworks
    lupa
    memcached
  ]);
in
(writeShellScriptBin "koboldai" ''
  if [ -d "/usr/lib/wsl/lib" ]
  then
    echo "Running via WSL (Windows Subsystem for Linux), setting LD_LIBRARY_PATH"
    set -x
    export LD_LIBRARY_PATH="/usr/lib/wsl/lib"
    set +x
  fi
  rm -rf ${tmpDir}
  mkdir -p ${tmpDir}
  mkdir -p ${stateDir}/models ${stateDir}/cache ${stateDir}/settings ${stateDir}/userscripts
  ln -s ${stateDir}/models/ ${tmpDir}/models
  ln -s ${stateDir}/settings/ ${tmpDir}/settings
  ln -s ${stateDir}/userscripts/ ${tmpDir}/userscripts
  ${koboldPython}/bin/python ${patchedSrc}/aiserver.py $@
'').overrideAttrs
  (_: {
    meta = {
      maintainers = [ lib.maintainers.matthewcroughan ];
      license = lib.licenses.agpl3;
      description = "browser-based front-end for AI-assisted writing with multiple local & remote AI models";
      homepage = "https://github.com/KoboldAI/KoboldAI-Client";
      mainProgram = "koboldai";
    };
  })
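
For orientation, a sketch of where the wrapper above puts persistent state on first run (paths follow the default `stateDir`; exact contents will vary once models are downloaded):

```sh
# The wrapper recreates /tmp/nix-koboldai on every launch and symlinks it to
# the persistent state directory, so user data survives restarts:
ls ~/.koboldai/state
# cache  models  settings  userscripts
```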