{
  den.aspects.kirakira.nixos =
    { pkgs, ... }:
    {
      # ----- overrides
      nixpkgs.config.packageOverrides =
        prev:
        let
          # llama.cpp configured for ROCm on a gfx1031 GPU target, with BLAS
          # enabled and the CUDA/Metal backends explicitly switched off.
          llamaCppRocm = prev.llama-cpp.override {
            rocmSupport = true;
            rocmGpuTargets = [ "gfx1031" ];
            blasSupport = true;
            cudaSupport = false;
            metalSupport = false;
          };

          # Upstream llama-swap release tarball (prebuilt linux/amd64 binary).
          llamaSwapTarball = prev.fetchurl {
            url = "https://github.com/mostlygeek/llama-swap/releases/download/v175/llama-swap_175_linux_amd64.tar.gz";
            hash = "sha256-zeyVz0ldMxV4HKK+u5TtAozfRI6IJmeBo92IJTgkGrQ=";
          };
        in
        {
          # Pin llama.cpp to upstream release tag b7205.
          llama-cpp = llamaCppRocm.overrideAttrs (oldAttrs: rec {
            version = "7205";
            src = prev.fetchFromGitHub {
              owner = "ggml-org";
              repo = "llama.cpp";
              tag = "b${version}";
              hash = "sha256-1CcYbc8RWAPVz8hoxKEmbAgQesC1oGFZ3fhfuU5vmOc=";
              # Keep .git just long enough to record the short commit id in
              # $out/COMMIT, then strip it so the fixed-output hash stays
              # reproducible.
              leaveDotGit = true;
              postFetch = ''
                git -C "$out" rev-parse --short HEAD > $out/COMMIT
                find "$out" -name .git -print0 | xargs -0 rm -rf
              '';
            };
            cmakeFlags = (oldAttrs.cmakeFlags or [ ]) ++ [
              "-DGGML_NATIVE=ON"
            ];
            # GGML_NATIVE implies -march=native, which nixpkgs' hardening
            # normally forbids; relax that check before configuring.
            preConfigure = ''
              export NIX_ENFORCE_NO_NATIVE=0
              ${oldAttrs.preConfigure or ""}
            '';
          });

          # llama-swap from GitHub releases
          llama-swap = prev.runCommand "llama-swap" { } ''
            mkdir -p $out/bin
            tar -xzf ${llamaSwapTarball} -C $out/bin
            chmod +x $out/bin/llama-swap
          '';
        };
    };
}