build(nix): Package python scripts independently of main derivation
ditsuke committed Feb 26, 2024
1 parent 48ab2fd commit ad57699
Showing 14 changed files with 300 additions and 265 deletions.
3 changes: 1 addition & 2 deletions .devops/nix/package.nix
@@ -30,7 +30,6 @@
useRocm ? config.rocmSupport,
useVulkan ? false,
llamaVersion ? "0.0.0", # Arbitrary version, substituted by the flake
-gguf-py,
}@inputs:

let
@@ -151,7 +150,7 @@ effectiveStdenv.mkDerivation (finalAttrs: {
# TODO: Package up each Python script or service appropriately.
# If we were to migrate to buildPythonPackage and prepare the `pyproject.toml`,
# we could make those *.py into setuptools' entrypoints
-substituteInPlace ./*.py --replace "/usr/bin/env python" "${llama-python-base-with-gguf}/bin/python"
+# substituteInPlace ./*.py --replace "/usr/bin/env python" "${llama-python-base-with-gguf}/bin/python"
'';

nativeBuildInputs =
24 changes: 24 additions & 0 deletions .devops/nix/python-scripts.nix
@@ -0,0 +1,24 @@
+{
+  buildPythonPackage,
+  poetry-core,
+  breakpointHook,
+  python3Packages,
+  gguf-py
+}@inputs:
+
+buildPythonPackage {
+  pname = "llama-scripts";
+  src = ../../.;
+  version = "0.0.0";
+  pyproject = true;
+  nativeBuildInputs = [ poetry-core ];
+  projectDir = ../../.;
+  propagatedBuildInputs = with python3Packages; [
+    numpy
+    sentencepiece
+    transformers
+    protobuf
+    torchWithoutCuda
+    gguf-py
+  ];
+}
5 changes: 4 additions & 1 deletion .devops/nix/scope.nix
@@ -3,6 +3,7 @@
newScope,
python3,
llamaVersion ? "0.0.0",
+poetry2nix,
}:

let
@@ -19,7 +20,6 @@ in

lib.makeScope newScope (self: {
inherit llamaVersion;
-pp = python3.pkgs;
gguf-py = self.callPackage ./package-gguf-py.nix {
inherit
buildPythonPackage
@@ -28,6 +28,9 @@ lib.makeScope newScope (self: {
pytestCheckHook
;
};
+python-scripts = self.callPackage ./python-scripts.nix {
+  inherit buildPythonPackage poetry-core poetry2nix;
+};
llama-cpp = self.callPackage ./package.nix { };
docker = self.callPackage ./docker.nix { };
docker-min = self.callPackage ./docker.nix { interactive = false; };
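Given this scope entry and the legacyPackages wiring shown in the flake.nix hunks below, the new derivation should be reachable along the lines of `nix build .#llamaPackages.python-scripts`; the attribute path is an assumption based on the name registered above and on the access pattern described in the flake comment.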
File renamed without changes.
2 changes: 1 addition & 1 deletion convert-hf-to-gguf.py → bin/convert_hf_to_gguf.py
@@ -22,7 +22,7 @@
sys.path.insert(1, str(Path(__file__).parent / 'gguf-py'))
import gguf

-from convert import HfVocab
+from bin.convert import HfVocab


###### MODEL DEFINITIONS ######
@@ -352,7 +352,7 @@ def add_tensors(self, gguf_writer):


def handle_metadata(cfg, hp):
-import convert
+import bin.convert as convert
assert cfg.model_metadata_dir.is_dir(), 'Metadata dir is not a directory'
hf_config_path = cfg.model_metadata_dir / "config.json"
orig_config_path = cfg.model_metadata_dir / "params.json"
File renamed without changes.
File renamed without changes.
File renamed without changes.
21 changes: 17 additions & 4 deletions flake.nix
@@ -109,11 +109,14 @@
# Cf. https://nixos.org/manual/nix/unstable/command-ref/new-cli/nix3-flake.html?highlight=flake#flake-format
flake.overlays.default = (
final: prev: {
-llamaPackages = final.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+llamaPackages = final.callPackage .devops/nix/scope.nix {
+  inherit llamaVersion;
+};
inherit (final.llamaPackages) llama-cpp;
}
);

+debug = true;
systems = [
"aarch64-darwin"
"aarch64-linux"
@@ -132,6 +135,7 @@
...
}:
{
+debug = true;
# Unlike `.#packages`, legacyPackages may contain values of
# arbitrary types (including nested attrsets) and may even throw
# exceptions. This attribute isn't recursed into by `nix flake
@@ -141,9 +145,18 @@
# access them as `nix build .#llamaPackages.${scriptName}` using
# the same path you would with an overlay.
legacyPackages = {
-llamaPackages = pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
-llamaPackagesCuda = pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
-llamaPackagesRocm = pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+llamaPackages = pkgs.callPackage .devops/nix/scope.nix {
+  inherit llamaVersion;
+  poetry2nix = import inputs.poetry2nix { pkgs = pkgs; };
+};
+llamaPackagesCuda = pkgsCuda.callPackage .devops/nix/scope.nix {
+  inherit llamaVersion;
+  poetry2nix = import inputs.poetry2nix { pkgs = pkgs; };
+};
+llamaPackagesRocm = pkgsRocm.callPackage .devops/nix/scope.nix {
+  inherit llamaVersion;
+  poetry2nix = import inputs.poetry2nix { pkgs = pkgs; };
+};
};

# We don't use the overlay here so as to avoid making too many instances of nixpkgs,
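The legacyPackages hunks above call `import inputs.poetry2nix`, which presumes a matching flake input that is not part of the hunks shown here. A minimal sketch of what such an input could look like, assuming the upstream nix-community/poetry2nix project, is:

{
  # Hypothetical flake showing only the input wiring implied by `inputs.poetry2nix`
  # above; the URL and the nixpkgs follows are assumptions, not part of this commit.
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    poetry2nix = {
      url = "github:nix-community/poetry2nix";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };

  outputs = { self, nixpkgs, poetry2nix, ... }: {
    # With this input in place, `import poetry2nix { pkgs = ...; }` can be used
    # as in the legacyPackages hunk above.
  };
}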
166 changes: 166 additions & 0 deletions gguf-py/poetry.lock

Some generated files are not rendered by default.

6 changes: 3 additions & 3 deletions gguf-py/pyproject.toml
@@ -4,9 +4,9 @@ version = "0.7.0"
description = "Read and write ML models in GGUF for GGML"
authors = ["GGML <[email protected]>"]
packages = [
-{include = "gguf"},
-{include = "gguf/py.typed"},
-{include = "scripts"},
+{ include = "gguf" },
+{ include = "gguf/py.typed" },
+{ include = "scripts" },
]
readme = "README.md"
homepage = "https://ggml.ai"