From 7cf1821677ec7677fbc4a9d085eb7586a6c5f0dd Mon Sep 17 00:00:00 2001
From: Jonathan Lessinger
Date: Fri, 17 Nov 2023 11:26:55 -0500
Subject: [PATCH] [AIC-py] usability fixes for llama cookbook

Summary:
- expose CLI args for paths for easier usage
- use the extension PyPI package
- add README for installation + usage
---
 cookbooks/llama/python/README.md        | 16 +++++++++
 cookbooks/llama/python/ask_llama.py     | 30 +++++++++++++----
 cookbooks/llama/python/llama.py         |  1 -
 cookbooks/llama/python/pyproject.toml   | 43 +++++++++++++++++++++++++
 cookbooks/llama/python/requirements.txt |  2 ++
 5 files changed, 85 insertions(+), 7 deletions(-)
 create mode 100644 cookbooks/llama/python/README.md
 delete mode 120000 cookbooks/llama/python/llama.py
 create mode 100644 cookbooks/llama/python/pyproject.toml
 create mode 100644 cookbooks/llama/python/requirements.txt

diff --git a/cookbooks/llama/python/README.md b/cookbooks/llama/python/README.md
new file mode 100644
index 000000000..dee21785a
--- /dev/null
+++ b/cookbooks/llama/python/README.md
@@ -0,0 +1,16 @@
+Example app using the llama AIConfig extension.
+
+1. Install dependencies (example using Anaconda):
+   `conda create -n aiconfig-llama-cookbook`
+   `conda activate aiconfig-llama-cookbook`
+   `conda install pip`
+   `pip install -r python/requirements.txt`
+
+2. Download a model, e.g.:
+   `curl -L https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q4_K_M.gguf --output ./models/llama-2-7b-chat.Q4_K_M.gguf`
+3. `cd` into the cookbook root dir:
+   `$ pwd`
+   aiconfig/cookbooks/llama
+4. Create an AIConfig like this one: https://github.com/lastmile-ai/aiconfig/blob/e92e5a3c80b9c2b74a9432f0441318a951d54d0c/cookbooks/llama/llama-aiconfig.json
+5. Run with your local paths:
+   `python python/ask_llama.py --aiconfig-path='../llama/llama-aiconfig.json' --model-path='../../models/llama-2-7b-chat.Q4_K_M.gguf' 2> ask-llama.err`
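Note: step 5 of the README above has a programmatic equivalent. Below is a minimal sketch, not part of the patch, assuming `ask_llama.py` is importable as a module; `run()` is the helper this patch introduces, and the paths are placeholders.

```python
# Minimal sketch (not part of the patch): programmatic equivalent of the CLI
# invocation in README step 5. Assumes ask_llama.py is on the import path;
# run() is the helper added in this patch. Paths below are placeholders.
import asyncio

from ask_llama import run

asyncio.run(
    run(
        aiconfig_path="cookbooks/llama/llama-aiconfig.json",
        model_path="models/llama-2-7b-chat.Q4_K_M.gguf",
    )
)
```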
diff --git a/cookbooks/llama/python/ask_llama.py b/cookbooks/llama/python/ask_llama.py
index 8b3114033..ce0d1e03a 100644
--- a/cookbooks/llama/python/ask_llama.py
+++ b/cookbooks/llama/python/ask_llama.py
@@ -1,15 +1,33 @@
 import asyncio
+import sys
 
 from aiconfig.model_parser import InferenceOptions
 from llama import LlamaModelParser
 
 from aiconfig import AIConfigRuntime
+import argparse
 
 
 async def main():
-    llama_model_parser = LlamaModelParser(
-        model_path="models/llama-2-7b-chat.Q4_K_M.gguf"
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--aiconfig-path",
+        type=str,
+        required=True,
+        help="Relative or absolute path to aiconfig json, e.g. cookbooks/llama/llama-aiconfig.json",
     )
+    parser.add_argument(
+        "--model-path",
+        type=str,
+        required=True,
+        help="Relative or absolute path to model",
+    )
+    args = parser.parse_args()
+    return await run(args.aiconfig_path, args.model_path)
+
+
+async def run(aiconfig_path: str, model_path: str):
+    llama_model_parser = LlamaModelParser(model_path=model_path)
 
     for lm in [
         "llama-2-7b-chat",
@@ -18,7 +36,7 @@ async def main():
     ]:
         AIConfigRuntime.register_model_parser(llama_model_parser, lm)
 
-    config = AIConfigRuntime.load("cookbooks/llama/llama-aiconfig.json")
+    config = AIConfigRuntime.load(aiconfig_path)
 
     def stream_callback(data, accumulated_message, index):
         print(data, end="", flush=True)
@@ -38,9 +56,9 @@ def stream_callback(data, accumulated_message, index):
     await config.run("prompt13b", params={}, options=inference_options)
 
     print("\n\nRunning prompt13b_code...")
-    code_res = await config.run("prompt13b_code", params={}, options=inference_options)
-    print(f"\n\n\n\nCode response:\n{code_res}")
+    await config.run("prompt13b_code", params={}, options=inference_options)
 
 
 if __name__ == "__main__":
-    asyncio.run(main())
+    res = asyncio.run(main())
+    sys.exit(res)
diff --git a/cookbooks/llama/python/llama.py b/cookbooks/llama/python/llama.py
deleted file mode 120000
index 5c686eff8..000000000
--- a/cookbooks/llama/python/llama.py
+++ /dev/null
@@ -1 +0,0 @@
-/Users/jonathan/Projects/aiconfig/extensions/llama/python/llama.py
\ No newline at end of file
diff --git a/cookbooks/llama/python/pyproject.toml b/cookbooks/llama/python/pyproject.toml
new file mode 100644
index 000000000..32f8039ad
--- /dev/null
+++ b/cookbooks/llama/python/pyproject.toml
@@ -0,0 +1,43 @@
+[build-system]
+requires = ["setuptools", "wheel"]
+
+[project]
+name = "python-aiconfig-llama-cookbook"
+version = "0.0.1"
+authors = [
+  { name="Jonathan Lessinger", email="jonathan@lastmileai.dev" },
+]
+description = "LLaMA cookbook using the AIConfig library"
+readme = "README.md"
+requires-python = ">=3.7"
+classifiers = [
+    "Intended Audience :: Developers",
+    "Programming Language :: Python :: 3",
+]
+dynamic = ["dependencies"]
+
+[tool.setuptools.dynamic]
+dependencies = {file = ["requirements.txt"]}
+
+[project.urls]
+"Homepage" = "https://github.com/lastmile-ai/aiconfig"
+"Bug Tracker" = "https://github.com/lastmile-ai/aiconfig/issues"
+
+# Black formatting
+[tool.black]
+line-length = 99
+include = '\.pyi?$'
+exclude = '''
+/(
+      .eggs         # exclude a few common directories in the
+    | .git          # root of the project
+    | .hg
+    | .mypy_cache
+    | .tox
+    | venv
+    | _build
+    | buck-out
+    | build
+    | dist
+)/
+'''
\ No newline at end of file
diff --git a/cookbooks/llama/python/requirements.txt b/cookbooks/llama/python/requirements.txt
new file mode 100644
index 000000000..66ff60a78
--- /dev/null
+++ b/cookbooks/llama/python/requirements.txt
@@ -0,0 +1,2 @@
+python-aiconfig-llama
+python-aiconfig
\ No newline at end of file
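For context on the `ask_llama.py` changes, here is a minimal self-contained sketch of the streaming pattern the script uses. It assumes only the API surface visible in the diff (`AIConfigRuntime.load`, `AIConfigRuntime.register_model_parser`, `config.run`, and an `InferenceOptions` that accepts a `stream_callback`, as the `inference_options` variable in the diff suggests); `"prompt7b"` is a hypothetical prompt name, so adjust it to match your aiconfig.

```python
# Minimal sketch (not part of the patch) of the streaming pattern ask_llama.py
# uses. Assumes the API surface visible in the diff; "prompt7b" is a
# hypothetical prompt name and the paths are placeholders.
import asyncio

from aiconfig import AIConfigRuntime
from aiconfig.model_parser import InferenceOptions
from llama import LlamaModelParser  # provided by the python-aiconfig-llama package


async def demo(aiconfig_path: str, model_path: str) -> None:
    # Register the llama parser for the model name referenced by the aiconfig.
    AIConfigRuntime.register_model_parser(
        LlamaModelParser(model_path=model_path), "llama-2-7b-chat"
    )
    config = AIConfigRuntime.load(aiconfig_path)

    def stream_callback(data, accumulated_message, index):
        # Print tokens as they arrive, as ask_llama.py does.
        print(data, end="", flush=True)

    options = InferenceOptions(stream_callback=stream_callback)
    await config.run("prompt7b", params={}, options=options)


if __name__ == "__main__":
    asyncio.run(
        demo("llama-aiconfig.json", "models/llama-2-7b-chat.Q4_K_M.gguf")
    )
```

The patch's split of `main()` into an argparse wrapper plus a path-parameterized `run()` follows the same shape: all filesystem locations enter through parameters instead of being hard-coded, which is what makes the cookbook runnable from any working directory.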