[AIC-PY] + publish script for any pypi pkg, small mods to llama ext #265

Closed
wants to merge 2 commits into from
178 changes: 138 additions & 40 deletions cookbooks/Cli-Mate/cli-mate.py
@@ -1,8 +1,13 @@
from dataclasses import dataclass
from textwrap import dedent
import warnings

from result import Err, Ok, Result

warnings.filterwarnings("ignore")

import asyncio
from asyncio import AbstractEventLoop
import argparse
import signal
import sys
@@ -81,12 +86,110 @@ async def mod_code(
return 0


@dataclass
class Help:
pass


@dataclass
class Run:
user_input: str


class Reload:
# TODO
# if should_reload and source_code_file is not None:
# user_input = deprefix(user_input.strip(), "reload")
# with open(source_code_file.strip(), "r", encoding="utf8") as file:
# source_code = file.read()
# llm_input = f"QUERY ABOUT SOURCE CODE:\n{user_input}\nSOURCE CODE:\n```{source_code}\n```"
pass


class Clear:
pass


class Pass:
pass


class MultilineToggle:
pass


Command = Pass | Help | Run | Reload | Clear | MultilineToggle


def _get_command(user_input: str) -> Command:
normed = user_input.strip().lower()
if normed in ["h", "help", "?"]:
return Help()
elif normed in ["r", "reload"]:
return Reload()
elif normed in ["c", "clear"]:
return Clear()
elif normed in ["m", "multiline"]:
return MultilineToggle()
else:
return Run(user_input=user_input)


def _print_help():
print(
dedent(
"""
Exit loop: Ctrl-D
Toggle multiline input mode: m or multiline
Clear screen: c or clear
Reload source code: r or reload
"""
)
)


async def _run_llm(
runtime: AIConfigRuntime, llm_input: str
) -> Result[ExecuteResult, str]:
# Dynamically generate the prompt name and prompt object
new_prompt_name = f"prompt{len(runtime.prompts)+1}" # Prompt{number of prompts}
new_prompt = Prompt(name=new_prompt_name, input=llm_input)

# Add the new prompt and run the model
runtime.add_prompt(new_prompt.name, new_prompt)

def callback(delta: Any, _: Any, __: int):
if state["interrupt"]:
raise InterruptException()

print(delta.get("content", ""), end="", flush=True)

options = InferenceOptions(stream=True, stream_callback=callback)
state["interrupt"] = False
try:
result = await runtime.run(new_prompt_name, {}, options=options)
print(flush=True)
return Ok(result)
except InterruptException:
return Err("interrupted")


async def _get_raw_input(
event_loop: AbstractEventLoop, session: PromptSession[str], is_multiline: bool
) -> str:
def _prompt(): # type: ignore
return session.prompt(
"> ",
multiline=is_multiline,
)

return await event_loop.run_in_executor(None, _prompt)


async def loop(aiconfig_path: str, source_code_file: str | None):
runtime = AIConfigRuntime.load(aiconfig_path)
event_loop = asyncio.get_event_loop()

session = PromptSession()

state["interrupt"] = False

def signal_handler(_: int, __: FrameType | None):
@@ -95,52 +198,47 @@ def signal_handler(_: int, __: FrameType | None):

signal.signal(signal.SIGINT, signal_handler)

i = 0
is_multiline = False
print("Enter 'h', 'help', or '?' for help.", flush=True)
while True:
try:
user_input = await event_loop.run_in_executor(
None, session.prompt, "Query: [ctrl-D to exit] "
)
raw_input = await _get_raw_input(event_loop, session, is_multiline)
except KeyboardInterrupt:
continue
except EOFError:
print("Exiting")
break

if user_input.strip() == "":
continue

should_reload = user_input.strip().startswith("reload") or i == 0
if should_reload and source_code_file is not None:
user_input = deprefix(user_input.strip(), "reload")
with open(source_code_file.strip(), "r", encoding="utf8") as file:
source_code = file.read()
prompt = f"QUERY ABOUT SOURCE CODE:\n{user_input}\nSOURCE CODE:\n```{source_code}\n```"
else:
prompt = user_input

# Dynamically generate the prompt name and prompt object
new_prompt_name = f"prompt{len(runtime.prompts)+1}" # Prompt{number of prompts}
new_prompt = Prompt(name=new_prompt_name, input=prompt)

# Add the new prompt and run the model
runtime.add_prompt(new_prompt.name, new_prompt)

def callback(delta: Any, _: Any, __: int):
if state["interrupt"]:
raise InterruptException()

print(delta.get("content", ""), end="", flush=True)

options = InferenceOptions(stream=True, stream_callback=callback)
state["interrupt"] = False
try:
result = await runtime.run(new_prompt_name, {}, options=options)
# print(f"{result=}")
print(flush=True)
i += 1
except InterruptException:
continue
command = _get_command(raw_input)

match command:
case Pass():
pass
case Help():
_print_help()
case MultilineToggle():
is_multiline = not is_multiline
print(f"Multiline input mode: {'on' if is_multiline else 'off'}")
if is_multiline:
print("Hit option-enter to submit.")
case Run(user_input=user_input):
prompt = f"""
INSTRUCTIONS: respond to the following query as concisely as possible.
Do not output more tokens than necessary.
QUERY: {user_input}
"""
llm_res = await _run_llm(runtime, prompt)
match llm_res:
case Ok(_):
# TODO: do something with the result?
pass
case Err(msg):
print(msg)
case Reload():
# TODO
pass
case Clear():
print("\033c", end="")


async def main():
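The new command classes and the match statement above replace the old inline if/else handling in the REPL loop. A minimal standalone sketch of the same dispatch pattern (simplified names, not the PR's exact code):

from dataclasses import dataclass


@dataclass
class Run:
    user_input: str


class Help:
    pass


Command = Help | Run  # Python 3.10+ union type alias, as in the diff


def get_command(raw: str) -> Command:
    # Map known keywords to command objects; anything else is a query to run.
    if raw.strip().lower() in ("h", "help", "?"):
        return Help()
    return Run(user_input=raw)


def dispatch(command: Command) -> None:
    match command:
        case Help():
            print("help text")
        case Run(user_input=query):
            print(f"running: {query}")


dispatch(get_command("?"))       # prints: help text
dispatch(get_command("hello"))   # prints: running: hello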
5 changes: 5 additions & 0 deletions extensions/llama/python/README.md
@@ -0,0 +1,5 @@
AIConfig Model Parser for llama.cpp (Python bindings).

Usage: See cookbook:

https://github.com/lastmile-ai/aiconfig/blob/c64224ed48ccb7f8cbd2d3a1b2e8bd250aeb9ff2/cookbooks/llama/python/ask_llama.py#L4
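For quick reference, a minimal usage sketch based on the linked cookbook; the module name, parser class, model path, and prompt name are assumptions carried over from that example, not guaranteed by this README:

import asyncio

from aiconfig import AIConfigRuntime, InferenceOptions
from llama import LlamaModelParser  # module/class names as used in the cookbook


async def main():
    # Register the llama.cpp model parser for the model name referenced in the aiconfig.
    parser = LlamaModelParser(model_path="models/llama-2-7b-chat.Q4_K_M.gguf")
    AIConfigRuntime.register_model_parser(parser, "llama-2-7b-chat")

    config = AIConfigRuntime.load("cookbooks/llama/llama-aiconfig.json")
    await config.run("prompt7b_chat", params={}, options=InferenceOptions(stream=True))


asyncio.run(main())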
43 changes: 43 additions & 0 deletions extensions/llama/python/pyproject.toml
@@ -0,0 +1,43 @@
[build-system]
requires = ["setuptools", "wheel"]

[project]
name = "python-aiconfig-llama"
version = "0.0.1"
authors = [
{ name="Jonathan Lessinger", email="[email protected]" },
]
description = "llama.cpp extension for the AIConfig library"
readme = "README.md"
requires-python = ">=3.7"
classifiers = [
"Intended Audience :: Developers",
"Programming Language :: Python :: 3",
]
dynamic = ["dependencies"]

[tool.setuptools.dynamic]
dependencies = {file = ["requirements.txt"]}

[project.urls]
"Homepage" = "https://github.com/lastmile-ai/aiconfig"
"Bug Tracker" = "https://github.com/lastmile-ai/aiconfig/issues"

# Black formatting
[tool.black]
line-length = 99
include = '\.pyi?$'
exclude = '''
/(
.eggs # exclude a few common directories in the
| .git # root of the project
| .hg
| .mypy_cache
| .tox
| venv
| _build
| buck-out
| build
| dist
)/
'''
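The [tool.setuptools.dynamic] table above tells setuptools to read install requirements from requirements.txt at build time, so the published metadata stays in sync with the pinned file. A sketch of the companion file (these contents are an assumption, not part of this diff):

# extensions/llama/python/requirements.txt (assumed contents)
python-aiconfig
llama-cpp-python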
37 changes: 37 additions & 0 deletions scripts/pypipublish.sh
@@ -0,0 +1,37 @@
#!/bin/zsh
# pypipublish.sh
# Usage: Run ./scripts/pypipublish.sh path/to/project/root conda-env-name
Review comment (Member):
Wouldn't it make more sense to create an empty environment rather than depend on an existing one?

# path/to/project/root is anywhere with a pyproject.toml.

# NOTE: This assumes you have the aiconfig conda environment created.
# You will be prompted for a username and password. For the username, use __token__.
# For the password, use the token value, including the pypi- prefix.
# To get a PyPI token, go to https://pypi.org/manage/account/ (scroll down to API Tokens).
# If you have issues, read the docs: https://packaging.python.org/en/latest/tutorials/packaging-projects/
Review comment (@Ankush-lastmile, Member, Nov 22, 2023):
Is publishing with an account username and password the expected flow? I've noticed that the official publishing guide suggests using a token.

I think it would make more sense to read credentials from a .env file (or equivalent) rather than typing them in when the script runs.


# If you want to upload to testpypi, run pypipublish-test.sh.
Review comment (Member):

Does this exist? I don't see it in this diff.


if [ -z "$2" ]
then
echo "Usage: pypipublish.sh path/to/project/root conda-env-name"
exit 1
fi


cd "$1"
if [ ! -f "pyproject.toml" ]
then
echo "File pyproject.toml does not exist in the current directory"
exit 1
fi

rm -rf ./dist

source /opt/homebrew/Caskroom/miniconda/base/etc/profile.d/conda.sh && conda activate "$2" \
&& python3 -m pip install --upgrade build \
&& python3 -m build \
&& python3 -m pip install --upgrade twine \
&& python3 -m twine upload dist/*

cd -
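Example invocation, plus a non-interactive variant using twine's standard TWINE_* environment variables; the environment name and the PYPI_TOKEN variable here are placeholders, not part of this diff:

# Publish the llama extension using an existing conda env named "aiconfig".
./scripts/pypipublish.sh extensions/llama/python aiconfig

# twine also reads credentials from the environment, which would avoid the
# interactive username/password prompt the reviewers flagged:
TWINE_USERNAME=__token__ TWINE_PASSWORD="$PYPI_TOKEN" \
    ./scripts/pypipublish.sh extensions/llama/python aiconfig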