#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time : 2023/5/6 14:13
@Author : alexanderwu
@File : hello_world.py
"""
import asyncio
from metagpt.llm import LLM
from metagpt.logs import logger


async def ask_and_print(question: str, llm: LLM, system_prompt) -> str:
    logger.info(f"Q: {question}")
    rsp = await llm.aask(question, system_msgs=[system_prompt])
    logger.info(f"A: {rsp}")
    return rsp


async def lowlevel_api_example(llm: LLM):
    logger.info("low level api example")
    logger.info(await llm.aask_batch(["hi", "write python hello world."]))

    hello_msg = [{"role": "user", "content": "count from 1 to 10. split by newline."}]
    logger.info(await llm.acompletion(hello_msg))
    logger.info(await llm.acompletion_text(hello_msg))

    # streaming mode, much slower
    await llm.acompletion_text(hello_msg, stream=True)

    # call the synchronous completion API only if the provider implements it
    if hasattr(llm, "completion"):
        logger.info(llm.completion(hello_msg))


async def main():
    llm = LLM()
    await ask_and_print("what's your name?", llm, "I'm a helpful AI assistant.")
    await ask_and_print("who are you?", llm, "just answer 'I am a robot' if the question is 'who are you'")
    await lowlevel_api_example(llm)


if __name__ == "__main__":
    asyncio.run(main())
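

# Usage note (a sketch, not part of the original script): running this example
# assumes an LLM provider has already been configured for MetaGPT (for example,
# an API key supplied via MetaGPT's config file or environment variables), since
# LLM() is expected to pick up whatever default provider the installation is
# configured with.
#
#   python hello_world.py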