Commit

Reformatted docs/docs/integrations/chat/kinetica.ipynb
am-kinetica committed Feb 21, 2024
1 parent a73f7af commit 1c2281e
Showing 1 changed file with 33 additions and 19 deletions.
52 changes: 33 additions & 19 deletions docs/docs/integrations/chat/kinetica.ipynb
@@ -98,13 +98,14 @@
"outputs": [],
"source": [
"from langchain_community.chat_models.kinetica import KineticaChatLLM\n",
"\n",
"kinetica_llm = KineticaChatLLM()\n",
"\n",
"# Test table we will create\n",
"table_name = \"demo.user_profiles\"\n",
"\n",
"# LLM Context we will create\n",
"kinetica_ctx = 'demo.test_llm_ctx'"
"kinetica_ctx = \"demo.test_llm_ctx\""
]
},
{
@@ -250,15 +251,17 @@
"from typing import Generator\n",
"\n",
"Faker.seed(5467)\n",
"faker = Faker(locale='en-US')\n",
"faker = Faker(locale=\"en-US\")\n",
"\n",
"\n",
"def profile_gen(count: int) -> Generator:\n",
" for id in range(0, count):\n",
" rec = dict(id=id, **faker.simple_profile())\n",
" rec['birthdate'] = pd.Timestamp(rec['birthdate'])\n",
" yield rec\n",
" for id in range(0, count):\n",
" rec = dict(id=id, **faker.simple_profile())\n",
" rec[\"birthdate\"] = pd.Timestamp(rec[\"birthdate\"])\n",
" yield rec\n",
"\n",
"\n",
"load_df = pd.DataFrame.from_records(data=profile_gen(100), index='id')\n",
"load_df = pd.DataFrame.from_records(data=profile_gen(100), index=\"id\")\n",
"load_df.head()"
]
},
@@ -359,10 +362,13 @@
"source": [
"from gpudb import GPUdbTable\n",
"\n",
"gpudb_table = GPUdbTable.from_df(load_df, db=kinetica_llm.kdbc, \n",
" table_name=table_name, \n",
" clear_table=True,\n",
" load_data=True)\n",
"gpudb_table = GPUdbTable.from_df(\n",
" load_df,\n",
" db=kinetica_llm.kdbc,\n",
" table_name=table_name,\n",
" clear_table=True,\n",
" load_data=True,\n",
")\n",
"\n",
"# See the Kinetica column types\n",
"gpudb_table.type_as_df()"
@@ -403,7 +409,7 @@
"\n",
"from gpudb import GPUdbException\n",
"\n",
"sql=f\"\"\"\n",
"sql = f\"\"\"\n",
"CREATE OR REPLACE CONTEXT {kinetica_ctx}\n",
"(\n",
" TABLE = demo.test_profiles\n",
@@ -418,15 +424,17 @@
")\n",
"\"\"\"\n",
"\n",
"\n",
"def _check_error(response: dict) -> None:\n",
" status = response['status_info']['status']\n",
" if (status != 'OK'):\n",
" message = response['status_info']['message']\n",
" raise GPUdbException('[%s]: %s' % (status, message))\n",
" status = response[\"status_info\"][\"status\"]\n",
" if status != \"OK\":\n",
" message = response[\"status_info\"][\"message\"]\n",
" raise GPUdbException(\"[%s]: %s\" % (status, message))\n",
"\n",
"\n",
"response = kinetica_llm.kdbc.execute_sql(sql)\n",
"_check_error(response)\n",
"response['status_info']"
"response[\"status_info\"]"
]
},
{
@@ -509,7 +517,11 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain_community.chat_models.kinetica import KineticaSqlOutputParser, KineticaSqlResponse\n",
"from langchain_community.chat_models.kinetica import (\n",
" KineticaSqlOutputParser,\n",
" KineticaSqlResponse,\n",
")\n",
"\n",
"chain = prompt_template | kinetica_llm | KineticaSqlOutputParser(kdbc=kinetica_llm.kdbc)"
]
},
@@ -602,7 +614,9 @@
],
"source": [
"# Here you must ask a question relevant to the LLM context provided in the prompt template.\n",
"response: KineticaSqlResponse = chain.invoke({\"input\": \"What are the female users ordered by username?\"})\n",
"response: KineticaSqlResponse = chain.invoke(\n",
" {\"input\": \"What are the female users ordered by username?\"}\n",
")\n",
"\n",
"print(f\"SQL: {response.sql}\")\n",
"response.dataframe.head()"