diff --git a/fern/docs/pages/gettingstarted.mdx b/fern/docs/pages/gettingstarted.mdx
index 378f82a..bc8f4e3 100644
--- a/fern/docs/pages/gettingstarted.mdx
+++ b/fern/docs/pages/gettingstarted.mdx
@@ -54,11 +54,12 @@ You can then use our Python client or REST API to prompt one of our LLMs!
 
 ```python filename="main.py"
 import os
 import json
-import predictionguard as pg
+from predictionguard import PredictionGuard
 
 # Set your Prediction Guard token as an environmental variable.
+os.environ["PREDICTIONGUARD_API_KEY"] = ""
-os.environ["PREDICTIONGUARD_TOKEN"] = ""
+client = PredictionGuard()
 
 
 # Define our prompt.
@@ -73,7 +74,7 @@ messages = [
     }
 ]
 
-result = pg.Chat.create(
+result = client.chat.completions.create(
     model="Neural-Chat-7B",
     messages=messages
 )
diff --git a/fern/docs/pages/guides/ada.mdx b/fern/docs/pages/guides/ada.mdx
index 21d5e1d..8c289b4 100644
--- a/fern/docs/pages/guides/ada.mdx
+++ b/fern/docs/pages/guides/ada.mdx
@@ -21,7 +21,7 @@ For this demo we have selecteed a public dataset from Kaggle - Jobs and Salaries
 
 ## Installation and Setup
 
 - Install the Python SDK with `pip install predictionguard`
-- Get a Prediction Guard access token (as described [here](https://docs.predictionguard.com/)) and set it as the environment variable `PREDICTIONGUARD_TOKEN`.
+- Get a Prediction Guard access token (as described [here](https://docs.predictionguard.com/)) and set it as the environment variable `PREDICTIONGUARD_API_KEY`.
 
 ## Setup