From 53446b0b7d4fd5d77e98bc1855015ab093bac62f Mon Sep 17 00:00:00 2001 From: Laura Schauer Date: Thu, 11 Jul 2024 14:50:37 +0200 Subject: [PATCH] Adds anthropic model (#396) Adds claude 3 opus for both sap and third party providers. --- prospector/README.md | 8 ++-- prospector/llm/instantiation.py | 4 ++ prospector/llm/models/anthropic.py | 74 ++++++++++++++++++++++++++++++ prospector/requirements.in | 1 + prospector/requirements.txt | 4 ++ 5 files changed, 88 insertions(+), 3 deletions(-) create mode 100644 prospector/llm/models/anthropic.py diff --git a/prospector/README.md b/prospector/README.md index 968e9be13..d1230216b 100644 --- a/prospector/README.md +++ b/prospector/README.md @@ -55,6 +55,7 @@ To quickly set up Prospector, follow these steps. This will run Prospector in it By default, Prospector saves the results in a HTML file named *prospector-report.html*. Open this file in a web browser to view what Prospector was able to find! + ### 🤖 LLM Support To use Prospector with LLM support, you simply set required parameters for the API access to the LLM in *config.yaml*. These parameters can vary depending on your choice of provider, please follow what fits your needs (drop-downs below). If you do not want to use LLM support, keep the `llm_service` block in your *config.yaml* file commented out. @@ -87,7 +88,7 @@ You also need to point the `ai_core_sk` parameter to a file contianing the secre
Use personal third party provider

-Implemented third party providers are **OpenAI**, **Google** and **Mistral**. +Implemented third party providers are **OpenAI**, **Google**, **Mistral**, and **Anthropic**. 1. You will need the following parameters in *config.yaml*: ```yaml @@ -101,14 +102,15 @@ Implemented third party providers are **OpenAI**, **Google** and **Mistral**. 1. [OpenAI](https://platform.openai.com/docs/models) 2. [Google](https://ai.google.dev/gemini-api/docs/models/gemini) 3. [Mistral](https://docs.mistral.ai/getting-started/models/) + 4. [Anthropic](https://docs.anthropic.com/en/docs/about-claude/models) The `temperature` parameter is optional. The default value is 0.0, but you can change it to something else. -2. Make sure to add your OpenAI API key to your `.env` file as `[OPENAI|GOOGLE|MISTRAL]_API_KEY`. +2. Make sure to add your provider's API key to your `.env` file as `[OPENAI|GOOGLE|MISTRAL|ANTHROPIC]_API_KEY`.
-#### +#### How to use LLM Support for different things You can set the `use_llm_<...>` parameters in *config.yaml* for fine-grained control over LLM support in various aspects of Prospector's phases. Each `use_llm_<...>` parameter allows you to enable or disable LLM support for a specific aspect: diff --git a/prospector/llm/instantiation.py b/prospector/llm/instantiation.py index db924e722..6331e439c 100644 --- a/prospector/llm/instantiation.py +++ b/prospector/llm/instantiation.py @@ -4,11 +4,13 @@ import requests from dotenv import load_dotenv +from langchain_anthropic import ChatAnthropic from langchain_core.language_models.llms import LLM from langchain_google_vertexai import ChatVertexAI from langchain_mistralai import ChatMistralAI from langchain_openai import ChatOpenAI +from llm.models.anthropic import Anthropic from llm.models.gemini import Gemini from llm.models.mistral import Mistral from llm.models.openai import OpenAI @@ -26,6 +28,7 @@ # "gpt-4o": OpenAI, # currently TBD "gemini-1.0-pro": Gemini, "mistral-large": Mistral, + "claude-3-opus": Anthropic, } @@ -37,6 +40,7 @@ "gpt-3.5-turbo": (ChatOpenAI, "OPENAI_API_KEY"), "gemini-pro": (ChatVertexAI, "GOOGLE_API_KEY"), "mistral-large-latest": (ChatMistralAI, "MISTRAL_API_KEY"), + "claude-3-opus-20240229": (ChatAnthropic, "ANTHROPIC_API_KEY"), } diff --git a/prospector/llm/models/anthropic.py b/prospector/llm/models/anthropic.py new file mode 100644 index 000000000..3fdee294d --- /dev/null +++ b/prospector/llm/models/anthropic.py @@ -0,0 +1,74 @@ +from typing import Any, Dict, List, Optional + +import requests +from langchain_core.language_models.llms import LLM + +import llm.instantiation as instantiation +from log.logger import logger + + +class Anthropic(LLM): + model_name: str + deployment_url: str + temperature: float + ai_core_sk_filepath: str + + @property + def _llm_type(self) -> str: + return "SAP Anthropic" + + @property + def _identifying_params(self) -> Dict[str, Any]: + """Return a dictionary 
of identifying parameters.""" + return { + "model_name": self.model_name, + "deployment_url": self.deployment_url, + "temperature": self.temperature, + "ai_core_sk_filepath": self.ai_core_sk_filepath, + } + + def _call( + self, prompt: str, stop: Optional[List[str]] = None, **kwargs: Any + ) -> str: + endpoint = f"{self.deployment_url}/invoke" + headers = instantiation.get_headers(self.ai_core_sk_filepath) + data = { + "anthropic_version": "bedrock-2023-05-31", + "max_tokens": 100, + "messages": [ + { + "role": "user", + "content": f"{prompt}", + } + ], + "temperature": self.temperature, + } + + try: + response = requests.post(endpoint, headers=headers, json=data) + response.raise_for_status() + return self.parse(response.json()) + except requests.exceptions.HTTPError as http_error: + logger.error( + f"HTTP error occurred when sending a request through AI Core: {http_error}" + ) + raise + except requests.exceptions.Timeout as timeout_err: + logger.error( + f"Timeout error occurred when sending a request through AI Core: {timeout_err}" + ) + raise + except requests.exceptions.ConnectionError as conn_err: + logger.error( + f"Connection error occurred when sending a request through AI Core: {conn_err}" + ) + raise + except requests.exceptions.RequestException as req_err: + logger.error( + f"A request error occurred when sending a request through AI Core: {req_err}" + ) + raise + + def parse(self, message) -> str: + """Parse the returned JSON object from Anthropic.""" + return message["content"][0]["text"] diff --git a/prospector/requirements.in b/prospector/requirements.in index 720c5295e..23febfc2b 100644 --- a/prospector/requirements.in +++ b/prospector/requirements.in @@ -6,6 +6,7 @@ fastapi google-cloud-aiplatform==1.49.0 Jinja2 langchain +langchain_anthropic langchain_openai langchain_google_vertexai langchain_mistralai diff --git a/prospector/requirements.txt b/prospector/requirements.txt index 1a5a11dee..b965949e1 100644 --- a/prospector/requirements.txt +++ 
b/prospector/requirements.txt @@ -8,6 +8,7 @@ aiohttp==3.9.5 aiosignal==1.3.1 annotated-types==0.7.0 +anthropic==0.30.1 antlr4-python3-runtime==4.9.3 anyio==4.4.0 appdirs==1.4.4 @@ -27,6 +28,7 @@ confection==0.1.5 cymem==2.0.8 dataclasses-json==0.6.6 datasketch==1.6.5 +defusedxml==0.7.1 distro==1.9.0 dnspython==2.6.1 docstring-parser==0.16 @@ -60,9 +62,11 @@ huggingface-hub==0.23.3 idna==3.7 iniconfig==2.0.0 jinja2==3.1.4 +jiter==0.5.0 jsonpatch==1.33 jsonpointer==2.4 langchain==0.2.2 +langchain-anthropic==0.1.15 langchain-community==0.2.3 langchain-core==0.2.4 langchain-google-vertexai==1.0.5