
Commit 7c448bf
test
andrewtruong committed Dec 11, 2024
1 parent 9eb44cf
Showing 4 changed files with 183 additions and 59 deletions.
@@ -0,0 +1,102 @@
interactions:
- request:
    body: '{"messages":[{"role":"user","content":"tell me a joke"}],"model":"gpt-4o"}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate, zstd
      connection:
      - keep-alive
      content-length:
      - '74'
      content-type:
      - application/json
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.57.2
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.57.2
      x-stainless-retry-count:
      - '0'
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.13.0rc2
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAAwAAAP//jFJNa9wwEL37V0x1yWVd7P3KspcSSKE5thvooSlGK40tJbJGSOOSNOx/
        L/Z+2KEp9KLDe/Me743mNQMQVostCGUkqza4/Eav1f1O/v4aVvvbL/Pdt7tVHUp1s/vsnhsx6xW0
        f0TFZ9VHRW1wyJb8kVYRJWPvWl4vFpvNYl2UA9GSRtfLmsD5kvJ5MV/mxSYv1iehIaswiS38yAAA
        Xoe3j+g1PostFLMz0mJKskGxvQwBiEiuR4RMySaWnsVsJBV5Rj+k/m5eQJO/YkhP6JDJJ6htYxhQ
        KgPEBuOnB//g7w2eJ438hcAGoek4fZgaR6y7JPtevnPuhB8uSR01IdI+nfgLXltvk6kiykS+T5WY
        ghjYQwbwc9hI96akCJHawBXTE/resCyPdmL8ggm5PJFMLN2Iz1ezd9wqjSytS5ONCiWVQT0qx/XL
        TluaENmk899h3vM+9ra++R/7kVAKA6OuQkRt1dvC41jE/kD/NXbZ8RBYpJfE2Fa19Q3GEO3xRupQ
        qWslC9xLJUV2yP4AAAD//wMA4O+DUSwDAAA=
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 8f01fe3aabd037cf-YYZ
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Wed, 11 Dec 2024 02:20:01 GMT
      Server:
      - cloudflare
      Set-Cookie:
      - __cf_bm=xqe_jHZdTV5LijJQYQ3GMY5MjtVrCyxbFO4glgLvgD0-1733883601-1.0.1.1-p.DDUca_cHppJu2hXzzA0CXU1mtalxHUNfBWVgPIQj.UkU603pbNscCvSIi4_Zjlz9Zuc3.hjlvoyZxcDBJTsw;
        path=/; expires=Wed, 11-Dec-24 02:50:01 GMT; domain=.api.openai.com; HttpOnly;
        Secure; SameSite=None
      - _cfuvid=WEjxXqkGswaEDhllTROGX_go9tgaWNJcUJ3cCd50xDI-1733883601764-0.0.1.1-604800000;
        path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      openai-organization:
      - wandb
      openai-processing-ms:
      - '607'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=31536000; includeSubDomains; preload
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '30000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '29999979'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 0s
      x-request-id:
      - req_8592a74b531c806f65c63c7471101cb6
    status:
      code: 200
      message: OK
version: 1
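
Note: the response body in this cassette is stored as a base64-encoded, gzip-compressed blob (see the Content-Encoding: gzip header). A minimal sketch for inspecting it offline, assuming the full !!binary scalar is pasted in (the string below is a truncated placeholder, not the real value):

import base64
import gzip
import json

# Placeholder: paste the complete !!binary scalar from the cassette here.
encoded = "H4sIAAAAAAAAAwAAAP//..."

# Base64-decode the YAML !!binary value, then gunzip to recover the JSON payload.
raw = gzip.decompress(base64.b64decode(encoded))
payload = json.loads(raw)
print(payload["choices"][0]["message"]["content"])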
30 changes: 30 additions & 0 deletions tests/integrations/openai/test_autopatch.py
@@ -84,3 +84,33 @@ def redact_inputs(inputs: dict[str, Any]) -> dict[str, Any]:

        call = calls[0]
        assert all(v == "REDACTED" for v in call.inputs.values())


@pytest.mark.skip_clickhouse_client  # TODO:VCR recording does not seem to allow us to make requests to the clickhouse db in non-recording mode
@pytest.mark.vcr(
    filter_headers=["authorization"], allowed_hosts=["api.wandb.ai", "localhost"]
)
def test_configuration_with_dicts(client_creator):
    def redact_inputs(inputs: dict[str, Any]) -> dict[str, Any]:
        return dict.fromkeys(inputs, "REDACTED")

    autopatch_settings = {
        "openai": {
            "op_settings": {"postprocess_inputs": redact_inputs},
        }
    }

    openai_sdk._openai_patcher = None

    with client_creator(autopatch_settings=autopatch_settings) as client:
        oaiclient = OpenAI()
        oaiclient.chat.completions.create(
            model="gpt-4o",
            messages=[{"role": "user", "content": "tell me a joke"}],
        )

        calls = list(client.get_calls())
        assert len(calls) == 1

        call = calls[0]
        assert all(v == "REDACTED" for v in call.inputs.values())
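
For reference, the plain-dict autopatch_settings used in this test can also be written with the typed models added in weave/trace/autopatch.py further down in this commit. A sketch of the equivalent configuration (the dict form works because autopatch() is decorated with @validate_call, which validates and coerces its argument):

from typing import Any

from weave.trace.autopatch import AutopatchSettings, IntegrationSettings, OpSettings


def redact_inputs(inputs: dict[str, Any]) -> dict[str, Any]:
    return dict.fromkeys(inputs, "REDACTED")


# Typed equivalent of the dict literal used in test_configuration_with_dicts above.
autopatch_settings = AutopatchSettings(
    openai=IntegrationSettings(
        op_settings=OpSettings(postprocess_inputs=redact_inputs),
    )
)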
25 changes: 10 additions & 15 deletions weave/integrations/openai/openai_sdk.py
@@ -1,6 +1,5 @@
from __future__ import annotations

import dataclasses
import importlib
from functools import wraps
from typing import TYPE_CHECKING, Any, Callable
@@ -327,7 +326,7 @@ def _openai_stream_options_is_set(inputs: dict) -> bool:
                return True
            return False

        op_kwargs = dataclasses.asdict(settings)
        op_kwargs = settings.model_dump()
        op = weave.op(_add_stream_options(fn), **op_kwargs)
        op._set_on_input_handler(openai_on_input_handler)
        return add_accumulator(
@@ -363,7 +362,7 @@ def _openai_stream_options_is_set(inputs: dict) -> bool:
                return True
            return False

        op_kwargs = dataclasses.asdict(settings)
        op_kwargs = settings.model_dump()
        op = weave.op(_add_stream_options(fn), **op_kwargs)
        op._set_on_input_handler(openai_on_input_handler)
        return add_accumulator(
@@ -393,21 +392,17 @@ def get_openai_patcher(

    base = settings.op_settings

    completions_create_settings = dataclasses.replace(
        base,
        name=base.name or "openai.chat.completions.create",
    completions_create_settings = base.model_copy(
        update={"name": base.name or "openai.chat.completions.create"}
    )
    async_completions_create_settings = dataclasses.replace(
        base,
        name=base.name or "openai.chat.completions.create",
    async_completions_create_settings = base.model_copy(
        update={"name": base.name or "openai.chat.completions.create"}
    )
    completions_parse_settings = dataclasses.replace(
        base,
        name=base.name or "openai.beta.chat.completions.parse",
    completions_parse_settings = base.model_copy(
        update={"name": base.name or "openai.beta.chat.completions.parse"}
    )
    async_completions_parse_settings = dataclasses.replace(
        base,
        name=base.name or "openai.beta.chat.completions.parse",
    async_completions_parse_settings = base.model_copy(
        update={"name": base.name or "openai.beta.chat.completions.parse"}
    )

    _openai_patcher = MultiPatcher(
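
The changes in this file swap the dataclass helpers for their pydantic v2 counterparts: dataclasses.asdict(settings) becomes settings.model_dump(), and dataclasses.replace(base, ...) becomes base.model_copy(update={...}). A self-contained sketch of that mapping, using a hypothetical minimal model rather than the real OpSettings:

from typing import Optional

from pydantic import BaseModel


class Settings(BaseModel):  # hypothetical stand-in for OpSettings
    name: Optional[str] = None


base = Settings()

# dataclasses.asdict(base)  ->  base.model_dump()
op_kwargs = base.model_dump()  # {'name': None}

# dataclasses.replace(base, name=...)  ->  base.model_copy(update={...})
named = base.model_copy(update={"name": "openai.chat.completions.create"})
assert named.name == "openai.chat.completions.create"
assert base.name is None  # model_copy returns a new instance; base is unchanged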
85 changes: 41 additions & 44 deletions weave/trace/autopatch.py
@@ -4,15 +4,52 @@
check if libraries are installed and imported and patch in the case that they are.
"""

from __future__ import annotations
from typing import Any, Callable, Optional

from dataclasses import dataclass, field
from typing import Any, Callable
from pydantic import BaseModel, Field, validate_call

from weave.trace.weave_client import Call


def autopatch(settings: AutopatchSettings | None = None) -> None:
class OpSettings(BaseModel):
    """Op settings for a specific integration.
    These currently subset the `op` decorator args to provide a consistent interface
    when working with auto-patched functions. See the `op` decorator for more details."""

    name: Optional[str] = None
    call_display_name: Optional[str | Callable[[Call], str]] = None
    postprocess_inputs: Optional[Callable[[dict[str, Any]], dict[str, Any]]] = None
    postprocess_output: Optional[Callable[[Any], Any]] = None


class IntegrationSettings(BaseModel):
    """Configuration for a specific integration."""

    enabled: bool = True
    op_settings: OpSettings = Field(default_factory=OpSettings)


class AutopatchSettings(BaseModel):
    """Settings for auto-patching integrations."""

    anthropic: IntegrationSettings = Field(default_factory=IntegrationSettings)
    cerebras: IntegrationSettings = Field(default_factory=IntegrationSettings)
    cohere: IntegrationSettings = Field(default_factory=IntegrationSettings)
    dspy: IntegrationSettings = Field(default_factory=IntegrationSettings)
    google_ai_studio: IntegrationSettings = Field(default_factory=IntegrationSettings)
    groq: IntegrationSettings = Field(default_factory=IntegrationSettings)
    instructor: IntegrationSettings = Field(default_factory=IntegrationSettings)
    langchain: IntegrationSettings = Field(default_factory=IntegrationSettings)
    litellm: IntegrationSettings = Field(default_factory=IntegrationSettings)
    llamaindex: IntegrationSettings = Field(default_factory=IntegrationSettings)
    mistral: IntegrationSettings = Field(default_factory=IntegrationSettings)
    notdiamond: IntegrationSettings = Field(default_factory=IntegrationSettings)
    openai: IntegrationSettings = Field(default_factory=IntegrationSettings)
    vertexai: IntegrationSettings = Field(default_factory=IntegrationSettings)

@validate_call
def autopatch(settings: Optional[AutopatchSettings] = None) -> None:
    from weave.integrations.anthropic.anthropic_sdk import anthropic_patcher
    from weave.integrations.cerebras.cerebras_sdk import cerebras_patcher
    from weave.integrations.cohere.cohere_sdk import cohere_patcher
@@ -81,43 +118,3 @@ def reset_autopatch() -> None:
    google_genai_patcher.undo_patch()
    notdiamond_patcher.undo_patch()
    vertexai_patcher.undo_patch()


@dataclass
class OpSettings:
    """Op settings for a specific integration.
    These currently subset the `op` decorator args to provide a consistent interface
    when working with auto-patched functions. See the `op` decorator for more details."""

    name: str | None = None
    call_display_name: str | Callable[[Call], str] | None = None
    postprocess_inputs: Callable[[dict[str, Any]], dict[str, Any]] | None = None
    postprocess_output: Callable[[Any], Any] | None = None


@dataclass
class IntegrationSettings:
    """Configuration for a specific integration."""

    enabled: bool = True
    op_settings: OpSettings = field(default_factory=OpSettings)


@dataclass
class AutopatchSettings:
    """Settings for auto-patching integrations."""

    anthropic: IntegrationSettings = field(default_factory=IntegrationSettings)
    cerebras: IntegrationSettings = field(default_factory=IntegrationSettings)
    cohere: IntegrationSettings = field(default_factory=IntegrationSettings)
    dspy: IntegrationSettings = field(default_factory=IntegrationSettings)
    google_ai_studio: IntegrationSettings = field(default_factory=IntegrationSettings)
    groq: IntegrationSettings = field(default_factory=IntegrationSettings)
    instructor: IntegrationSettings = field(default_factory=IntegrationSettings)
    langchain: IntegrationSettings = field(default_factory=IntegrationSettings)
    litellm: IntegrationSettings = field(default_factory=IntegrationSettings)
    llamaindex: IntegrationSettings = field(default_factory=IntegrationSettings)
    mistral: IntegrationSettings = field(default_factory=IntegrationSettings)
    notdiamond: IntegrationSettings = field(default_factory=IntegrationSettings)
    openai: IntegrationSettings = field(default_factory=IntegrationSettings)
    vertexai: IntegrationSettings = field(default_factory=IntegrationSettings)
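
With the settings classes now pydantic models and autopatch() wrapped in @validate_call, the typed and plain-dict configurations should validate to the same thing. A small sketch of that coercion, independent of the patchers themselves and using trimmed stand-ins for the classes above:

from typing import Optional

from pydantic import BaseModel, Field, validate_call


class IntegrationSettings(BaseModel):  # trimmed stand-in for the class above
    enabled: bool = True


class AutopatchSettings(BaseModel):  # trimmed stand-in for the class above
    openai: IntegrationSettings = Field(default_factory=IntegrationSettings)


@validate_call
def autopatch(settings: Optional[AutopatchSettings] = None) -> None:
    # By the time the body runs, validate_call has coerced `settings` into
    # an AutopatchSettings instance (or left it as None).
    print(type(settings).__name__, settings.openai.enabled if settings else None)


autopatch(AutopatchSettings(openai=IntegrationSettings(enabled=False)))  # typed form
autopatch({"openai": {"enabled": False}})  # plain-dict form, coerced by validate_call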
