diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml
index b2d6a1729..463fcbe9a 100644
--- a/.github/workflows/daily.yml
+++ b/.github/workflows/daily.yml
@@ -3,9 +3,7 @@ name: "os-support-tests"
on:
workflow_dispatch:
# Scheduled workflows will only run on the default branch.
- schedule:
- - cron: '0 0 * * *' # runs once a day at midnight in the timezone of your GitHub repository
-
+
defaults:
run:
shell: bash
diff --git a/.github/workflows/sdk-tests.yml b/.github/workflows/sdk-tests.yml
index 107ac8705..012bfa3f4 100644
--- a/.github/workflows/sdk-tests.yml
+++ b/.github/workflows/sdk-tests.yml
@@ -129,6 +129,27 @@ jobs:
password: ${{ secrets.GH_PAT }}
ports:
- "3000:3000"
+ postgres:
+ image: postgres:15
+ ports:
+ - "5433:5432"
+ env:
+ POSTGRES_DB: "il_sdk"
+ POSTGRES_USER: "il_sdk"
+ POSTGRES_PASSWORD: "test"
+ studio-backend:
+ image: registry.gitlab.aleph-alpha.de/product/studio/backend:latest
+ ports:
+ - "8000:8000"
+ env:
+ DATABASE_URL: "postgres:5432"
+ POSTGRES_DB: "il_sdk"
+ POSTGRES_USER: "il_sdk"
+ POSTGRES_PASSWORD: "test"
+ AUTHORIZATION_SERVICE_URL: "none"
+ credentials:
+ username: "unused"
+ password: ${{ secrets.GL_STUDIO_CONTAINER_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -164,6 +185,7 @@ jobs:
ARGILLA_API_URL: "http://localhost:6900/"
ARGILLA_API_KEY: "argilla.apikey"
CLIENT_URL: "https://api.aleph-alpha.com"
+ STUDIO_URL: "http://localhost:8000/"
run: |
./scripts/test.sh
run-notebooks:
@@ -186,6 +208,27 @@ jobs:
env:
ARGILLA_ELASTICSEARCH: "http://argilla-elastic-search:9200"
ARGILLA_ENABLE_TELEMETRY: 0
+ postgres:
+ image: postgres:15
+ ports:
+ - "5433:5432"
+ env:
+ POSTGRES_DB: "il_sdk"
+ POSTGRES_USER: "il_sdk"
+ POSTGRES_PASSWORD: "test"
+ studio-backend:
+ image: registry.gitlab.aleph-alpha.de/product/studio/backend:latest
+ ports:
+ - "8000:8000"
+ env:
+ DATABASE_URL: "postgres:5432"
+ POSTGRES_DB: "il_sdk"
+ POSTGRES_USER: "il_sdk"
+ POSTGRES_PASSWORD: "test"
+ AUTHORIZATION_SERVICE_URL: "none"
+ credentials:
+ username: "unused"
+ password: ${{ secrets.GL_STUDIO_CONTAINER_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -217,5 +260,6 @@ jobs:
ARGILLA_API_URL: "http://localhost:6900/"
ARGILLA_API_KEY: "argilla.apikey"
CLIENT_URL: "https://api.aleph-alpha.com"
+          STUDIO_URL: "http://localhost:8000/"
run: |
./scripts/notebook_runner.sh
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 77de72112..c6e5d5ebc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,13 +6,13 @@
...
### Features
-...
+- Add `StudioClient` as connector to PhariaStudio for submitting traces.
### Fixes
...
### Deprecations
-...
+- Deprecate old Trace Viewer as the new `StudioClient` replaces it. This affects `Tracer.submit_to_trace_viewer`.
## 5.0.3
diff --git a/README.md b/README.md
index b260363a6..a4268d738 100644
--- a/README.md
+++ b/README.md
@@ -149,7 +149,7 @@ The how-tos are quick lookups about how to do things. Compared to the tutorials,
| [...define a task](./src/documentation/how_tos/how_to_define_a_task.ipynb) | How to come up with a new task and formulate it |
| [...implement a task](./src/documentation/how_tos/how_to_implement_a_task.ipynb) | Implement a formulated task and make it run with the Intelligence Layer |
| [...debug and log a task](./src/documentation/how_tos/how_to_log_and_debug_a_task.ipynb) | Tools for logging and debugging in tasks |
-| [...run the trace viewer](./src/documentation/how_tos/how_to_run_the_trace_viewer.ipynb) | Downloading and running the trace viewer for debugging traces |
+| [...use PhariaStudio with traces](./src/documentation/how_tos/how_to_use_pharia_studio_with_traces.ipynb) | Submitting Traces to PhariaStudio for debugging |
| **Analysis Pipeline** | |
| [...implement a simple evaluation and aggregation logic](./src/documentation/how_tos/how_to_implement_a_simple_evaluation_and_aggregation_logic.ipynb) | Basic examples of evaluation and aggregation logic |
| [...create a dataset](./src/documentation/how_tos/how_to_create_a_dataset.ipynb) | Create a dataset used for running a task |
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 263b62008..190f65855 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -38,6 +38,26 @@ services:
image: ghcr.io/aleph-alpha/trace-viewer-trace-viewer:main
ports:
- 3000:3000
+
+ # export GITLAB_TOKEN=...
+ # (optional) export GITLAB_TOKEN=$(op item get YOUR_TOKEN --format json --fields password | jq .value | tr -d '"')
+ # echo $GITLAB_TOKEN | docker login registry.gitlab.aleph-alpha.de -u your_email@for_gitlab --password-stdin
+ # docker compose pull to update containers
+ studio-backend:
+ image: registry.gitlab.aleph-alpha.de/product/studio/backend:latest
+ ports:
+ - 8000:8000
+ depends_on:
+ postgres:
+ condition: service_started
+ restart: true
+ environment:
+ DATABASE_URL: postgres:5432
+ POSTGRES_DB: il_sdk
+ POSTGRES_USER: il_sdk
+ POSTGRES_PASSWORD: test
+
+ AUTHORIZATION_SERVICE_URL: "none"
postgres:
image: postgres:15
ports:
diff --git a/env.sample b/env.sample
index c687dc0f8..a7e94aab4 100644
--- a/env.sample
+++ b/env.sample
@@ -5,3 +5,4 @@ ARGILLA_API_KEY="argilla.apikey"
HUGGING_FACE_TOKEN=token
# local dev builds run on 5173
TRACE_VIEWER_URL="http://localhost:3000"
+STUDIO_URL=http://localhost:8000
diff --git a/src/documentation/how_tos/how_to_log_and_debug_a_task.ipynb b/src/documentation/how_tos/how_to_log_and_debug_a_task.ipynb
index 9b62fccc3..d30aa7c7c 100644
--- a/src/documentation/how_tos/how_to_log_and_debug_a_task.ipynb
+++ b/src/documentation/how_tos/how_to_log_and_debug_a_task.ipynb
@@ -7,10 +7,12 @@
"outputs": [],
"source": [
"import random\n",
+ "from uuid import uuid4\n",
"\n",
"from aleph_alpha_client import Prompt\n",
"from dotenv import load_dotenv\n",
"\n",
+ "from intelligence_layer.connectors import StudioClient\n",
"from intelligence_layer.core import (\n",
" CompleteInput,\n",
" InMemoryTracer,\n",
@@ -37,10 +39,7 @@
" - To create custom logging messages in a trace use `task_span.log()`.\n",
" - To map a complex execution flow of a task into a single trace, pass the `task_span` of the `do_run` to other execution methods (e.g. `Task.run()` or `model.complete()`). \n",
" - If the execution method is not provided by the intelligence layer, the tracing of input and output has to happen manually. See the implementation of `Task.run()` for an example.\n",
- " - Use the [trace viewer](./how_to_run_the_trace_viewer.ipynb) to view and inspect a trace\n",
- " - Use and display an `InMemoryTracer` in a notebook to automatically send the trace data to the trace viewer.\n",
- " - Note: This also works for traces of the `Runner` and the `Evaluator`.\n",
- " - To create persistent traces, use the `FileTracer` instead. This creates files which can manually be uploaded in the trace viewer UI."
+ " - Use the [submit trace functionality of the `StudioClient`](./how_to_use_pharia_studio_with_traces.ipynb) to view and inspect a trace in PhariaStudio"
]
},
{
@@ -77,9 +76,13 @@
"\n",
"tracer = InMemoryTracer()\n",
"DummyTask().run(\"\", tracer)\n",
- "# ! make sure to run the trace viewer docker container to get the improved display !\n",
- "# display an InMemoryTracer in a notebook and send the data to the trace viewer\n",
- "display(tracer)\n",
+ "\n",
+ "project_name = str(uuid4())\n",
+ "studio_client = StudioClient(project=project_name)\n",
+ "my_project = studio_client.create_project(project=project_name)\n",
+ "\n",
+ "submitted_trace_id = studio_client.submit_from_tracer(tracer)\n",
+ "\n",
"\n",
"pass"
]
diff --git a/src/documentation/how_tos/how_to_run_the_trace_viewer.ipynb b/src/documentation/how_tos/how_to_run_the_trace_viewer.ipynb
deleted file mode 100644
index 4daf1fed6..000000000
--- a/src/documentation/how_tos/how_to_run_the_trace_viewer.ipynb
+++ /dev/null
@@ -1,35 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Running the Trace Viewer\n",
- "\n",
- "Make sure you have your access to the Jfrog instance at https://alephalpha.jfrog.io. \n",
- "Then login to the container registry with docker with your JFrog user name and a JFrog token as the password with the following command:\n",
- "\n",
- "```bash\n",
- "docker login alephalpha.jfrog.io\n",
- "```\n",
- "\n",
- "Note: If you do not already have a JFrog token, you can find it on the website under the \"Set me up\" option, either in the resource of interest or under your profile name.\n",
- "\n",
- "Afterwards, run the container locally to start the trace viewer:\n",
- "\n",
- "```bash\n",
- "docker run -p 3000:3000 alephalpha.jfrog.io/container-images/trace-viewer:latest\n",
- "```\n",
- "\n",
- "Finally, visit `http://localhost:3000`, where you can upload a trace to interact with the data."
- ]
- }
- ],
- "metadata": {
- "language_info": {
- "name": "python"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/src/documentation/how_tos/how_to_use_pharia_studio_with_traces.ipynb b/src/documentation/how_tos/how_to_use_pharia_studio_with_traces.ipynb
new file mode 100644
index 000000000..5161f0295
--- /dev/null
+++ b/src/documentation/how_tos/how_to_use_pharia_studio_with_traces.ipynb
@@ -0,0 +1,96 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from uuid import uuid4\n",
+ "\n",
+ "from intelligence_layer.connectors import StudioClient\n",
+ "from intelligence_layer.core import InMemoryTracer, Task, TaskSpan"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# How to use PhariaStudio for Debugging in a SaaS Configuration\n",
+    "<div class=\"alert alert-warning\">\n",
+ "\n",
+ "Make sure your account has permissions to use the PhariaStudio application.\n",
+ "\n",
+ "For an on-prem or local installation, please contact the PhariaStudio team.\n",
+    "</div>"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "\n",
+ "0. Generate a trace of your `Task` of interest.\n",
+ "1. Initialize a `StudioClient` with a project.\n",
+ " - Use an existing project or create a new one with the `StudioClient.create_project` function.\n",
+ "2. Submit your traces with the client\n",
+ " 1. Submit a single trace via `Tracer.export_for_viewing` and `StudioClient.submit_trace`\n",
+ " 2. [Recommended] submit multiple traces via `StudioClient.submit_from_tracer`. \n",
+ "\n",
+ "### Example"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Step 0\n",
+ "class DummyTask(Task[str, str]):\n",
+ " def do_run(self, input: str, task_span: TaskSpan) -> str:\n",
+ " return f\"{input} -> output\"\n",
+ "\n",
+ "\n",
+ "tracer = InMemoryTracer()\n",
+ "DummyTask().run(\"My Dummy Run\", tracer=tracer)\n",
+ "\n",
+ "# Step 1\n",
+ "project_name = str(uuid4())\n",
+ "studio_client = StudioClient(project=project_name)\n",
+ "my_project = studio_client.create_project(project=project_name)\n",
+ "\n",
+ "# Step 2.1\n",
+ "trace_to_submit = tracer.export_for_viewing()\n",
+ "trace_id = studio_client.submit_trace(trace_to_submit) # only works for single traces\n",
+ "\n",
+ "# Step 2.2\n",
+ "tracer2 = InMemoryTracer()\n",
+ "DummyTask().run(\"My Dummy Run2\", tracer=tracer2)\n",
+ "DummyTask().run(\"My Dummy Run3\", tracer=tracer2)\n",
+ "ids_of_submitted_traces = studio_client.submit_from_tracer(tracer2)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "intelligence-layer-aL2cXmJM-py3.11",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.8"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/src/intelligence_layer/connectors/__init__.py b/src/intelligence_layer/connectors/__init__.py
index f5a246df9..026cbb3f5 100644
--- a/src/intelligence_layer/connectors/__init__.py
+++ b/src/intelligence_layer/connectors/__init__.py
@@ -44,5 +44,6 @@
QdrantInMemoryRetriever as QdrantInMemoryRetriever,
)
from .retrievers.qdrant_in_memory_retriever import RetrieverType as RetrieverType
+from .studio.studio import StudioClient as StudioClient
__all__ = [symbol for symbol in dir() if symbol and symbol[0].isupper()]
diff --git a/src/intelligence_layer/connectors/document_index/document_index.py b/src/intelligence_layer/connectors/document_index/document_index.py
index 6ce714eff..e09bf69b8 100644
--- a/src/intelligence_layer/connectors/document_index/document_index.py
+++ b/src/intelligence_layer/connectors/document_index/document_index.py
@@ -295,7 +295,7 @@ class DocumentIndexClient:
Document Index is a tool for managing collections of documents, enabling operations such as creation, deletion, listing, and searching.
Documents can be stored either in the cloud or in a local deployment.
- Args:
+ Attributes:
token: A valid token for the document index API.
base_document_index_url: The url of the document index' API.
diff --git a/src/intelligence_layer/connectors/studio/studio.py b/src/intelligence_layer/connectors/studio/studio.py
new file mode 100644
index 000000000..a6888649a
--- /dev/null
+++ b/src/intelligence_layer/connectors/studio/studio.py
@@ -0,0 +1,193 @@
+import os
+from collections import defaultdict
+from collections.abc import Sequence
+from typing import Optional
+from urllib.parse import urljoin
+
+import requests
+from pydantic import BaseModel
+from requests.exceptions import ConnectionError, MissingSchema
+
+from intelligence_layer.core.tracer.tracer import ( # Import to be fixed with PHS-731
+ ExportedSpan,
+ ExportedSpanList,
+ Tracer,
+)
+
+
+class StudioProject(BaseModel):
+ name: str
+ description: Optional[str]
+
+
+class StudioClient:
+ """Client for communicating with PhariaStudio.
+
+ Attributes:
+ project_id: The unique identifier of the project currently in use.
+ url: The url of your current PhariaStudio instance.
+ """
+
+ def __init__(
+ self,
+ project: str,
+ studio_url: Optional[str] = None,
+ auth_token: Optional[str] = None,
+ ) -> None:
+ """Initializes the client.
+
+ Runs a health check to check for a valid url of the Studio connection.
+ It does not check for a valid authentication token, which happens later.
+
+ Args:
+ project: The human readable identifier provided by the user.
+ studio_url: The url of your current PhariaStudio instance.
+ auth_token: The authorization bearer token of the user. This corresponds to the user's Aleph Alpha token.
+ """
+ self._token = auth_token if auth_token is not None else os.getenv("AA_TOKEN")
+ if self._token is None:
+ raise ValueError(
+ "'AA_TOKEN' is not set and auth_token is not given as a parameter. Please provide one or the other."
+ )
+ self._headers = {
+ "Content-Type": "application/json",
+ "Accept": "application/json",
+ "Authorization": f"Bearer {self._token}",
+ }
+
+ temp_url = studio_url if studio_url is not None else os.getenv("STUDIO_URL")
+ if temp_url is None:
+ raise ValueError(
+ "'STUDIO_URL' is not set and url is not given as a parameter. Please provide one or the other."
+ )
+ self.url = temp_url
+
+ self._check_connection()
+
+ self._project_name = project
+ self._project_id: int | None = None
+
+ def _check_connection(self) -> None:
+ try:
+ url = urljoin(self.url, "/health")
+ response = requests.get(
+ url,
+ headers=self._headers,
+ )
+ response.raise_for_status()
+ except MissingSchema:
+ raise ValueError(
+ "The given url of the studio client is invalid. Make sure to include http:// in your url."
+ ) from None
+ except ConnectionError:
+ raise ValueError(
+ "The given url of the studio client does not point to a server."
+ ) from None
+ except requests.HTTPError:
+ raise ValueError(
+ f"The given url of the studio client does not point to a healthy studio: {response.status_code}: {response.json()}"
+ ) from None
+
+ @property
+ def project_id(self) -> int:
+ if self._project_id is None:
+ project_id = self._get_project(self._project_name)
+ if project_id is None:
+ raise ValueError(
+ f"Project {self._project_name} was not available. Consider creating it with `StudioClient.create_project`."
+ )
+ self._project_id = project_id
+ return self._project_id
+
+ def _get_project(self, project: str) -> int | None:
+ url = urljoin(self.url, "/api/projects")
+ response = requests.get(
+ url,
+ headers=self._headers,
+ )
+ response.raise_for_status()
+ all_projects = response.json()
+ try:
+ project_of_interest = next(
+ proj for proj in all_projects if proj["name"] == project
+ )
+ return int(project_of_interest["id"])
+ except StopIteration:
+ return None
+
+ def create_project(self, project: str, description: Optional[str] = None) -> int:
+ """Creates a project in PhariaStudio.
+
+ Projects are uniquely identified by the user provided name.
+
+ Args:
+ project: User provided name of the project.
+ description: Description explaining the usage of the project. Defaults to None.
+
+ Returns:
+ The ID of the newly created project.
+ """
+ url = urljoin(self.url, "/api/projects")
+ data = StudioProject(name=project, description=description)
+ response = requests.post(
+ url,
+ data=data.model_dump_json(),
+ headers=self._headers,
+ )
+ match response.status_code:
+ case 409:
+ raise ValueError("Project already exists")
+ case _:
+ response.raise_for_status()
+ return int(response.text)
+
+ def submit_trace(self, data: Sequence[ExportedSpan]) -> str:
+ """Sends the provided spans to Studio as a singular trace.
+
+ The method fails if the span list is empty, has already been created or if
+ spans belong to multiple traces.
+
+ Args:
+ data: Spans to create the trace from. Created by exporting from a `Tracer`.
+
+ Returns:
+ The ID of the created trace.
+ """
+ if len(data) == 0:
+ raise ValueError("Tried to upload an empty trace")
+ return self._upload_trace(ExportedSpanList(data))
+
+ def submit_from_tracer(self, tracer: Tracer) -> list[str]:
+ """Sends all trace data from the Tracer to Studio.
+
+ Args:
+ tracer: Tracer to extract data from.
+
+ Returns:
+ List of created trace IDs.
+ """
+ traces = defaultdict(list)
+ for span in tracer.export_for_viewing():
+ traces[span.context.trace_id].append(span)
+
+ return [self.submit_trace(value) for value in traces.values()]
+
+ def _upload_trace(self, trace: ExportedSpanList) -> str:
+ url = urljoin(self.url, f"/api/projects/{self.project_id}/traces")
+ response = requests.post(
+ url,
+ data=trace.model_dump_json(),
+ headers=self._headers,
+ )
+ match response.status_code:
+ case 409:
+ raise ValueError(
+ f"Trace with id {trace.root[0].context.trace_id} already exists."
+ )
+ case 422:
+ raise ValueError(
+ f"Uploading the trace failed with 422. Response: {response.json()}"
+ )
+ case _:
+ response.raise_for_status()
+ return str(response.json())
diff --git a/src/intelligence_layer/core/tracer/tracer.py b/src/intelligence_layer/core/tracer/tracer.py
index b2e78dec1..1983a7105 100644
--- a/src/intelligence_layer/core/tracer/tracer.py
+++ b/src/intelligence_layer/core/tracer/tracer.py
@@ -1,5 +1,6 @@
import os
import traceback
+import warnings
from abc import ABC, abstractmethod
from collections.abc import Mapping, Sequence
from contextlib import AbstractContextManager
@@ -208,6 +209,10 @@ def export_for_viewing(self) -> Sequence[ExportedSpan]:
...
def submit_to_trace_viewer(self) -> bool:
+ warnings.warn(
+ "TraceViewer will be removed soon. Use the Studio Trace functionality instead.",
+ DeprecationWarning,
+ )
return submit_to_trace_viewer(self.export_for_viewing())
diff --git a/tests/connectors/studio/test_studio.py b/tests/connectors/studio/test_studio.py
new file mode 100644
index 000000000..7bfff9682
--- /dev/null
+++ b/tests/connectors/studio/test_studio.py
@@ -0,0 +1,153 @@
+import os
+import time
+from collections.abc import Sequence
+from typing import Any
+from unittest.mock import patch
+from uuid import uuid4
+
+import pytest
+from dotenv import load_dotenv
+from pytest import fixture
+
+from intelligence_layer.connectors import StudioClient
+from intelligence_layer.core import ExportedSpan, InMemoryTracer, Task, TaskSpan
+
+
+class TracerTestSubTask(Task[None, None]):
+ def do_run(self, input: None, task_span: TaskSpan) -> None:
+ task_span.log("subtask", "value")
+
+
+class TracerTestTask(Task[str, str]):
+ sub_task = TracerTestSubTask()
+
+ def do_run(self, input: str, task_span: TaskSpan) -> str:
+ time.sleep(0.001)
+ with task_span.span("span") as sub_span:
+ time.sleep(0.001)
+ sub_span.log("message", "a value")
+ time.sleep(0.001)
+ self.sub_task.run(None, sub_span)
+ time.sleep(0.001)
+ self.sub_task.run(None, task_span)
+ try:
+ with task_span.task_span("Error task", "myInput"):
+ raise ValueError("oops")
+ except Exception as _:
+ pass
+ time.sleep(0.001)
+ return "output"
+
+
+@fixture
+def test_trace() -> Sequence[ExportedSpan]:
+ tracer = InMemoryTracer()
+ task = TracerTestTask()
+ task.run("my input", tracer)
+ return tracer.export_for_viewing()
+
+
+@fixture
+def studio_client() -> StudioClient:
+ load_dotenv()
+ project_name = str(uuid4())
+ client = StudioClient(project_name)
+ client.create_project(project_name)
+ return client
+
+
+def test_cannot_connect_to_non_existing_project() -> None:
+ project_name = "non-existing-project"
+ with pytest.raises(ValueError, match=project_name):
+ StudioClient(project="non-existing-project").project_id # noqa: B018
+
+
+def test_cannot_create_the_same_project_twice() -> None:
+ project_name = str(uuid4())
+ client = StudioClient(project="IL-default-project")
+ client.create_project(project_name)
+ with pytest.raises(ValueError, match="already exists"):
+ client.create_project(project_name)
+
+
+def test_can_upload_trace(
+ test_trace: Sequence[ExportedSpan], studio_client: StudioClient
+) -> None:
+ id = studio_client.submit_trace(test_trace)
+
+ assert id == str(test_trace[0].context.trace_id)
+
+
+def test_cannot_upload_empty_trace(studio_client: StudioClient) -> None:
+ with pytest.raises(ValueError, match="empty"):
+ studio_client.submit_trace([])
+
+
+def test_cannot_upload_same_trace_twice(
+ test_trace: Sequence[ExportedSpan], studio_client: StudioClient
+) -> None:
+ studio_client.submit_trace(test_trace)
+ with pytest.raises(ValueError):
+ studio_client.submit_trace(test_trace)
+
+
+def test_submit_trace_cannot_upload_lists_with_multiple_traces(
+ studio_client: StudioClient,
+) -> None:
+ tracer = InMemoryTracer()
+ with tracer.span("test"):
+ pass
+ with tracer.span("test2"):
+ pass
+ data = tracer.export_for_viewing()
+
+ with pytest.raises(ValueError):
+ studio_client.submit_trace(data)
+ # TODO
+
+
+def test_handles_invalid_url() -> None:
+ with pytest.raises(ValueError, match="invalid"):
+        StudioClient(str(uuid4()), studio_url="unknown-url")
+
+
+def test_handles_valid_but_incorrect_url() -> None:
+ with pytest.raises(ValueError, match="does not point to a server"):
+        StudioClient(str(uuid4()), studio_url="http://invalid-test-url-123456543")
+
+
+def test_handles_no_auth_configured() -> None:
+ def mock_return(var: Any) -> Any:
+ if var == "AA_TOKEN":
+ return None
+ else:
+ return os.environ[var]
+
+ with patch("os.getenv", side_effect=mock_return) as _: # noqa: SIM117
+ with pytest.raises(ValueError, match="auth_token"):
+            StudioClient(str(uuid4()))
+
+
+def test_submit_from_tracer_can_upload_lists_with_multiple_traces(
+ studio_client: StudioClient,
+) -> None:
+ tracer = InMemoryTracer()
+ task = TracerTestTask()
+ task.run("my input", tracer)
+ task.run("my second input", tracer)
+
+ id_list = set(str(span.context.trace_id) for span in tracer.export_for_viewing())
+
+ trace_id_list = set(studio_client.submit_from_tracer(tracer))
+
+ assert trace_id_list == id_list
+
+
+def test_submit_from_tracer_works_with_empty_tracer(
+ studio_client: StudioClient,
+) -> None:
+ tracer = InMemoryTracer()
+
+ empty_trace_id_list = studio_client.submit_from_tracer(tracer)
+
+ assert len(empty_trace_id_list) == 0