From 38eaee7ee97337fd65c5e3553e6c18048cee8f42 Mon Sep 17 00:00:00 2001 From: Kai Liu Date: Wed, 20 Mar 2024 07:17:33 +0100 Subject: [PATCH 01/10] Add sqlite data layer and a simple sanity check --- backend/chainlit/data/sqlite.py | 459 ++++++++++++++++++ backend/pyproject.toml | 1 + .../data_layer_sqlite/.chainlit/config.toml | 62 +++ cypress/e2e/data_layer_sqlite/main.py | 28 ++ cypress/e2e/data_layer_sqlite/spec.cy.ts | 37 ++ 5 files changed, 587 insertions(+) create mode 100644 backend/chainlit/data/sqlite.py create mode 100644 cypress/e2e/data_layer_sqlite/.chainlit/config.toml create mode 100644 cypress/e2e/data_layer_sqlite/main.py create mode 100644 cypress/e2e/data_layer_sqlite/spec.cy.ts diff --git a/backend/chainlit/data/sqlite.py b/backend/chainlit/data/sqlite.py new file mode 100644 index 0000000000..70bcf0dc0e --- /dev/null +++ b/backend/chainlit/data/sqlite.py @@ -0,0 +1,459 @@ +import asyncio +import os +import uuid +from datetime import datetime +from typing import Optional, Union, cast +from urllib.parse import urlparse + +import literalai +from chainlit.data import ChainlitDataLayer +from literalai import ( + ChatGeneration, + CompletionGeneration, + FeedbackStrategy, + PageInfo, + PaginatedResponse, +) +from literalai.step import StepType +from sqlalchemy import JSON, Column, DateTime, ForeignKey, Integer, String +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from sqlalchemy.future import select +from sqlalchemy.orm import DeclarativeBase + +from chainlit import config + + +class Base(DeclarativeBase): + pass + + +class User(Base): + __tablename__ = "user" + id = Column(String, primary_key=True, index=True, default=lambda: str(uuid.uuid4())) + identifier = Column(String, unique=True, index=True) + created_at = Column(DateTime, default=datetime.utcnow) + metadata_ = Column(JSON) + + +class Thread(Base): + __tablename__ = "thread" + id = Column(String, primary_key=True, index=True, default=lambda: str(uuid.uuid4())) + name = Column(String, nullable=True) + user_id = Column(Integer, ForeignKey("user.id"), nullable=True) + created_at = Column(DateTime, default=datetime.utcnow) + metadata_ = Column(JSON, nullable=True) + tags = Column(JSON, nullable=True) + + +class Step(Base): + __tablename__ = "step" + id = Column(String, primary_key=True, index=True, default=lambda: str(uuid.uuid4())) + thread_id = Column(String, ForeignKey("thread.id")) + parent_id = Column(String, ForeignKey("step.id")) + created_at = Column(DateTime, default=datetime.utcnow) + start_time = Column(DateTime) + end_time = Column(DateTime, nullable=True) + name = Column(String) + type = Column(String) + input = Column(JSON) + output = Column(JSON) + generation = Column(JSON) + metadata_ = Column(JSON) + + +class Feedback(Base): + __tablename__ = "feedback" + id = Column(String, primary_key=True, index=True, default=lambda: str(uuid.uuid4())) + step_id = Column(String, ForeignKey("step.id")) + value = Column(Integer) + comment = Column(String) + strategy = Column(String) + + +class Attachment(Base): + __tablename__ = "attachment" + id = Column(String, primary_key=True, index=True, default=lambda: str(uuid.uuid4())) + thread_id = Column(String, ForeignKey("thread.id")) + step_id = Column(String, ForeignKey("step.id")) + name = Column(String) + mime = Column(String) + object_key = Column(String) + url = Column(String) + metadata_ = Column(JSON) + + +class SQLiteClient: + def __init__(self, database_url=None): + if database_url is None: + database_url = 
"sqlite+aiosqlite:///chainlit.db" + parsed_url = urlparse(database_url) + db_path = os.path.abspath(os.path.join(parsed_url.netloc, parsed_url.path)) + os.makedirs(os.path.dirname(db_path), exist_ok=True) + + self.engine = create_async_engine(database_url, echo=True) + self.api = API(self.engine) + asyncio.run(self.create_tables()) + + async def create_tables(self): + async with self.engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + +class API: + def __init__(self, engine): + self.async_sessionmaker = async_sessionmaker( + bind=engine, class_=AsyncSession, expire_on_commit=False + ) + + async def get_user(self, identifier): + async with self.async_sessionmaker() as session: + result = await session.execute( + select(User).filter_by(identifier=identifier) + ) + user = result.scalars().first() + if user: + return literalai.User( + id=user.id, + identifier=user.identifier, + metadata=user.metadata_, + created_at=user.created_at.isoformat(), + ) + return None + + async def create_user(self, identifier, metadata): + async with self.async_sessionmaker() as session: + new_user = User( + id=str(uuid.uuid4()), identifier=identifier, metadata_=metadata + ) + session.add(new_user) + await session.commit() + return literalai.User( + id=new_user.id, + identifier=new_user.identifier, + metadata=new_user.metadata_, + created_at=new_user.created_at.isoformat(), + ) + + async def update_user(self, id, metadata): + async with self.async_sessionmaker() as session: + user = await session.get(User, id) + if user: + user.metadata_ = metadata + await session.commit() + return literalai.User( + id=user.id, + identifier=user.identifier, + metadata=user.metadata_, + created_at=user.created_at.isoformat(), + ) + return None + + async def create_feedback(self, step_id, value, comment, strategy): + async with self.async_sessionmaker() as session: + new_feedback = Feedback( + id=str(uuid.uuid4()), + step_id=step_id, + value=value, + comment=comment, + strategy=strategy, + ) + session.add(new_feedback) + await session.commit() + return new_feedback + + async def update_feedback(self, id, update_params): + async with self.async_sessionmaker() as session: + feedback = await session.get(Feedback, id) + if feedback: + feedback.comment = update_params.comment + feedback.strategy = update_params.strategy + feedback.value = update_params.value + await session.commit() + return feedback + + async def get_attachment(self, id): + async with self.async_sessionmaker() as session: + return await session.get(Attachment, id) + + async def delete_attachment(self, id): + logger.info(f"###### delete_attachment id {id}") + async with self.async_sessionmaker() as session: + attachment = await session.get(Attachment, id) + if attachment: + # Delete the attachment if found + await session.delete(attachment) + await session.commit() + + async def upload_file(self, content, mime, thread_id): + return {"object_key": f"projects/todo/threads/{thread_id}/files/todo"} + + async def delete_step(self, id): + async with self.async_sessionmaker() as session: + step = await session.get(Step, id) + if step: + await session.delete(step) + await session.commit() + query = select(Feedback).filter_by(step_id=id) + result = await session.execute(query) + feedbacks = result.scalars().all() + for feedback in feedbacks: + await session.delete(feedback) + await session.commit() + query = select(Attachment).filter_by(step_id=id) + result = await session.execute(query) + attachments = result.scalars().all() + for attachment in attachments: 
+ await self.delete_attachment(str(attachment.id)) + + async def send_steps(self, steps): + + def convert_to_datetime(value): + if value is None: + return None + return datetime.fromisoformat(value.rstrip("Z")) + + async with self.async_sessionmaker() as session: + for step_dict in steps: + step_id = step_dict.get("id") + if "attachments" in step_dict: + for attachment_dict in step_dict["attachments"]: + attachment = Attachment( + id=attachment_dict.get("id"), + step_id=step_id, + name=attachment_dict.get("name"), + mime=attachment_dict.get("mime"), + object_key=attachment_dict.get("objectKey"), + url=attachment_dict.get("url"), + metadata_=attachment_dict.get("metadata"), + ) + session.add(attachment) + else: + existing_step = await session.get(Step, step_id) + if existing_step: + existing_step.end_time = convert_to_datetime( + step_dict.get("endTime") + ) + existing_step.generation = step_dict.get("generation") + existing_step.name = step_dict.get("name") + existing_step.metadata_.update(step_dict.get("metadata")) + existing_step.input = step_dict.get("input") + existing_step.output = step_dict.get("output") + else: + step = Step( + created_at=convert_to_datetime(step_dict.get("createdAt")), + start_time=convert_to_datetime(step_dict.get("startTime")), + end_time=convert_to_datetime(step_dict.get("endTime")), + generation=step_dict.get("generation"), + id=step_dict.get("id"), + parent_id=step_dict.get("parentId"), + name=step_dict.get("name"), + thread_id=step_dict.get("threadId"), + type=step_dict.get("type"), + metadata_=step_dict.get("metadata"), + input=step_dict.get("input"), + output=step_dict.get("output"), + ) + session.add(step) + await session.commit() + + async def get_thread(self, id): + async with self.async_sessionmaker() as session: + stmt = select(Thread).filter_by(id=id) + result = await session.execute(stmt) + thread = result.scalars().first() + if thread is None: + return None + async with self.async_sessionmaker() as session: + stmt = select(Step).filter_by(thread_id=id) + result = await session.execute(stmt) + thread_steps = result.scalars().all() + + steps = [] + if thread_steps: + for step in thread_steps: + if config.ui.hide_cot and step.parent_id: + continue + if not config.features.prompt_playground and step.generation: + step.generation = None + elif step.generation is not None: + step.generation = literalai.BaseGeneration.from_dict( + step.generation + ) + async with self.async_sessionmaker() as session: + stmt = select(Attachment).filter(Attachment.step_id == step.id) + result = await session.execute(stmt) + attachments_ = [] + attachments = result.scalars().all() + for attachment in attachments: + attachments_.append(self.attachment_to_attachment(attachment)) + step.attachments = attachments_ + async with self.async_sessionmaker() as session: + stmt = select(Feedback).filter(Feedback.step_id == step.id) + result = await session.execute(stmt) + feedback = result.scalars().first() + steps.append(self.step_to_step(step, feedback)) + user = None + if thread.user_id: + async with self.async_sessionmaker() as session: + result = await session.execute( + select(User).filter_by(id=thread.user_id) + ) + persisted_user = result.scalars().first() + user = literalai.User( + id=thread.user_id or "", + identifier=persisted_user.identifier or "", + metadata=persisted_user.metadata_, + ) + + thread_ = literalai.Thread( + id=str(thread.id), + name=str(thread.name) or None, + steps=steps, + metadata=thread.metadata_, + user=user, + tags=thread.tags, + ) + thread_.created_at = 
thread.created_at.isoformat() + return thread_ + + async def delete_thread(self, id): + async with self.async_sessionmaker() as session: + thread = await session.get(Thread, id) + if thread: + await session.delete(thread) + await session.commit() + query = select(Step).filter_by(thread_id=id) + result = await session.execute(query) + steps = result.scalars().all() + for step in steps: + await self.delete_step(str(step.id)) + + async def list_threads(self, first, after, filters): + query = ( + select(Thread) + .join(User) + .filter_by(identifier=filters.participantsIdentifier.value[0]) + ) + if filters.search: + query = query.filter(Thread.name.ilike(f"%{filters.search.value}%")) + + if filters.feedbacksValue: + query = ( + query.join(Step) + .join(Feedback) + .filter_by(value=filters.feedbacksValue.value[0]) + ) + + if after: + query = query.offset(after) + + query = query.limit(first) + + async with self.async_sessionmaker() as session: + result = await session.execute(query) + threads = result.scalars().all() + + async def convert_thread_to_dict(thread): + user = None + if thread.user_id: + async with self.async_sessionmaker() as session: + result = await session.execute( + select(User).filter_by(id=thread.user_id) + ) + persisted_user = result.scalars().first() + user = { + "id": thread.user_id or "", + "identifier": persisted_user.identifier or "", + "metadata": persisted_user.metadata_, + } + return { + "createdAt": thread.created_at.isoformat() or "", + "id": thread.id, + "name": thread.name or None, + "metadata": thread.metadata_, + "user": user, + "tags": thread.tags, + } + + threads_data = [await convert_thread_to_dict(thread) for thread in threads] + return PaginatedResponse(data=threads_data, pageInfo=PageInfo.from_dict({})) + + async def upsert_thread(self, thread_id, name, participant_id, metadata, tags): + async with self.async_sessionmaker() as session: + stmt = select(Thread).filter_by(id=thread_id) + result = await session.execute(stmt) + thread = result.scalars().first() + if thread: + if name is not None: + thread.name = name + if participant_id is not None: + thread.user_id = participant_id + if metadata is not None: + if thread.metadata_: + thread.metadata_.update(metadata) + else: + thread.metadata_ = metadata + if tags is not None: + thread.tags = tags + else: + thread = Thread( + id=thread_id, + name=name, + user_id=participant_id, + metadata_=metadata or {}, + tags=tags, + ) + session.add(thread) + await session.commit() + + def step_to_step(self, step: Step, feedback: Feedback) -> "literalai.Step": + step_ = literalai.Step( + name=str(step.name), + type=cast(StepType, str(step.type)), + id=str(step.id), + thread_id=str(step.thread_id), + parent_id=str(step.parent_id), + ) + step_.metadata = dict(step.metadata_) if step.metadata_ else {} + step_.input = dict(step.input) if step.input else {} + step_.output = dict(step.output) if step.output else {} + step_.start_time = step.start_time.isoformat() if step.start_time else None + step_.end_time = step.end_time.isoformat() if step.end_time else None + step_.created_at = step.created_at.isoformat() if step.created_at else None + step_.generation = ( + cast(Optional[Union[ChatGeneration, CompletionGeneration]], step.generation) + if step.generation + else None + ) + step_.feedback = self.feedback_to_feedback(feedback) if feedback else None + return step_ + + def feedback_to_feedback(self, feedback: Feedback) -> "literalai.Feedback": + return literalai.Feedback( + id=str(feedback.id), + step_id=str(feedback.step_id), + 
value=float(feedback.value), + comment=str(feedback.comment) if feedback.comment else "", + strategy=cast(FeedbackStrategy, str(feedback.strategy)), + ) + + def attachment_to_attachment( + self, attachment: Attachment + ) -> "literalai.Attachment": + metadata = dict(attachment.metadata_) if attachment.metadata_ else {} + return literalai.Attachment( + thread_id=str(attachment.thread_id) if attachment.thread_id else "", + step_id=str(attachment.step_id), + metadata=dict(metadata) if metadata else {}, + object_key=str(attachment.object_key) if attachment.object_key else "", + mime=str(attachment.mime), + name=str(attachment.name), + url=str(attachment.url) if attachment.url else "", + id=str(attachment.id), + ) + + +class SQLiteDataLayer(ChainlitDataLayer): + def __init__(self, database_url=None): + super().__init__("dummy", "") + self.client = SQLiteClient(database_url) diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 7b02912a16..cb02fe065a 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -49,6 +49,7 @@ packaging = "^23.1" python-multipart = "^0.0.9" pyjwt = "^2.8.0" numpy = "^1.26" +aiosqlite = "^0.20.0" [tool.poetry.group.tests] optional = true diff --git a/cypress/e2e/data_layer_sqlite/.chainlit/config.toml b/cypress/e2e/data_layer_sqlite/.chainlit/config.toml new file mode 100644 index 0000000000..0c509af72c --- /dev/null +++ b/cypress/e2e/data_layer_sqlite/.chainlit/config.toml @@ -0,0 +1,62 @@ +[project] +# Whether to enable telemetry (default: true). No personal data is collected. +enable_telemetry = true + +# List of environment variables to be provided by each user to use the app. +user_env = [] + +# Duration (in seconds) during which the session is saved when the connection is lost +session_timeout = 3600 + +# Enable third parties caching (e.g LangChain cache) +cache = false + +# Follow symlink for asset mount (see https://github.com/Chainlit/chainlit/issues/317) +# follow_symlink = false + +[features] +# Show the prompt playground +prompt_playground = true + +[UI] +# Name of the app and chatbot. +name = "Chatbot" + +# Description of the app and chatbot. This is used for HTML tags. +# description = "" + +# Large size content are by default collapsed for a cleaner ui +default_collapse_content = true + +# The default value for the expand messages settings. +default_expand_messages = false + +# Hide the chain of thought details from the user in the UI. +hide_cot = false + +# Link to your github repo. This will add a github button in the UI's header. +# github = "" + +# Override default MUI light theme. (Check theme.ts) +[UI.theme.light] + #background = "#FAFAFA" + #paper = "#FFFFFF" + + [UI.theme.light.primary] + #main = "#F80061" + #dark = "#980039" + #light = "#FFE7EB" + +# Override default MUI dark theme. 
(Check theme.ts) +[UI.theme.dark] + #background = "#FAFAFA" + #paper = "#FFFFFF" + + [UI.theme.dark.primary] + #main = "#F80061" + #dark = "#980039" + #light = "#FFE7EB" + + +[meta] +generated_by = "0.6.402" diff --git a/cypress/e2e/data_layer_sqlite/main.py b/cypress/e2e/data_layer_sqlite/main.py new file mode 100644 index 0000000000..4a9288b6fd --- /dev/null +++ b/cypress/e2e/data_layer_sqlite/main.py @@ -0,0 +1,28 @@ +from typing import List, Optional + +import chainlit.data as cl_data +from chainlit.data.sqlite import SQLiteDataLayer +from literalai.helper import utc_now + +import chainlit as cl + +cl_data._data_layer = SQLiteDataLayer() + + +@cl.on_chat_start +async def main(): + await cl.Message("Hello, send me a message!", disable_feedback=True).send() + + +@cl.on_message +async def handle_message(): + await cl.sleep(2) + await cl.Message("Ok!").send() + + +@cl.password_auth_callback +def auth_callback(username: str, password: str) -> Optional[cl.User]: + if (username, password) == ("admin", "admin"): + return cl.User(identifier="admin") + else: + return None diff --git a/cypress/e2e/data_layer_sqlite/spec.cy.ts b/cypress/e2e/data_layer_sqlite/spec.cy.ts new file mode 100644 index 0000000000..c299762775 --- /dev/null +++ b/cypress/e2e/data_layer_sqlite/spec.cy.ts @@ -0,0 +1,37 @@ +import { runTestServer, submitMessage } from '../../support/testUtils'; + +function login() { + cy.get("[id='email']").type('admin'); + cy.get("[id='password']").type('admin{enter}'); +} + +function feedback() { + submitMessage('Hello'); + cy.get('.negative-feedback-off').should('have.length', 1); + cy.get('.positive-feedback-off').should('have.length', 1).click(); + cy.get('#feedbackSubmit').click(); + cy.get('.positive-feedback-on').should('have.length', 1); +} + +describe('Data Layer with Sqlite', () => { + before(() => { + runTestServer(); + }); + + describe('Data Features with Sqlite file persistence', () => { + it('should login, submit feedback, wait for user input to create steps, browse thread history, delete a thread and then resume a thread', () => { + login(); + feedback(); + }); + }); +}); + +describe('DB file existence check', () => { + it('should check if db file was created', () => { + const filePath = 'cypress/e2e/data_layer_sqlite/chainlit.db'; + + cy.readFile(filePath).then((content) => { + expect(content).to.exist; + }); + }); +}); From 42989dc5c43d452fe0ca0dbb834f8e8234b23036 Mon Sep 17 00:00:00 2001 From: Quy Tang <3761730+qtangs@users.noreply.github.com> Date: Sun, 22 Sep 2024 11:22:48 +0800 Subject: [PATCH 02/10] feat: add new SQLAlchemyORM data layer to support sqlite along with postgres and others --- backend/chainlit/data/sql_alchemy_orm.py | 908 +++++++++++++++++++++++ 1 file changed, 908 insertions(+) create mode 100644 backend/chainlit/data/sql_alchemy_orm.py diff --git a/backend/chainlit/data/sql_alchemy_orm.py b/backend/chainlit/data/sql_alchemy_orm.py new file mode 100644 index 0000000000..7b049d472e --- /dev/null +++ b/backend/chainlit/data/sql_alchemy_orm.py @@ -0,0 +1,908 @@ +import datetime +import json +import logging +import os +import ssl +import uuid +from typing import Any, Callable, Dict, List, Optional, Union, cast + +import aiofiles +import aiohttp +import sqlalchemy +from chainlit.data import queue_until_user_message +from chainlit.data.base import BaseDataLayer, BaseStorageClient +from chainlit.element import Element, ElementDict +from chainlit.step import StepDict +from chainlit.types import ( + Feedback, + FeedbackDict, + PageInfo, + PaginatedResponse, + 
Pagination, + ThreadDict, + ThreadFilter, +) +from chainlit.user import PersistedUser, User +from pydantic.dataclasses import Field, dataclass +from sqlalchemy import ( + ARRAY, + JSON, + UUID, + Boolean, + Column, + ForeignKey, + Index, + Integer, + MetaData, + SelectBase, + String, + Table, + TextClause, + UpdateBase, + asc, + desc, + insert, + or_, + select, + update, +) +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker +from sqlalchemy.sql.type_api import TypeEngine + +logger = logging.getLogger("sqlalchemydatalayer") + + +@dataclass +class SQLDialectSettings: + type_replacements: Dict[str, str] = Field( + description="A dictionary of SQLAlchemy types (as string) " + "to replace some special types such as ARRAY or JSON", + default_factory=dict, + ) + + +# Use SQL_ALCHEMY_DIALECT_SETTINGS from environment variable if set +# else use the default here +SQL_ALCHEMY_DIALECT_SETTINGS: Dict[str, Dict] = ( + json.loads(os.environ["SQL_ALCHEMY_DIALECT_SETTINGS"]) + if "SQL_ALCHEMY_DIALECT_SETTINGS" in os.environ + else { + "mysql": { + "type_replacements": {"ARRAY": "JSON"}, + }, + "postgresql": { + "type_replacements": {"JSON": "JSONB"}, + }, + "sqlite": { + "type_replacements": {"ARRAY": "JSON"}, + }, + } +) + + +# Helper function to convert string such as "ARRAY", "JSON" to SQLAlchemy type +def get_sqlalchemy_type( + dialect_name: str, + type_replacements: Dict[str, str], + type_name: str, + default: TypeEngine[Any], +) -> TypeEngine: + type_class_name = type_replacements.get(type_name) + + if not type_class_name: + return default + + if hasattr(sqlalchemy, type_class_name): + return getattr(sqlalchemy, type_class_name) + + if hasattr(sqlalchemy.dialects, dialect_name): + dialect_class = getattr(sqlalchemy.dialects, dialect_name) + if hasattr(dialect_class, type_class_name): + return getattr(dialect_class, type_class_name) + + return default + + +class SQLAlchemyORMDataLayer(BaseDataLayer): + """ + A Chainlit Data Layer that integrates with SQLAlchemy to manage database operations. + This uses SQLAlchemy's ORM (Object-Relational Mapping) to interact with the database instead of writing raw SQL queries. + + By using ORM, it can potentially support multiple dialects including PostgreSQL, MySQL, SQLite, Oracle, and MSSQL, as well as many others listed here: https://docs.sqlalchemy.org/en/20/dialects/#support-levels-for-included-dialects + """ # noqa: E501 + + dialect_name: str + metadata: MetaData + users: Table + threads: Table + steps: Table + elements: Table + feedbacks: Table + + def __init__( + self, + url: str, + connect_args: Optional[Dict] = None, + ssl_require: bool = False, + storage_provider: Optional[BaseStorageClient] = None, + log_level: str = "INFO", + ): + """ + Create a new Chainlit Data Layer using SQLAlchemy. + + Supported dialects: https://docs.sqlalchemy.org/en/20/dialects/#support-levels-for-included-dialects + + :param url: Database connection string. + :param ssl_require: Whether SSL is required for the connection. + :param storage_provider: Optional storage client for file-based elements. + :param log_level: Log level for this class. Use DEBUG to see SQLs generated by the engine. 
+ """ # noqa: E501 + logger.setLevel(log_level) + logger.info("SQLAlchemyORM: __init__") + + connect_args = connect_args or {} + if ssl_require: + # Create an SSL context to require an SSL connection + ssl_context = ssl.create_default_context() + ssl_context.check_hostname = False + ssl_context.verify_mode = ssl.CERT_NONE + connect_args["ssl"] = ssl_context + self.engine: AsyncEngine = create_async_engine( + url, + connect_args=connect_args, + echo=log_level == "DEBUG", # Enable SQL logging + ) + self.async_session = sessionmaker( # type: ignore + bind=self.engine, expire_on_commit=False, class_=AsyncSession + ) + if storage_provider: + self.storage_provider: Optional[BaseStorageClient] = storage_provider + logger.info("SQLAlchemyDataLayer storage client initialized") + else: + self.storage_provider = None + logger.warning( + "SQLAlchemyDataLayer storage client is not initialized and " + "some elements will not be persisted!" + ) + + self.init_objects() + + def init_objects(self): + logger.info(f"SQLAlchemyORM: Initializing objects") + + self.dialect_name = self.engine.dialect.name + + # Note: Default settings is applied for dialect not in the dictionary + dialect_settings: SQLDialectSettings = self.get_dialect_settings( + self.dialect_name + ) + + self.metadata = MetaData() + + self.users = Table( + "users", + self.metadata, + Column("id", UUID(as_uuid=False), primary_key=True), + Column("identifier", String, unique=True, nullable=False), + Column( + "metadata", + get_sqlalchemy_type( + self.dialect_name, + dialect_settings.type_replacements, + "JSON", + default=JSON, # type: ignore + ), + nullable=False, + ), + Column("createdAt", String), + Index("ix_users_identifier", "identifier", unique=True), + ) + + self.threads = Table( + "threads", + self.metadata, + Column("id", UUID(as_uuid=False), primary_key=True), + Column("createdAt", String), + Column("updatedAt", String), + Column("name", String), + Column( + "userId", + UUID(as_uuid=False), + ForeignKey("users.id", ondelete="CASCADE"), + ), + Column("userIdentifier", String), + Column( + "tags", + get_sqlalchemy_type( + self.dialect_name, + dialect_settings.type_replacements, + "ARRAY", + default=ARRAY(String), + ), + ), + Column( + "metadata", + get_sqlalchemy_type( + self.dialect_name, + dialect_settings.type_replacements, + "JSON", + default=JSON, # type: ignore + ), + ), + Index("ix_threads_userId", "userId"), + ) + + self.steps = Table( + "steps", + self.metadata, + Column("id", UUID(as_uuid=False), primary_key=True), + Column("name", String, nullable=False), + Column("type", String, nullable=False), + Column("threadId", UUID(as_uuid=False), nullable=False), + Column("parentId", UUID(as_uuid=False)), + Column("streaming", Boolean, nullable=False), + Column("waitForAnswer", Boolean), + Column("isError", Boolean), + Column( + "metadata", + get_sqlalchemy_type( + self.dialect_name, + dialect_settings.type_replacements, + "JSON", + default=JSON, # type: ignore + ), + ), + Column( + "tags", + get_sqlalchemy_type( + self.dialect_name, + dialect_settings.type_replacements, + "ARRAY", + default=ARRAY(String), + ), + ), + Column("input", String), + Column("output", String), + Column("createdAt", String), + Column("start", String), + Column("end", String), + Column( + "generation", + get_sqlalchemy_type( + self.dialect_name, + dialect_settings.type_replacements, + "JSON", + default=JSON, # type: ignore + ), + ), + Column("showInput", String), + Column("language", String), + Column("indent", Integer), + Index("ix_steps_threadId", "threadId"), 
+ ) + + self.elements = Table( + "elements", + self.metadata, + Column("id", UUID(as_uuid=False), primary_key=True), + Column("forId", UUID(as_uuid=False), nullable=False), + Column("threadId", UUID(as_uuid=False), nullable=False), + Column("type", String), + Column("url", String), + Column("chainlitKey", String), + Column("name", String, nullable=False), + Column("display", String), + Column("objectKey", String), + Column("size", String), + Column("page", Integer), + Column("autoPlay", Boolean), + Column( + "playerConfig", + get_sqlalchemy_type( + self.dialect_name, + dialect_settings.type_replacements, + "JSON", + default=JSON, # type: ignore + ), + ), + Column("language", String), + Column("mime", String), + Column("createdAt", String), + Index("ix_elements_threadId", "threadId"), + ) + + self.feedbacks = Table( + "feedbacks", + self.metadata, + Column("id", UUID(as_uuid=False), primary_key=True), + Column("forId", UUID(as_uuid=False), nullable=False), + Column("threadId", UUID(as_uuid=False), nullable=False), + Column("value", Integer, nullable=False), + Column("comment", String), + Index("ix_feedbacks_forId", "forId"), + Index("ix_feedbacks_threadId", "threadId"), + ) + + @classmethod + def get_dialect_settings( + cls, + dialect: str, + ) -> SQLDialectSettings: + return SQLDialectSettings( # type: ignore + **SQL_ALCHEMY_DIALECT_SETTINGS.get(dialect, {}) + ) + + async def create_objects(self, metadata_override: Optional[MetaData] = None): + """Create all tables and indices if not exists""" + logger.info(f"SQLAlchemyORM: create_objects") + try: + async with self.engine.begin() as conn: + # create all tables + metadata = metadata_override or self.metadata + await conn.run_sync(metadata.create_all) + await conn.commit() + except Exception as e: + if conn: + await conn.rollback() + logger.error("SQLAlchemyORM: create_objects, " f"error: {e}") + raise e + + async def build_debug_url(self) -> str: + return "" + + # --- SQL Helpers --- # + async def execute_stmt( + self, stmt: Union[SelectBase, UpdateBase, TextClause] + ) -> Union[List[Dict[str, Any]], int, None]: + results = await self.execute_stmts([stmt]) + + return results[0] if results else None + + async def execute_stmts( + self, stmts: List[Union[SelectBase, UpdateBase, TextClause]] + ) -> Union[List[Union[List[Dict[str, Any]], int]], None]: + async with self.async_session() as session: + try: + await session.begin() + results = [await session.execute(_) for _ in stmts] + await session.commit() + return [ + ( + [dict(row._mapping) for row in result.fetchall()] + if result.returns_rows + else result.rowcount + ) + for result in results + ] + except Exception as e: + await session.rollback() + logger.warning(f"SQLAlchemyORM: execute_stmts error: {e}") + return None + + async def get_current_timestamp(self) -> str: + return datetime.datetime.now(datetime.timezone.utc).isoformat() + + async def upsert_record( + self, + table: Table, + record_id: str, + data: Union[ThreadDict, StepDict, FeedbackDict, Dict], + exclude_none: bool = True, + exclude_empty_dict: bool = True, + ): + if exclude_none: + # Remove keys with None value + data = {key: value for key, value in data.items() if value is not None} + if exclude_empty_dict: + # Remove keys with empty dictionary + data = { + key: value + for key, value in data.items() + if not (isinstance(value, dict) and not value) + } + + insert: Callable + + update_data = {k: v for k, v in data.items() if k != "id"} + if self.dialect_name in ["postgresql", "sqlite"]: + # Insert with ON CONFLICT DO UPDATE + 
if self.dialect_name == "postgresql": + from sqlalchemy.dialects.postgresql import insert + else: + from sqlalchemy.dialects.sqlite import insert + + stmt = ( + insert(table) + .values(**data) + .on_conflict_do_update( + index_elements=["id"], + set_=update_data, + ) + ) + + await self.execute_stmt(stmt) + + else: + from sqlalchemy import insert + + # Attempt to update and check if it affects any record + update_stmt = ( + update(table).where(table.c.id == record_id).values(**update_data) + ) + + affected_row_count = await self.execute_stmt(update_stmt) + if affected_row_count == 0: + # if no record is updated, then insert + stmt = insert(self.threads).values(**data) + + await self.execute_stmt(stmt) + + # --- User --- # + async def get_user(self, identifier: str) -> Optional[PersistedUser]: + logger.info(f"SQLAlchemyORM: get_user, identifier={identifier}") + + stmt = select(self.users).where(self.users.c.identifier == identifier) + + result = await self.execute_stmt(stmt) + + if result and isinstance(result, list): + user_data = result[0] + return PersistedUser(**user_data) + + return None + + async def _get_user_identifer_by_id(self, user_id: str) -> str: + logger.info(f"SQLAlchemyORM: _get_user_identifer_by_id, user_id={user_id}") + stmt = select(self.users.c.identifier).where(self.users.c.id == user_id) + + result = await self.execute_stmt(stmt) + + assert result + assert isinstance(result, list) + + return result[0]["identifier"] + + async def _get_user_id_by_thread(self, thread_id: str) -> Optional[str]: + logger.info(f"SQLAlchemyORM: _get_user_id_by_thread, thread_id={thread_id}") + stmt = select(self.threads.c.userId).where(self.threads.c.id == thread_id) + + result = await self.execute_stmt(stmt) + + if result: + assert isinstance(result, list) + return result[0]["userId"] + + return None + + async def create_user(self, user: User) -> Optional[PersistedUser]: + logger.info(f"SQLAlchemyORM: create_user, user_identifier={user.identifier}") + + existing_user: Optional["PersistedUser"] = await self.get_user(user.identifier) + + user_dict: Dict[str, Any] = { + "identifier": str(user.identifier), + "metadata": user.metadata or {}, + } + + if not existing_user: # create the user + logger.info("SQLAlchemyORM: create_user, creating the user") + user_dict["id"] = str(uuid.uuid4()) + user_dict["createdAt"] = await self.get_current_timestamp() + + insert_stmt = insert(self.users).values(**user_dict) + + await self.execute_stmt(insert_stmt) + + return await self.get_user(user.identifier) + else: # update the user + logger.info("SQLAlchemyORM: update user metadata") + + existing_user.metadata.update(user_dict["metadata"]) + + update_stmt = ( + update(self.users) + .where(self.users.c.identifier == user.identifier) + .values(metadata=user_dict["metadata"]) + ) + + await self.execute_stmt(update_stmt) + + return existing_user + + # --- Threads --- # + async def get_thread_author(self, thread_id: str) -> str: + logger.info(f"SQLAlchemyORM: get_thread_author, thread_id={thread_id}") + + stmt = select(self.threads.c.userIdentifier).where( + self.threads.c.id == thread_id + ) + result = await self.execute_stmt(stmt) + + if isinstance(result, list) and result: + author_identifier = result[0].get("userIdentifier") + if author_identifier is not None: + return author_identifier + + raise ValueError(f"Author not found for thread_id {thread_id}") + + async def get_thread(self, thread_id: str) -> Optional[ThreadDict]: + logger.info(f"SQLAlchemyORM: get_thread, thread_id={thread_id}") + + # Aliases for 
tables + threads: Table = self.threads + steps: Table = self.steps + feedbacks: Table = self.feedbacks + elements: Table = self.elements + + # Select thread and associated steps and feedbacks + # Get all available columns and treat conflicting names later + thread_steps_stmt = ( + select(threads, steps, feedbacks) + .select_from( + threads.outerjoin(steps, steps.c.threadId == threads.c.id).outerjoin( + feedbacks, steps.c.id == feedbacks.c.forId + ) + ) + .where(threads.c.id == thread_id) + .order_by(asc(steps.c.createdAt)) + ) + + thread_steps = await self.execute_stmt(thread_steps_stmt) + + if not isinstance(thread_steps, list) or not thread_steps: + return None + + steps_dicts = [ + cast( + StepDict, + { + **{ + **{ + column.name: step.get(column.name) + for column in steps.c + if column.name in step + }, + "id": step["id_1"], + "name": step["name_1"], + "metadata": step["metadata_1"], + "tags": step["tags_1"], + "createdAt": step["createdAt_1"], + "feedback": cast( + FeedbackDict, + { + **{ + **{ + column.name: step.get(column.name) + for column in feedbacks.c + if column.name in step + }, + "id": step["id_2"], + "threadId": step["threadId_1"], + } + }, + ), + } + }, + ) + for step in thread_steps + if step["threadId"] + ] + + # Select associated elements + elements_stmt = ( + select(elements) + .where(elements.c.threadId == thread_id) + .order_by(elements.c.createdAt) + ) + + elements_result = await self.execute_stmt(elements_stmt) + elements_dicts = ( + [ + cast( + ElementDict, + { + **{ + column.name: element.get(column.name) + for column in elements.c + if column.name in element + } + }, + ) + for element in elements_result + ] + if isinstance(elements_result, list) + else None + ) + + return cast( + ThreadDict, + { + **{ + column.name: thread_steps[0].get(column.name) + for column in threads.c + if column.name in thread_steps[0] + }, + "steps": steps_dicts, + "elements": elements_dicts, + }, + ) + + async def update_thread( + self, + thread_id: str, + name: Optional[str] = None, + user_id: Optional[str] = None, + metadata: Optional[Dict] = None, + tags: Optional[List[str]] = None, + ): + logger.info(f"SQLAlchemyORM: update_thread, thread_id={thread_id}") + + user_identifier = None + if user_id: + user_identifier = await self._get_user_identifer_by_id(user_id) + + data = cast( + ThreadDict, + { + "id": thread_id, + "createdAt": ( + await self.get_current_timestamp() if metadata is None else None + ), + "updatedAt": await self.get_current_timestamp(), + "name": ( + metadata.get("thread_name") # Allow setting name via metadata + if metadata is not None + else name + ), + "userId": user_id, + "userIdentifier": user_identifier, + "tags": tags, + "metadata": ( + {k: v for k, v in metadata.items() if v is not None} + if metadata # Remove keys with None values + else None + ), + }, + ) + + await self.upsert_record(self.threads, thread_id, data) + + async def delete_thread(self, thread_id: str): + logger.info(f"SQLAlchemyORM: delete_thread, thread_id={thread_id}") + # Delete feedbacks/elements/steps/thread + feedbacks_stmt = self.feedbacks.delete().where( + self.feedbacks.c.threadId == thread_id + ) + elements_stmt = self.elements.delete().where( + self.elements.c.threadId == thread_id + ) + steps_stmt = self.steps.delete().where(self.steps.c.threadId == thread_id) + thread_stmt = self.threads.delete().where(self.threads.c.id == thread_id) + + await self.execute_stmts( + [feedbacks_stmt, elements_stmt, steps_stmt, thread_stmt] + ) + + async def list_threads( + self, pagination: Pagination, 
filters: ThreadFilter + ) -> PaginatedResponse: + logger.info( + "SQLAlchemyORM: " + f"list_threads, pagination={pagination}, filters={filters}" + ) + if not filters.userId: + raise ValueError("userId is required") + + threads = self.threads + steps = self.steps + feedbacks = self.feedbacks + + stmt = ( + select(threads) + .distinct() + .where(threads.c.userId == filters.userId) + .order_by(desc(threads.c.updatedAt)) + # Get 1 more than requested to determine if there's a next page + .limit(pagination.first + 1) + ) + + if filters.search: + # Join steps to filter on step's input and output + stmt = stmt.outerjoin(steps, threads.c.id == steps.c.threadId).where( + or_( + threads.c.name.ilike(f"%{filters.search}%"), + steps.c.input.ilike(f"%{filters.search}%"), + steps.c.output.ilike(f"%{filters.search}%"), + ) + ) + + if filters.feedback is not None: + stmt = stmt.join(feedbacks, threads.c.id == feedbacks.c.threadId).where( + feedbacks.c.value == int(filters.feedback) + ) + + if pagination.cursor: + stmt = stmt.where(threads.c.createdAt < pagination.cursor) + + logger.info(f"SQLAlchemyORM: filtering threads: {stmt.compile()}") + + filtered_threads = await self.execute_stmt(stmt) + + if not isinstance(filtered_threads, list): + return PaginatedResponse( + data=[], + pageInfo=PageInfo(hasNextPage=False, startCursor=None, endCursor=None), + ) + + has_next_page = len(filtered_threads) > pagination.first + start_cursor = filtered_threads[0]["createdAt"] if filtered_threads else None + end_cursor = ( + filtered_threads[-2 if has_next_page else -1]["createdAt"] + if filtered_threads + else None + ) + + return PaginatedResponse( + data=[ + ThreadDict( + id=thread["id"], + createdAt=thread["createdAt"], + name=thread["name"], + userId=thread["userId"], + userIdentifier=thread["userIdentifier"], + tags=thread["tags"], + metadata=thread["metadata"], + steps=[], + elements=[], + ) + for thread in filtered_threads[: -1 if has_next_page else None] + ], + pageInfo=PageInfo( + hasNextPage=has_next_page, + startCursor=start_cursor, + endCursor=end_cursor, + ), + ) + + # --- Steps --- # + @queue_until_user_message() + async def create_step(self, step_dict: "StepDict"): + step_id = step_dict["id"] + logger.info(f"SQLAlchemyORM: create_step, step_id={step_id}") + + await self.upsert_record(self.steps, step_id, step_dict) + + @queue_until_user_message() + async def update_step(self, step_dict: "StepDict"): + step_id = step_dict["id"] + logger.info(f"SQLAlchemyORM: update_step, step_id={step_id}") + + await self.upsert_record(self.steps, step_id, step_dict) + + @queue_until_user_message() + async def delete_step(self, step_id: str): + logger.info(f"SQLAlchemyORM: delete_step, step_id={step_id}") + + feedbacks_stmt = self.feedbacks.delete().where( + self.feedbacks.c.forId == step_id + ) + elements_stmt = self.elements.delete().where(self.elements.c.forId == step_id) + steps_stmt = self.steps.delete().where(self.steps.c.id == step_id) + + await self.execute_stmts([feedbacks_stmt, elements_stmt, steps_stmt]) + + # --- Feedback --- # + async def upsert_feedback(self, feedback: Feedback) -> str: + logger.info(f"SQLAlchemyORM: upsert_feedback, feedback_id={feedback.id}") + feedback.id = feedback.id or str(uuid.uuid4()) + feedback_dict = feedback.__dict__ + + await self.upsert_record(self.feedbacks, feedback.id, feedback_dict) + + return feedback.id + + async def delete_feedback(self, feedback_id: str) -> bool: + logger.info(f"SQLAlchemyORM: delete_feedback, feedback_id={feedback_id}") + stmt = 
self.feedbacks.delete().where(self.feedbacks.c.id == feedback_id) + result = await self.execute_stmt(stmt) + return result is not None + + # --- Elements --- # + async def get_element( + self, thread_id: str, element_id: str + ) -> Optional["ElementDict"]: + logger.info( + f"SQLAlchemyORM: get_element, thread_id={thread_id}, " + f"element_id={element_id}" + ) + + stmt = select(self.elements).where( + self.elements.c.threadId == thread_id, + self.elements.c.id == element_id, + ) + + result = await self.execute_stmt(stmt) + + if isinstance(result, list): + return cast(ElementDict, result[0]) + else: + return None + + @queue_until_user_message() + async def create_element(self, element: Element): + logger.info(f"SQLAlchemyORM: create_element, element_id = {element.id}") + if not self.storage_provider and not element.url: + logger.warning( + "SQLAlchemyORM: create_element error. " + "No blob_storage_client is configured!" + ) + return + if not element.for_id: + return + + url = element.url + object_key = element.url + + content: Optional[Union[bytes, str]] = None + + if element.path: + async with aiofiles.open(element.path, "rb") as f: + content = await f.read() + elif element.url: + async with aiohttp.ClientSession() as session: + async with session.get(element.url) as response: + if response.status == 200: + content = await response.read() + else: + content = None + elif element.content: + content = element.content + else: + raise ValueError("Element url, path or content must be provided") + + if content and self.storage_provider: + user_id: str = ( + await self._get_user_id_by_thread(element.thread_id) or "unknown" + ) + file_object_key = f"{user_id}/{element.id}" + ( + f"/{element.name}" if element.name else "" + ) + + if not element.mime: + element.mime = "application/octet-stream" + + uploaded_file = await self.storage_provider.upload_file( + object_key=file_object_key, + data=content, + mime=element.mime, + overwrite=True, + ) + if not uploaded_file: + raise ValueError( + "SQLAlchemyORM Error: create_element, " + "Failed to persist data in storage_provider" + ) + url = uploaded_file.get("url") + object_key = uploaded_file.get("object_key") + + element_dict: ElementDict = element.to_dict() + + element_dict["url"] = url + element_dict["objectKey"] = object_key + element_dict_cleaned = { + k: json.dumps(v) if isinstance(v, dict) else v + for k, v in element_dict.items() + if v is not None + } + # Add custom field + element_dict_cleaned["createdAt"] = await self.get_current_timestamp() + + stmt = insert(self.elements).values(element_dict_cleaned) + await self.execute_stmt(stmt) + + @queue_until_user_message() + async def delete_element(self, element_id: str, thread_id: Optional[str] = None): + logger.info(f"SQLAlchemyORM: delete_element, element_id={element_id}") + stmt = self.elements.delete().where(self.elements.c.id == element_id) + await self.execute_stmt(stmt) From b0e77f98817b97e3d0243f4c597f4c35c31ad49e Mon Sep 17 00:00:00 2001 From: Quy Tang <3761730+qtangs@users.noreply.github.com> Date: Sun, 22 Sep 2024 11:23:51 +0800 Subject: [PATCH 03/10] fix: remove SQLiteDataLayer, use SQLAlchemyORMDataLayer instead --- backend/chainlit/data/sqlite.py | 459 -------------------------------- 1 file changed, 459 deletions(-) delete mode 100644 backend/chainlit/data/sqlite.py diff --git a/backend/chainlit/data/sqlite.py b/backend/chainlit/data/sqlite.py deleted file mode 100644 index 70bcf0dc0e..0000000000 --- a/backend/chainlit/data/sqlite.py +++ /dev/null @@ -1,459 +0,0 @@ -import asyncio 
-import os -import uuid -from datetime import datetime -from typing import Optional, Union, cast -from urllib.parse import urlparse - -import literalai -from chainlit.data import ChainlitDataLayer -from literalai import ( - ChatGeneration, - CompletionGeneration, - FeedbackStrategy, - PageInfo, - PaginatedResponse, -) -from literalai.step import StepType -from sqlalchemy import JSON, Column, DateTime, ForeignKey, Integer, String -from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine -from sqlalchemy.future import select -from sqlalchemy.orm import DeclarativeBase - -from chainlit import config - - -class Base(DeclarativeBase): - pass - - -class User(Base): - __tablename__ = "user" - id = Column(String, primary_key=True, index=True, default=lambda: str(uuid.uuid4())) - identifier = Column(String, unique=True, index=True) - created_at = Column(DateTime, default=datetime.utcnow) - metadata_ = Column(JSON) - - -class Thread(Base): - __tablename__ = "thread" - id = Column(String, primary_key=True, index=True, default=lambda: str(uuid.uuid4())) - name = Column(String, nullable=True) - user_id = Column(Integer, ForeignKey("user.id"), nullable=True) - created_at = Column(DateTime, default=datetime.utcnow) - metadata_ = Column(JSON, nullable=True) - tags = Column(JSON, nullable=True) - - -class Step(Base): - __tablename__ = "step" - id = Column(String, primary_key=True, index=True, default=lambda: str(uuid.uuid4())) - thread_id = Column(String, ForeignKey("thread.id")) - parent_id = Column(String, ForeignKey("step.id")) - created_at = Column(DateTime, default=datetime.utcnow) - start_time = Column(DateTime) - end_time = Column(DateTime, nullable=True) - name = Column(String) - type = Column(String) - input = Column(JSON) - output = Column(JSON) - generation = Column(JSON) - metadata_ = Column(JSON) - - -class Feedback(Base): - __tablename__ = "feedback" - id = Column(String, primary_key=True, index=True, default=lambda: str(uuid.uuid4())) - step_id = Column(String, ForeignKey("step.id")) - value = Column(Integer) - comment = Column(String) - strategy = Column(String) - - -class Attachment(Base): - __tablename__ = "attachment" - id = Column(String, primary_key=True, index=True, default=lambda: str(uuid.uuid4())) - thread_id = Column(String, ForeignKey("thread.id")) - step_id = Column(String, ForeignKey("step.id")) - name = Column(String) - mime = Column(String) - object_key = Column(String) - url = Column(String) - metadata_ = Column(JSON) - - -class SQLiteClient: - def __init__(self, database_url=None): - if database_url is None: - database_url = "sqlite+aiosqlite:///chainlit.db" - parsed_url = urlparse(database_url) - db_path = os.path.abspath(os.path.join(parsed_url.netloc, parsed_url.path)) - os.makedirs(os.path.dirname(db_path), exist_ok=True) - - self.engine = create_async_engine(database_url, echo=True) - self.api = API(self.engine) - asyncio.run(self.create_tables()) - - async def create_tables(self): - async with self.engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - -class API: - def __init__(self, engine): - self.async_sessionmaker = async_sessionmaker( - bind=engine, class_=AsyncSession, expire_on_commit=False - ) - - async def get_user(self, identifier): - async with self.async_sessionmaker() as session: - result = await session.execute( - select(User).filter_by(identifier=identifier) - ) - user = result.scalars().first() - if user: - return literalai.User( - id=user.id, - identifier=user.identifier, - metadata=user.metadata_, 
- created_at=user.created_at.isoformat(), - ) - return None - - async def create_user(self, identifier, metadata): - async with self.async_sessionmaker() as session: - new_user = User( - id=str(uuid.uuid4()), identifier=identifier, metadata_=metadata - ) - session.add(new_user) - await session.commit() - return literalai.User( - id=new_user.id, - identifier=new_user.identifier, - metadata=new_user.metadata_, - created_at=new_user.created_at.isoformat(), - ) - - async def update_user(self, id, metadata): - async with self.async_sessionmaker() as session: - user = await session.get(User, id) - if user: - user.metadata_ = metadata - await session.commit() - return literalai.User( - id=user.id, - identifier=user.identifier, - metadata=user.metadata_, - created_at=user.created_at.isoformat(), - ) - return None - - async def create_feedback(self, step_id, value, comment, strategy): - async with self.async_sessionmaker() as session: - new_feedback = Feedback( - id=str(uuid.uuid4()), - step_id=step_id, - value=value, - comment=comment, - strategy=strategy, - ) - session.add(new_feedback) - await session.commit() - return new_feedback - - async def update_feedback(self, id, update_params): - async with self.async_sessionmaker() as session: - feedback = await session.get(Feedback, id) - if feedback: - feedback.comment = update_params.comment - feedback.strategy = update_params.strategy - feedback.value = update_params.value - await session.commit() - return feedback - - async def get_attachment(self, id): - async with self.async_sessionmaker() as session: - return await session.get(Attachment, id) - - async def delete_attachment(self, id): - logger.info(f"###### delete_attachment id {id}") - async with self.async_sessionmaker() as session: - attachment = await session.get(Attachment, id) - if attachment: - # Delete the attachment if found - await session.delete(attachment) - await session.commit() - - async def upload_file(self, content, mime, thread_id): - return {"object_key": f"projects/todo/threads/{thread_id}/files/todo"} - - async def delete_step(self, id): - async with self.async_sessionmaker() as session: - step = await session.get(Step, id) - if step: - await session.delete(step) - await session.commit() - query = select(Feedback).filter_by(step_id=id) - result = await session.execute(query) - feedbacks = result.scalars().all() - for feedback in feedbacks: - await session.delete(feedback) - await session.commit() - query = select(Attachment).filter_by(step_id=id) - result = await session.execute(query) - attachments = result.scalars().all() - for attachment in attachments: - await self.delete_attachment(str(attachment.id)) - - async def send_steps(self, steps): - - def convert_to_datetime(value): - if value is None: - return None - return datetime.fromisoformat(value.rstrip("Z")) - - async with self.async_sessionmaker() as session: - for step_dict in steps: - step_id = step_dict.get("id") - if "attachments" in step_dict: - for attachment_dict in step_dict["attachments"]: - attachment = Attachment( - id=attachment_dict.get("id"), - step_id=step_id, - name=attachment_dict.get("name"), - mime=attachment_dict.get("mime"), - object_key=attachment_dict.get("objectKey"), - url=attachment_dict.get("url"), - metadata_=attachment_dict.get("metadata"), - ) - session.add(attachment) - else: - existing_step = await session.get(Step, step_id) - if existing_step: - existing_step.end_time = convert_to_datetime( - step_dict.get("endTime") - ) - existing_step.generation = step_dict.get("generation") - 
existing_step.name = step_dict.get("name") - existing_step.metadata_.update(step_dict.get("metadata")) - existing_step.input = step_dict.get("input") - existing_step.output = step_dict.get("output") - else: - step = Step( - created_at=convert_to_datetime(step_dict.get("createdAt")), - start_time=convert_to_datetime(step_dict.get("startTime")), - end_time=convert_to_datetime(step_dict.get("endTime")), - generation=step_dict.get("generation"), - id=step_dict.get("id"), - parent_id=step_dict.get("parentId"), - name=step_dict.get("name"), - thread_id=step_dict.get("threadId"), - type=step_dict.get("type"), - metadata_=step_dict.get("metadata"), - input=step_dict.get("input"), - output=step_dict.get("output"), - ) - session.add(step) - await session.commit() - - async def get_thread(self, id): - async with self.async_sessionmaker() as session: - stmt = select(Thread).filter_by(id=id) - result = await session.execute(stmt) - thread = result.scalars().first() - if thread is None: - return None - async with self.async_sessionmaker() as session: - stmt = select(Step).filter_by(thread_id=id) - result = await session.execute(stmt) - thread_steps = result.scalars().all() - - steps = [] - if thread_steps: - for step in thread_steps: - if config.ui.hide_cot and step.parent_id: - continue - if not config.features.prompt_playground and step.generation: - step.generation = None - elif step.generation is not None: - step.generation = literalai.BaseGeneration.from_dict( - step.generation - ) - async with self.async_sessionmaker() as session: - stmt = select(Attachment).filter(Attachment.step_id == step.id) - result = await session.execute(stmt) - attachments_ = [] - attachments = result.scalars().all() - for attachment in attachments: - attachments_.append(self.attachment_to_attachment(attachment)) - step.attachments = attachments_ - async with self.async_sessionmaker() as session: - stmt = select(Feedback).filter(Feedback.step_id == step.id) - result = await session.execute(stmt) - feedback = result.scalars().first() - steps.append(self.step_to_step(step, feedback)) - user = None - if thread.user_id: - async with self.async_sessionmaker() as session: - result = await session.execute( - select(User).filter_by(id=thread.user_id) - ) - persisted_user = result.scalars().first() - user = literalai.User( - id=thread.user_id or "", - identifier=persisted_user.identifier or "", - metadata=persisted_user.metadata_, - ) - - thread_ = literalai.Thread( - id=str(thread.id), - name=str(thread.name) or None, - steps=steps, - metadata=thread.metadata_, - user=user, - tags=thread.tags, - ) - thread_.created_at = thread.created_at.isoformat() - return thread_ - - async def delete_thread(self, id): - async with self.async_sessionmaker() as session: - thread = await session.get(Thread, id) - if thread: - await session.delete(thread) - await session.commit() - query = select(Step).filter_by(thread_id=id) - result = await session.execute(query) - steps = result.scalars().all() - for step in steps: - await self.delete_step(str(step.id)) - - async def list_threads(self, first, after, filters): - query = ( - select(Thread) - .join(User) - .filter_by(identifier=filters.participantsIdentifier.value[0]) - ) - if filters.search: - query = query.filter(Thread.name.ilike(f"%{filters.search.value}%")) - - if filters.feedbacksValue: - query = ( - query.join(Step) - .join(Feedback) - .filter_by(value=filters.feedbacksValue.value[0]) - ) - - if after: - query = query.offset(after) - - query = query.limit(first) - - async with 
self.async_sessionmaker() as session: - result = await session.execute(query) - threads = result.scalars().all() - - async def convert_thread_to_dict(thread): - user = None - if thread.user_id: - async with self.async_sessionmaker() as session: - result = await session.execute( - select(User).filter_by(id=thread.user_id) - ) - persisted_user = result.scalars().first() - user = { - "id": thread.user_id or "", - "identifier": persisted_user.identifier or "", - "metadata": persisted_user.metadata_, - } - return { - "createdAt": thread.created_at.isoformat() or "", - "id": thread.id, - "name": thread.name or None, - "metadata": thread.metadata_, - "user": user, - "tags": thread.tags, - } - - threads_data = [await convert_thread_to_dict(thread) for thread in threads] - return PaginatedResponse(data=threads_data, pageInfo=PageInfo.from_dict({})) - - async def upsert_thread(self, thread_id, name, participant_id, metadata, tags): - async with self.async_sessionmaker() as session: - stmt = select(Thread).filter_by(id=thread_id) - result = await session.execute(stmt) - thread = result.scalars().first() - if thread: - if name is not None: - thread.name = name - if participant_id is not None: - thread.user_id = participant_id - if metadata is not None: - if thread.metadata_: - thread.metadata_.update(metadata) - else: - thread.metadata_ = metadata - if tags is not None: - thread.tags = tags - else: - thread = Thread( - id=thread_id, - name=name, - user_id=participant_id, - metadata_=metadata or {}, - tags=tags, - ) - session.add(thread) - await session.commit() - - def step_to_step(self, step: Step, feedback: Feedback) -> "literalai.Step": - step_ = literalai.Step( - name=str(step.name), - type=cast(StepType, str(step.type)), - id=str(step.id), - thread_id=str(step.thread_id), - parent_id=str(step.parent_id), - ) - step_.metadata = dict(step.metadata_) if step.metadata_ else {} - step_.input = dict(step.input) if step.input else {} - step_.output = dict(step.output) if step.output else {} - step_.start_time = step.start_time.isoformat() if step.start_time else None - step_.end_time = step.end_time.isoformat() if step.end_time else None - step_.created_at = step.created_at.isoformat() if step.created_at else None - step_.generation = ( - cast(Optional[Union[ChatGeneration, CompletionGeneration]], step.generation) - if step.generation - else None - ) - step_.feedback = self.feedback_to_feedback(feedback) if feedback else None - return step_ - - def feedback_to_feedback(self, feedback: Feedback) -> "literalai.Feedback": - return literalai.Feedback( - id=str(feedback.id), - step_id=str(feedback.step_id), - value=float(feedback.value), - comment=str(feedback.comment) if feedback.comment else "", - strategy=cast(FeedbackStrategy, str(feedback.strategy)), - ) - - def attachment_to_attachment( - self, attachment: Attachment - ) -> "literalai.Attachment": - metadata = dict(attachment.metadata_) if attachment.metadata_ else {} - return literalai.Attachment( - thread_id=str(attachment.thread_id) if attachment.thread_id else "", - step_id=str(attachment.step_id), - metadata=dict(metadata) if metadata else {}, - object_key=str(attachment.object_key) if attachment.object_key else "", - mime=str(attachment.mime), - name=str(attachment.name), - url=str(attachment.url) if attachment.url else "", - id=str(attachment.id), - ) - - -class SQLiteDataLayer(ChainlitDataLayer): - def __init__(self, database_url=None): - super().__init__("dummy", "") - self.client = SQLiteClient(database_url) From 
From 550c6d533a7f9cef16253b5df9630d2195a42d10 Mon Sep 17 00:00:00 2001
From: Quy Tang <3761730+qtangs@users.noreply.github.com>
Date: Sun, 22 Sep 2024 11:24:55 +0800
Subject: [PATCH 04/10] feat: add tests for SQLAlchemyORMDataLayer

---
 backend/tests/conftest.py                  |   5 +-
 backend/tests/data/test_sql_alchemy_orm.py | 135 +++++++++++++++++++++
 2 files changed, 138 insertions(+), 2 deletions(-)
 create mode 100644 backend/tests/data/test_sql_alchemy_orm.py

diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py
index cb76427d0b..09e3a4c613 100644
--- a/backend/tests/conftest.py
+++ b/backend/tests/conftest.py
@@ -1,3 +1,4 @@
+import uuid
 from contextlib import asynccontextmanager
 from unittest.mock import AsyncMock, Mock
 
@@ -19,14 +20,14 @@ def mock_persisted_user():
 @pytest.fixture
 def mock_session():
     mock = Mock(spec=WebsocketSession)
-    mock.id = "test_session_id"
+    mock.id = str(uuid.uuid4())
     mock.user_env = {"test_env": "value"}
     mock.chat_settings = {}
     mock.chat_profile = None
     mock.http_referer = None
     mock.client_type = "webapp"
     mock.languages = ["en"]
-    mock.thread_id = "test_thread_id"
+    mock.thread_id = str(uuid.uuid4())
     mock.emit = AsyncMock()
     mock.has_first_interaction = True
diff --git a/backend/tests/data/test_sql_alchemy_orm.py b/backend/tests/data/test_sql_alchemy_orm.py
new file mode 100644
index 0000000000..34fceb300c
--- /dev/null
+++ b/backend/tests/data/test_sql_alchemy_orm.py
@@ -0,0 +1,135 @@
+import uuid
+from pathlib import Path
+
+import pytest
+from chainlit.data.base import BaseStorageClient
+from chainlit.data.sql_alchemy_orm import SQLAlchemyORMDataLayer
+from chainlit.element import Text
+
+from chainlit import User
+
+
+@pytest.fixture
+async def data_layer(mock_storage_client: BaseStorageClient, tmp_path: Path):
+    db_file = tmp_path / "test_db.sqlite"
+    conninfo = f"sqlite+aiosqlite:///{db_file}"
+
+    # Create SQLAlchemyORMDataLayer instance
+    data_layer = SQLAlchemyORMDataLayer(
+        conninfo, storage_provider=mock_storage_client, log_level="DEBUG"
+    )
+
+    await data_layer.create_objects()
+
+    yield data_layer
+
+
+@pytest.fixture
+def test_user() -> User:
+    return User(identifier="sqlalchemy_test_user_id")
+
+
+async def test_create_and_get_element(
+    mock_chainlit_context, data_layer: SQLAlchemyORMDataLayer
+):
+    async with mock_chainlit_context:
+        text_element = Text(
+            id=str(uuid.uuid4()),
+            name="test.txt",
+            mime="text/plain",
+            content="test content",
+            for_id=str(uuid.uuid4()),
+        )
+        from chainlit import logger
+
+        logger.info(f"thread_id={text_element.thread_id}")
+
+        # Needs context because of wrapper in utils.py
+        await data_layer.create_element(text_element)
+
+    retrieved_element = await data_layer.get_element(
+        text_element.thread_id, text_element.id
+    )
+    assert retrieved_element is not None
+    assert retrieved_element["id"] == text_element.id
+    assert retrieved_element["name"] == text_element.name
+    assert retrieved_element["mime"] == text_element.mime
+    # The 'content' field is not part of the ElementDict, so we remove this assertion
+
+
+async def test_get_current_timestamp(data_layer: SQLAlchemyORMDataLayer):
+    timestamp = await data_layer.get_current_timestamp()
+    assert isinstance(timestamp, str)
+
+
+async def test_get_user(test_user: User, data_layer: SQLAlchemyORMDataLayer):
+    persisted_user = await data_layer.create_user(test_user)
+    assert persisted_user
+
+    fetched_user = await data_layer.get_user(persisted_user.identifier)
+
+    assert fetched_user
+    assert fetched_user.createdAt == persisted_user.createdAt
+    assert fetched_user.id == persisted_user.id
+
+    nonexistent_user = await data_layer.get_user("nonexistent")
+    assert nonexistent_user is None
+
+
+async def test_create_user(test_user: User, data_layer: SQLAlchemyORMDataLayer):
+    persisted_user = await data_layer.create_user(test_user)
+
+    assert persisted_user
+    assert persisted_user.identifier == test_user.identifier
+    assert persisted_user.createdAt
+    assert persisted_user.id
+
+    # Assert id is valid uuid
+    assert uuid.UUID(persisted_user.id)
+
+
+async def test_update_thread(test_user: User, data_layer: SQLAlchemyORMDataLayer):
+    persisted_user = await data_layer.create_user(test_user)
+    assert persisted_user
+
+    thread_id = str(uuid.uuid4())
+
+    await data_layer.update_thread(thread_id)
+
+
+async def test_get_thread_author(test_user: User, data_layer: SQLAlchemyORMDataLayer):
+    persisted_user = await data_layer.create_user(test_user)
+    assert persisted_user
+
+    thread_id = str(uuid.uuid4())
+
+    await data_layer.update_thread(thread_id, user_id=persisted_user.id)
+    author = await data_layer.get_thread_author(thread_id)
+
+    assert author == persisted_user.identifier
+
+
+async def test_get_thread(test_user: User, data_layer: SQLAlchemyORMDataLayer):
+    persisted_user = await data_layer.create_user(test_user)
+    assert persisted_user
+
+    thread_id = str(uuid.uuid4())
+
+    await data_layer.update_thread(thread_id)
+    result = await data_layer.get_thread(thread_id)
+    assert result is not None
+
+    result = await data_layer.get_thread("nonexisting_thread")
+    assert result is None
+
+
+async def test_delete_thread(test_user: User, data_layer: SQLAlchemyORMDataLayer):
+    persisted_user = await data_layer.create_user(test_user)
+    assert persisted_user
+
+    thread_id = str(uuid.uuid4())
+
+    await data_layer.update_thread(thread_id, "test_user")
+    await data_layer.delete_thread(thread_id)
+    thread = await data_layer.get_thread(thread_id)
+    assert thread is None

From 4b44657f836692dccfcdd05f96c52c1a6c571145 Mon Sep 17 00:00:00 2001
From: Quy Tang <3761730+qtangs@users.noreply.github.com>
Date: Sun, 22 Sep 2024 11:26:05 +0800
Subject: [PATCH 05/10] feat: update test for data_layer_sqlite

---
 cypress/e2e/data_layer_sqlite/main.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/cypress/e2e/data_layer_sqlite/main.py b/cypress/e2e/data_layer_sqlite/main.py
index 4a9288b6fd..9bfd99e118 100644
--- a/cypress/e2e/data_layer_sqlite/main.py
+++ b/cypress/e2e/data_layer_sqlite/main.py
@@ -1,12 +1,11 @@
-from typing import List, Optional
+from typing import Optional
 
 import chainlit.data as cl_data
-from chainlit.data.sqlite import SQLiteDataLayer
-from literalai.helper import utc_now
+from chainlit.data.sql_alchemy_orm import SQLAlchemyORMDataLayer
 
 import chainlit as cl
 
-cl_data._data_layer = SQLiteDataLayer()
+cl_data._data_layer = SQLAlchemyORMDataLayer(url="sqlite+aiosqlite:///test_db.sqlite")
 
 
 @cl.on_chat_start
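Before the lockfile churn in the next patch, a quick sketch of how the pieces tested above compose outside pytest. This is illustrative only, not part of the patch series; it assumes the same SQLAlchemyORMDataLayer surface the tests exercise (create_objects, create_user, update_thread, get_thread_author) and a throwaway file-backed database:

    import asyncio
    import uuid

    from chainlit import User
    from chainlit.data.sql_alchemy_orm import SQLAlchemyORMDataLayer


    async def smoke() -> None:
        # File-backed, as in the fixture above; an in-memory URL could hand each
        # pooled connection its own empty database, so a file is the safer choice.
        layer = SQLAlchemyORMDataLayer("sqlite+aiosqlite:///smoke_db.sqlite")
        await layer.create_objects()  # create tables on first use

        user = await layer.create_user(User(identifier="smoke_user"))
        thread_id = str(uuid.uuid4())
        await layer.update_thread(thread_id, user_id=user.id)

        # The thread's author resolves back to the creating user's identifier.
        assert await layer.get_thread_author(thread_id) == "smoke_user"


    asyncio.run(smoke())
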
From b81f82c9eff86fc9fd6da6f8f9f09ec03d95d8e0 Mon Sep 17 00:00:00 2001
From: Quy Tang <3761730+qtangs@users.noreply.github.com>
Date: Sun, 22 Sep 2024 11:33:15 +0800
Subject: [PATCH 06/10] chore: update poetry.lock

---
 backend/poetry.lock | 344 +++++++++++++++++++++++---------------------
 1 file changed, 183 insertions(+), 161 deletions(-)

diff --git a/backend/poetry.lock b/backend/poetry.lock
index 345bd1e430..97c5e77f92 100644
--- a/backend/poetry.lock
+++ b/backend/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
 
 [[package]]
 name = "aiofiles"
@@ -168,13 +168,13 @@ docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"]
 
 [[package]]
 name = "anyio"
-version = "4.4.0"
+version = "4.6.0"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
-    {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
+    {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"},
+    {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"},
 ]
 
 [package.dependencies]
@@ -184,9 +184,9 @@ sniffio = ">=1.1"
 typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}
 
 [package.extras]
-doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
-test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
-trio = ["trio (>=0.23)"]
+doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"]
+trio = ["trio (>=0.26.1)"]
 
 [[package]]
 name = "appdirs"
@@ -322,61 +322,61 @@ aio = ["aiohttp (>=3.0)"]
 
 [[package]]
 name = "azure-identity"
-version = "1.17.1"
+version = "1.18.0"
 description = "Microsoft Azure Identity Library for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "azure-identity-1.17.1.tar.gz", hash = "sha256:32ecc67cc73f4bd0595e4f64b1ca65cd05186f4fe6f98ed2ae9f1aa32646efea"},
-    {file = "azure_identity-1.17.1-py3-none-any.whl", hash = "sha256:db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382"},
+    {file = "azure_identity-1.18.0-py3-none-any.whl", hash = "sha256:bccf6106245b49ff41d0c4cd7b72851c5a2ba3a32cef7589da246f5727f26f02"},
+    {file = "azure_identity-1.18.0.tar.gz", hash = "sha256:f567579a65d8932fa913c76eddf3305101a15e5727a5e4aa5df649a0f553d4c3"},
 ]
 
 [package.dependencies]
-azure-core = ">=1.23.0"
+azure-core = ">=1.31.0"
 cryptography = ">=2.5"
-msal = ">=1.24.0"
-msal-extensions = ">=0.3.0"
+msal = ">=1.30.0"
+msal-extensions = ">=1.2.0"
 typing-extensions = ">=4.0.0"
 
 [[package]]
 name = "azure-storage-blob"
-version = "12.22.0"
+version = "12.23.0"
 description = "Microsoft Azure Blob Storage Client Library for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "azure-storage-blob-12.22.0.tar.gz", hash = "sha256:b3804bb4fe8ab1c32771fa464053da772a682c2737b19da438a3f4e5e3b3736e"},
-    {file = "azure_storage_blob-12.22.0-py3-none-any.whl", hash = "sha256:bb7d2d824ce3f11f14a27ee7d9281289f7e072ac8311c52e3652672455b7d5e8"},
+    {file = "azure_storage_blob-12.23.0-py3-none-any.whl", hash = "sha256:8ac4b34624ed075eda1e38f0c6dadb601e1b199e27a09aa63edc429bf4a23329"},
+    {file = "azure_storage_blob-12.23.0.tar.gz", hash = "sha256:2fadbceda1d99c4a72dfd32e0122d7bca8b5e8d2563f5c624d634aeaff49c9df"},
 ]
 
 [package.dependencies]
-azure-core = ">=1.28.0"
+azure-core = ">=1.30.0"
 cryptography = ">=2.1.4"
 isodate = ">=0.6.1"
 typing-extensions = ">=4.6.0"
 
 [package.extras]
-aio = ["azure-core[aio] (>=1.28.0)"]
+aio = ["azure-core[aio] (>=1.30.0)"]
 
 [[package]]
 name = "azure-storage-file-datalake"
-version = "12.16.0"
+version = "12.17.0"
 description = "Microsoft Azure File DataLake Storage Client Library for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "azure-storage-file-datalake-12.16.0.tar.gz", hash = "sha256:3185580e4e438162ef84fb88cb46b2ef248dafbfb07f53297762417bb7000333"},
-    {file = "azure_storage_file_datalake-12.16.0-py3-none-any.whl", hash = "sha256:da57ec6cf5640b92bbd0ba61478f51e67c63b94843fa748b3b6599f1adba5837"},
+    {file = "azure_storage_file_datalake-12.17.0-py3-none-any.whl", hash = "sha256:5e96d6ba0f2ae61951e0880fd86a2a8037a52303e94560ebd6a7199d04cd7f38"},
+    {file = "azure_storage_file_datalake-12.17.0.tar.gz", hash = "sha256:3f65ed4724014e0845841ea34e96459b03fa515c3082524462a17d161368d145"},
 ]
 
 [package.dependencies]
-azure-core = ">=1.28.0"
-azure-storage-blob = ">=12.21.0"
+azure-core = ">=1.30.0"
+azure-storage-blob = ">=12.23.0"
 isodate = ">=0.6.1"
 typing-extensions = ">=4.6.0"
 
 [package.extras]
-aio = ["azure-core[aio] (>=1.28.0)"]
+aio = ["azure-core[aio] (>=1.30.0)"]
 
 [[package]]
 name = "backoff"
@@ -540,17 +540,17 @@ botframework-connector = "4.16.2"
 
 [[package]]
 name = "boto3"
-version = "1.35.20"
+version = "1.35.24"
 description = "The AWS SDK for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "boto3-1.35.20-py3-none-any.whl", hash = "sha256:aaddbeb8c37608492f2c8286d004101464833d4c6e49af44601502b8b18785ed"},
-    {file = "boto3-1.35.20.tar.gz", hash = "sha256:47e89d95964f10beee21ee723c3290874fddf364269bd97d200e8bfa9bf93a06"},
+    {file = "boto3-1.35.24-py3-none-any.whl", hash = "sha256:97fcc1a14cbc759e4ba9535ced703a99fcf652c9c4b8dfcd06f292c80551684b"},
+    {file = "boto3-1.35.24.tar.gz", hash = "sha256:be7807f30f26d6c0057e45cfd09dad5968e664488bf4f9138d0bb7a0f6d8ed40"},
 ]
 
 [package.dependencies]
-botocore = ">=1.35.20,<1.36.0"
+botocore = ">=1.35.24,<1.36.0"
 jmespath = ">=0.7.1,<2.0.0"
 s3transfer = ">=0.10.0,<0.11.0"
 
@@ -559,13 +559,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
 
 [[package]]
 name = "botocore"
-version = "1.35.20"
+version = "1.35.24"
 description = "Low-level, data-driven core of boto 3."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "botocore-1.35.20-py3-none-any.whl", hash = "sha256:62412038f960691a299e60492f9ee7e8e75af563f2eca7f3640b3b54b8f5d236"},
-    {file = "botocore-1.35.20.tar.gz", hash = "sha256:82ad8a73fcd5852d127461c8dadbe40bf679f760a4efb0dde8d4d269ad3f126f"},
+    {file = "botocore-1.35.24-py3-none-any.whl", hash = "sha256:eb9ccc068255cc3d24c36693fda6aec7786db05ae6c2b13bcba66dce6a13e2e3"},
+    {file = "botocore-1.35.24.tar.gz", hash = "sha256:1e59b0f14f4890c4f70bd6a58a634b9464bed1c4c6171f87c8795d974ade614b"},
 ]
 
 [package.dependencies]
@@ -1278,18 +1278,18 @@ standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "htt
 
 [[package]]
 name = "filelock"
-version = "3.16.0"
+version = "3.16.1"
 description = "A platform independent file lock."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"},
-    {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"},
+    {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"},
+    {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"},
 ]
 
 [package.extras]
-docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"]
+docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
 typing = ["typing-extensions (>=4.12.2)"]
 
 [[package]]
@@ -1512,77 +1512,84 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
 
 [[package]]
 name = "greenlet"
-version = "3.1.0"
+version = "3.1.1"
 description = "Lightweight in-process concurrent programming"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "greenlet-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8"},
-    {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca"},
-    {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc"},
-    {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d"},
-    {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25"},
-    {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682"},
-    {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1"},
-    {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99"},
-    {file = "greenlet-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54"},
-    {file = "greenlet-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345"},
-    {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6"},
-    {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc"},
-    {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6"},
-    {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f"},
-    {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19"},
-    {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a"},
-    {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b"},
-    {file = "greenlet-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9"},
-    {file = "greenlet-3.1.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27"},
-    {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303"},
-    {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf"},
-    {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc"},
-    {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f"},
-    {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a"},
-    {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665"},
-    {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811"},
-    {file = "greenlet-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b"},
-    {file = "greenlet-3.1.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd"},
-    {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca"},
-    {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64"},
-    {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b"},
-    {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989"},
-    {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17"},
-    {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5"},
-    {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484"},
-    {file = "greenlet-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0"},
-    {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81eeec4403a7d7684b5812a8aaa626fa23b7d0848edb3a28d2eb3220daddcbd0"},
-    {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a3dae7492d16e85ea6045fd11cb8e782b63eac8c8d520c3a92c02ac4573b0a6"},
-    {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b5ea3664eed571779403858d7cd0a9b0ebf50d57d2cdeafc7748e09ef8cd81a"},
-    {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22f4e26400f7f48faef2d69c20dc055a1f3043d330923f9abe08ea0aecc44df"},
-    {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13ff8c8e54a10472ce3b2a2da007f915175192f18e6495bad50486e87c7f6637"},
-    {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9671e7282d8c6fcabc32c0fb8d7c0ea8894ae85cee89c9aadc2d7129e1a9954"},
-    {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:184258372ae9e1e9bddce6f187967f2e08ecd16906557c4320e3ba88a93438c3"},
-    {file = "greenlet-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:a0409bc18a9f85321399c29baf93545152d74a49d92f2f55302f122007cfda00"},
-    {file = "greenlet-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9eb4a1d7399b9f3c7ac68ae6baa6be5f9195d1d08c9ddc45ad559aa6b556bce6"},
-    {file = "greenlet-3.1.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a8870983af660798dc1b529e1fd6f1cefd94e45135a32e58bd70edd694540f33"},
-    {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfcfb73aed40f550a57ea904629bdaf2e562c68fa1164fa4588e752af6efdc3f"},
-    {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9482c2ed414781c0af0b35d9d575226da6b728bd1a720668fa05837184965b7"},
-    {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d58ec349e0c2c0bc6669bf2cd4982d2f93bf067860d23a0ea1fe677b0f0b1e09"},
-    {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd65695a8df1233309b701dec2539cc4b11e97d4fcc0f4185b4a12ce54db0491"},
-    {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:665b21e95bc0fce5cab03b2e1d90ba9c66c510f1bb5fdc864f3a377d0f553f6b"},
-    {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3c59a06c2c28a81a026ff11fbf012081ea34fb9b7052f2ed0366e14896f0a1d"},
-    {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415b9494ff6240b09af06b91a375731febe0090218e2898d2b85f9b92abcda0"},
-    {file = "greenlet-3.1.0-cp38-cp38-win32.whl", hash = "sha256:1544b8dd090b494c55e60c4ff46e238be44fdc472d2589e943c241e0169bcea2"},
-    {file = "greenlet-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:7f346d24d74c00b6730440f5eb8ec3fe5774ca8d1c9574e8e57c8671bb51b910"},
-    {file = "greenlet-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:db1b3ccb93488328c74e97ff888604a8b95ae4f35f4f56677ca57a4fc3a4220b"},
-    {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cd313629ded43bb3b98737bba2f3e2c2c8679b55ea29ed73daea6b755fe8e7"},
-    {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501"},
-    {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3967dcc1cd2ea61b08b0b276659242cbce5caca39e7cbc02408222fb9e6ff39"},
-    {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d45b75b0f3fd8d99f62eb7908cfa6d727b7ed190737dec7fe46d993da550b81a"},
-    {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d004db911ed7b6218ec5c5bfe4cf70ae8aa2223dffbb5b3c69e342bb253cb28"},
-    {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9505a0c8579899057cbefd4ec34d865ab99852baf1ff33a9481eb3924e2da0b"},
-    {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fd6e94593f6f9714dbad1aaba734b5ec04593374fa6638df61592055868f8b8"},
-    {file = "greenlet-3.1.0-cp39-cp39-win32.whl", hash = "sha256:d0dd943282231480aad5f50f89bdf26690c995e8ff555f26d8a5b9887b559bcc"},
-    {file = "greenlet-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:ac0adfdb3a21dc2a24ed728b61e72440d297d0fd3a577389df566651fcd08f97"},
-    {file = "greenlet-3.1.0.tar.gz", hash = "sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0"},
+    {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
+    {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
+    {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"},
+    {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"},
+    {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"},
+    {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"},
+    {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"},
+    {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"},
+    {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"},
+    {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"},
+    {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"},
+    {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"},
+    {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"},
+    {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"},
+    {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"},
+    {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"},
+    {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"},
+    {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"},
+    {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"},
+    {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"},
+    {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"},
+    {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"},
+    {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"},
+    {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"},
+    {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"},
+    {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"},
+    {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"},
+    {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"},
+    {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"},
+    {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"},
+    {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"},
+    {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"},
+    {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"},
+    {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"},
+    {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"},
+    {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"},
+    {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"},
+    {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"},
+    {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"},
+    {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"},
+    {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"},
+    {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"},
+    {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"},
+    {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"},
+    {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"},
+    {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"},
+    {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"},
+    {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"},
+    {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"},
+    {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"},
+    {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"},
+    {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"},
+    {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"},
+    {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"},
+    {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"},
+    {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"},
+    {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"},
+    {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"},
+    {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"},
+    {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"},
+    {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"},
+    {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"},
+    {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"},
+    {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"},
+    {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"},
+    {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"},
+    {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"},
+    {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"},
+    {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"},
+    {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"},
+    {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"},
+    {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"},
+    {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"},
 ]
 
 [package.extras]
@@ -2269,13 +2276,13 @@ requests = ">=2,<3"
 
 [[package]]
 name = "langsmith"
-version = "0.1.121"
+version = "0.1.125"
 description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
 optional = false
 python-versions = "<4.0,>=3.8.1"
 files = [
-    {file = "langsmith-0.1.121-py3-none-any.whl", hash = "sha256:fdb1ac8a671d3904201bfeea197d87bded46a10d08f1034af464211872e29893"},
-    {file = "langsmith-0.1.121.tar.gz", hash = "sha256:e9381b82a5bd484af9a51c3e96faea572746b8d617b070c1cda40cbbe48e33df"},
+    {file = "langsmith-0.1.125-py3-none-any.whl", hash = "sha256:74ce8eb2663e1ed20bfcfc88d41e0712879306956c9938d1cdbab7d60458bdca"},
+    {file = "langsmith-0.1.125.tar.gz", hash = "sha256:2c0eb0c3cbf22cff55bf519b8e889041f9a591bcf97af5152c8e130333c5940e"},
 ]
 
 [package.dependencies]
@@ -2926,13 +2933,13 @@ reports = ["lxml"]
 
 [[package]]
 name = "mypy-boto3-dynamodb"
-version = "1.35.15"
-description = "Type annotations for boto3.DynamoDB 1.35.15 service generated with mypy-boto3-builder 8.0.1"
+version = "1.35.24"
+description = "Type annotations for boto3.DynamoDB 1.35.24 service generated with mypy-boto3-builder 8.1.1"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "mypy_boto3_dynamodb-1.35.15-py3-none-any.whl", hash = "sha256:ac7daacc874e00a5ece33d582916c180a5fac5b293abcc5def5336749769e9cf"},
-    {file = "mypy_boto3_dynamodb-1.35.15.tar.gz", hash = "sha256:7a913873e54289c5d392e18626ef379711530d406eda7766cb7e8d0114c2cbc1"},
+    {file = "mypy_boto3_dynamodb-1.35.24-py3-none-any.whl", hash = "sha256:022859543c5314f14fb03ef4e445e34b97b9bc0cecb003c14c10943a2eaa3ff7"},
+    {file = "mypy_boto3_dynamodb-1.35.24.tar.gz", hash = "sha256:55bf897a1d0e354579edb05001f4bc4f472b9452badd9db24876c31bdf3f72a1"},
 ]
 
 [package.dependencies]
@@ -3080,13 +3087,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
 
 [[package]]
 name = "openai"
-version = "1.46.0"
+version = "1.47.0"
 description = "The official Python library for the openai API"
 optional = false
 python-versions = ">=3.7.1"
 files = [
-    {file = "openai-1.46.0-py3-none-any.whl", hash = "sha256:8e423690b121d0268c7bb83b552e14f339b0ba250e1d0f70d145c194e79c4e1b"},
-    {file = "openai-1.46.0.tar.gz", hash = "sha256:0c5a783530d7cd90e2370dbd52d9239d2d53dc7a0badf9ee1e2e23d3f148969b"},
+    {file = "openai-1.47.0-py3-none-any.whl", hash = "sha256:9ccc8737dfa791f7bd903db4758c176b8544a8cd89d3a3d2add3cea02a34c3a0"},
+    {file = "openai-1.47.0.tar.gz", hash = "sha256:6e14d6f77c8cf546646afcd87a2ef752505b3710d2564a2e433e17307dfa86a0"},
 ]
 
 [package.dependencies]
@@ -3326,38 +3333,53 @@ files = [
 
 [[package]]
 name = "pandas"
-version = "2.2.2"
+version = "2.2.3"
 description = "Powerful data structures for data analysis, time series, and statistics"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"},
-    {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"},
-    {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"},
-    {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"},
-    {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"},
-    {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"},
-    {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"},
-    {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"},
-    {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"},
-    {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"},
-    {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"},
-    {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"},
-    {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"},
-    {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"},
-    {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"},
-    {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"},
-    {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"},
-    {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"},
-    {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"},
-    {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"},
-    {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"},
-    {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"},
-    {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"},
-    {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"},
-    {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"},
-    {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"},
-    {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"},
+    {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"},
+    {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"},
+    {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"},
+    {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"},
+    {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"},
+    {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"},
+    {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"},
+    {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"},
+    {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"},
+    {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"},
+    {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"},
+    {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"},
+    {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"},
+    {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"},
+    {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"},
+    {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"},
+    {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"},
+    {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"},
+    {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"},
+    {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"},
+    {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"},
+    {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"},
+    {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"},
+    {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"},
+    {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"},
+    {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"},
+    {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"},
+    {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"},
+    {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"},
+    {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"},
+    {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"},
+    {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"},
+    {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"},
+    {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"},
+    {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"},
+    {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"},
+    {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"},
+    {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"},
+    {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"},
+    {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"},
+    {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"},
+    {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"},
 ]
 
 [package.dependencies]
@@ -3505,13 +3527,13 @@ xmp = ["defusedxml"]
 
 [[package]]
 name = "platformdirs"
-version = "4.3.4"
+version = "4.3.6"
 description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "platformdirs-4.3.4-py3-none-any.whl", hash = "sha256:8b4ba85412f5065dae40aa19feaa02ac2be584c8b14abd70712b5cd11ad80034"},
-    {file = "platformdirs-4.3.4.tar.gz", hash = "sha256:9e8a037c36fe1b1f1b5de4482e60464272cc8dca725e40b568bf2c285f7509cf"},
+    {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
+    {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
 ]
 
 [package.extras]
@@ -3608,22 +3630,22 @@ requests = ">=2.28.2,<3.0.0"
 
 [[package]]
 name = "protobuf"
-version = "4.25.4"
+version = "4.25.5"
 description = ""
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"},
-    {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"},
-    {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"},
-    {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"},
-    {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"},
-    {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"},
-    {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"},
-    {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"},
-    {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"},
-    {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"},
-    {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"},
+    {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"},
+    {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"},
+    {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"},
+    {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"},
+    {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"},
+    {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"},
+    {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"},
+    {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"},
+    {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"},
+    {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"},
+    {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"},
 ]
 
 [[package]]
@@ -4620,13 +4642,13 @@ slack-sdk = ">=3.26.0,<4"
 
 [[package]]
 name = "slack-sdk"
-version = "3.33.0"
+version = "3.33.1"
 description = "The Slack API Platform SDK for Python"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "slack_sdk-3.33.0-py2.py3-none-any.whl", hash = "sha256:853bb55154115d080cae342c4099f2ccb559a78ae8d0f5109b49842401a920fa"},
-    {file = "slack_sdk-3.33.0.tar.gz", hash = "sha256:070eb1fb355c149a5f80fa0be6eeb5f5588e4ddff4dd76acf060454435cb037e"},
+    {file = "slack_sdk-3.33.1-py2.py3-none-any.whl", hash = "sha256:ef93beec3ce9c8f64da02fd487598a05ec4bc9c92ceed58f122dbe632691cbe2"},
+    {file = "slack_sdk-3.33.1.tar.gz", hash = "sha256:e328bb661d95db5f66b993b1d64288ac7c72201a745b4c7cf8848dafb7b74e40"},
 ]
 
 [package.extras]
@@ -5538,4 +5560,4 @@ type = ["pytest-mypy"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<4.0.0"
-content-hash = "abd6fcef6a72a72b26f8c5842a877c57fd32414a729f8c6b1ec37a488b217b09"
+content-hash = "701884d90d01a3b956a349bb65ebdaf7bba68048cb89de4e27802ff3327f059c"

From d86654ad6a38b8afbfab52ca66333f4e04b68c12 Mon Sep 17 00:00:00 2001
From: Quy Tang <3761730+qtangs@users.noreply.github.com>
Date: Mon, 23 Sep 2024 06:49:22 +0800
Subject: [PATCH 07/10] fix: correct test db file name

---
 cypress/e2e/data_layer_sqlite/spec.cy.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/cypress/e2e/data_layer_sqlite/spec.cy.ts b/cypress/e2e/data_layer_sqlite/spec.cy.ts
index c299762775..841c97fa9e 100644
--- a/cypress/e2e/data_layer_sqlite/spec.cy.ts
+++ b/cypress/e2e/data_layer_sqlite/spec.cy.ts
@@ -28,7 +28,7 @@ describe('Data Layer with Sqlite', () => {
 
   describe('DB file existence check', () => {
     it('should check if db file was created', () => {
-      const filePath = 'cypress/e2e/data_layer_sqlite/chainlit.db';
+      const filePath = 'cypress/e2e/data_layer_sqlite/test_db.sqlite';
 
       cy.readFile(filePath).then((content) => {
         expect(content).to.exist;
optional = false python-versions = ">=3.8" files = [ - {file = "cattrs-24.1.1-py3-none-any.whl", hash = "sha256:ec8ce8fdc725de9d07547cd616f968670687c6fa7a2e263b088370c46d834d97"}, - {file = "cattrs-24.1.1.tar.gz", hash = "sha256:16e94a13f9aaf6438bd5be5df521e072b1b00481b4cf807bcb1acbd49f814c08"}, + {file = "cattrs-24.1.2-py3-none-any.whl", hash = "sha256:67c7495b760168d931a10233f979b28dc04daf853b30752246f4f8471c6d68d0"}, + {file = "cattrs-24.1.2.tar.gz", hash = "sha256:8028cfe1ff5382df59dd36474a86e02d817b06eaf8af84555441bac915d2ef85"}, ] [package.dependencies] @@ -5560,4 +5560,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0.0" -content-hash = "701884d90d01a3b956a349bb65ebdaf7bba68048cb89de4e27802ff3327f059c" +content-hash = "a3ef73adfacfeff4956cad0d808d883985082d3e90b645bf8c5e31b8df11cb2d" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index cb02fe065a..8c5bc2b780 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -69,7 +69,6 @@ plotly = "^5.18.0" slack_bolt = "^1.18.1" discord = "^2.3.2" botbuilder-core = "^4.15.0" -aiosqlite = "^0.20.0" [tool.poetry.group.dev.dependencies] black = "^24.8.0" @@ -113,6 +112,7 @@ ignore_missing_imports = true optional = true [tool.poetry.group.custom-data.dependencies] +aiosqlite = "^0.20.0" asyncpg = "^0.29.0" SQLAlchemy = "^2.0.28" boto3 = "^1.34.73" From 013de55b999008a7ae81cd51d4d366c1f86c2e8f Mon Sep 17 00:00:00 2001 From: Quy Tang <3761730+qtangs@users.noreply.github.com> Date: Mon, 23 Sep 2024 09:00:04 +0800 Subject: [PATCH 09/10] Revert "feat: move aiosqlite to custom-data group" This reverts commit 0a11293308b5d10db55b12a96be1f8cf7490e710. --- backend/poetry.lock | 8 ++++---- backend/pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/poetry.lock b/backend/poetry.lock index 40c307886f..97c5e77f92 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -581,13 +581,13 @@ crt = ["awscrt (==0.21.5)"] [[package]] name = "cattrs" -version = "24.1.2" +version = "24.1.1" description = "Composable complex class support for attrs and dataclasses." 
optional = false python-versions = ">=3.8" files = [ - {file = "cattrs-24.1.2-py3-none-any.whl", hash = "sha256:67c7495b760168d931a10233f979b28dc04daf853b30752246f4f8471c6d68d0"}, - {file = "cattrs-24.1.2.tar.gz", hash = "sha256:8028cfe1ff5382df59dd36474a86e02d817b06eaf8af84555441bac915d2ef85"}, + {file = "cattrs-24.1.1-py3-none-any.whl", hash = "sha256:ec8ce8fdc725de9d07547cd616f968670687c6fa7a2e263b088370c46d834d97"}, + {file = "cattrs-24.1.1.tar.gz", hash = "sha256:16e94a13f9aaf6438bd5be5df521e072b1b00481b4cf807bcb1acbd49f814c08"}, ] [package.dependencies] @@ -5560,4 +5560,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0.0" -content-hash = "a3ef73adfacfeff4956cad0d808d883985082d3e90b645bf8c5e31b8df11cb2d" +content-hash = "701884d90d01a3b956a349bb65ebdaf7bba68048cb89de4e27802ff3327f059c" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 8c5bc2b780..cb02fe065a 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -69,6 +69,7 @@ plotly = "^5.18.0" slack_bolt = "^1.18.1" discord = "^2.3.2" botbuilder-core = "^4.15.0" +aiosqlite = "^0.20.0" [tool.poetry.group.dev.dependencies] black = "^24.8.0" @@ -112,7 +113,6 @@ ignore_missing_imports = true optional = true [tool.poetry.group.custom-data.dependencies] -aiosqlite = "^0.20.0" asyncpg = "^0.29.0" SQLAlchemy = "^2.0.28" boto3 = "^1.34.73" From 29ff4d24005c4e6add36e725e08913cfc23d231e Mon Sep 17 00:00:00 2001 From: Quy Tang <3761730+qtangs@users.noreply.github.com> Date: Mon, 23 Sep 2024 09:14:23 +0800 Subject: [PATCH 10/10] feat: remove aiosqlite from main dependencies --- backend/poetry.lock | 8 ++++---- backend/pyproject.toml | 1 - 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/backend/poetry.lock b/backend/poetry.lock index 97c5e77f92..bb136fe18e 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -581,13 +581,13 @@ crt = ["awscrt (==0.21.5)"] [[package]] name = "cattrs" -version = "24.1.1" +version = "24.1.2" description = "Composable complex class support for attrs and dataclasses." optional = false python-versions = ">=3.8" files = [ - {file = "cattrs-24.1.1-py3-none-any.whl", hash = "sha256:ec8ce8fdc725de9d07547cd616f968670687c6fa7a2e263b088370c46d834d97"}, - {file = "cattrs-24.1.1.tar.gz", hash = "sha256:16e94a13f9aaf6438bd5be5df521e072b1b00481b4cf807bcb1acbd49f814c08"}, + {file = "cattrs-24.1.2-py3-none-any.whl", hash = "sha256:67c7495b760168d931a10233f979b28dc04daf853b30752246f4f8471c6d68d0"}, + {file = "cattrs-24.1.2.tar.gz", hash = "sha256:8028cfe1ff5382df59dd36474a86e02d817b06eaf8af84555441bac915d2ef85"}, ] [package.dependencies] @@ -5560,4 +5560,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0.0" -content-hash = "701884d90d01a3b956a349bb65ebdaf7bba68048cb89de4e27802ff3327f059c" +content-hash = "abd6fcef6a72a72b26f8c5842a877c57fd32414a729f8c6b1ec37a488b217b09" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index cb02fe065a..7b02912a16 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -49,7 +49,6 @@ packaging = "^23.1" python-multipart = "^0.0.9" pyjwt = "^2.8.0" numpy = "^1.26" -aiosqlite = "^0.20.0" [tool.poetry.group.tests] optional = true