diff --git a/backend/database/database_manager.py b/backend/database/database_manager.py
index ba6c562..ff0907e 100644
--- a/backend/database/database_manager.py
+++ b/backend/database/database_manager.py
@@ -1,11 +1,25 @@
-from settings import DATABASE_URL
+from settings import (
+    APP_ENV,
+    DATABASE_POOL_MAX_OVERFLOW,
+    DATABASE_POOL_SIZE,
+    DATABASE_URL,
+)
 from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker
+from sqlalchemy.pool import QueuePool
 
 
 class DatabaseManager:
     def __init__(self):
-        self.engine = create_engine(DATABASE_URL)
+        if APP_ENV == "prod":
+            self.engine = create_engine(
+                DATABASE_URL,
+                poolclass=QueuePool,
+                pool_size=DATABASE_POOL_SIZE,
+                max_overflow=DATABASE_POOL_MAX_OVERFLOW,
+            )
+        else:
+            self.engine = create_engine(DATABASE_URL)
 
     def __enter__(self):
         Session = sessionmaker(bind=self.engine)
diff --git a/backend/llms/gpt_lang.py b/backend/llms/gpt_lang.py
index 323beb2..be7924d 100644
--- a/backend/llms/gpt_lang.py
+++ b/backend/llms/gpt_lang.py
@@ -5,14 +5,28 @@
 from langchain.agents.agent_types import AgentType
 from langchain.llms.openai import OpenAI
 from langchain.sql_database import SQLDatabase
-from settings import DATABASE_URL
+from settings import (
+    APP_ENV,
+    DATABASE_LANGCHAIN_POOL_MAX_OVERFLOW,
+    DATABASE_LANGCHAIN_POOL_SIZE,
+    DATABASE_POOL_URL,
+    DATABASE_URL,
+)
 
 
 class GPTLangSQL:
     def __init__(self, tables: List[str]):
         if not tables:
             raise ValueError("No tables provided")
-        self.db = SQLDatabase.from_uri(DATABASE_URL, include_tables=tables)
+        if APP_ENV == "prod":
+            self.db = SQLDatabase.from_uri(
+                DATABASE_POOL_URL,
+                include_tables=tables,
+                pool_size=DATABASE_LANGCHAIN_POOL_SIZE,
+                max_overflow=DATABASE_LANGCHAIN_POOL_MAX_OVERFLOW,
+            )
+        else:
+            self.db = SQLDatabase.from_uri(DATABASE_URL, include_tables=tables)
         self.toolkit = SQLDatabaseToolkit(db=self.db, llm=OpenAI(temperature=0))
         self.agent_executor = create_sql_agent(
             llm=OpenAI(temperature=0),
diff --git a/backend/settings.py b/backend/settings.py
index 0d5b444..950c7b3 100644
--- a/backend/settings.py
+++ b/backend/settings.py
@@ -24,7 +24,16 @@
 )
 JWT_SECRET_KEY = config("JWT_SECRET_KEY")
 
-DATABASE_URL = config("DATABASE_URL")
+DATABASE_URL = config(
+    "DATABASE_URL", default="postgresql://admin:admin@postgres_db:5432/db"
+)
+DATABASE_POOL_URL = config("DATABASE_POOL_URL", default=None)
+DATABASE_POOL_SIZE = int(config("DATABASE_POOL_SIZE", default=10))
+DATABASE_POOL_MAX_OVERFLOW = int(config("DATABASE_POOL_MAX_OVERFLOW", default=3))
+DATABASE_LANGCHAIN_POOL_SIZE = int(config("DATABASE_LANGCHAIN_POOL_SIZE", default=5))
+DATABASE_LANGCHAIN_POOL_MAX_OVERFLOW = int(
+    config("DATABASE_LANGCHAIN_POOL_MAX_OVERFLOW", default=2)
+)
 
 EMAIL_VERIFICATION_EXPIRE_MINUTES = int(
     config("EMAIL_VERIFICATION_EXPIRE_MINUTES", default=15)
diff --git a/k8s/backend-configmap.yaml b/k8s/backend-configmap.yaml
index f61abaa..911782c 100644
--- a/k8s/backend-configmap.yaml
+++ b/k8s/backend-configmap.yaml
@@ -5,6 +5,10 @@ metadata:
 data:
   APP_ENV: "prod"
   APP_HOST: "docshow.ai"
+  DATABASE_POOL_SIZE: "10"
+  DATABASE_POOL_MAX_OVERFLOW: "3"
+  DATABASE_LANGCHAIN_POOL_SIZE: "5"
+  DATABASE_LANGCHAIN_POOL_MAX_OVERFLOW: "2"
   EMAIL_VERIFICATION_EXPIRE_MINUTES: "15"
   PASSWORD_RESET_EXPIRE_MINUTES: "15"
   SPACES_BUCKET_NAME: "docshowai"
diff --git a/k8s/backend-deployment.yaml b/k8s/backend-deployment.yaml
index cb583fc..07254c8 100644
--- a/k8s/backend-deployment.yaml
+++ b/k8s/backend-deployment.yaml
@@ -18,11 +18,11 @@ spec:
             - configMapRef:
                 name: backend-config
           env:
-            - name: DATABASE_URL
+            - name: DATABASE_POOL_URL
              valueFrom:
                secretKeyRef:
                  name: db-credentials
-                  key: DATABASE_URL
+                  key: DATABASE_POOL_URL
            - name: OPENAI_API_KEY
              valueFrom:
                secretKeyRef:
diff --git a/k8s/backend-network-policy.yaml b/k8s/backend-network-policy.yaml
index 2fea2cf..925de96 100644
--- a/k8s/backend-network-policy.yaml
+++ b/k8s/backend-network-policy.yaml
@@ -44,3 +44,5 @@ spec:
       port: 443 # For outbound web traffic like SendGrid
     - protocol: TCP
      port: 25060 # For PostgreSQL database
+    - protocol: TCP
+      port: 25061 # For PostgreSQL database pool
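
One caveat worth flagging on the gpt_lang.py hunk: in the langchain releases that still expose langchain.sql_database (the import path used here), SQLDatabase.from_uri(database_uri, engine_args=None, **kwargs) forwards extra keyword arguments to SQLDatabase.__init__() rather than to SQLAlchemy's create_engine(), so pool_size and max_overflow passed at the top level may be rejected at runtime. A minimal sketch of the prod branch, assuming that signature and routing the pool settings through engine_args (build_sql_database is a hypothetical helper, not part of the diff):

# Sketch only: assumes langchain's SQLDatabase.from_uri(database_uri, engine_args=None, **kwargs),
# where engine_args is passed straight to sqlalchemy.create_engine().
from typing import List

from langchain.sql_database import SQLDatabase

from settings import (
    APP_ENV,
    DATABASE_LANGCHAIN_POOL_MAX_OVERFLOW,
    DATABASE_LANGCHAIN_POOL_SIZE,
    DATABASE_POOL_URL,
    DATABASE_URL,
)


def build_sql_database(tables: List[str]) -> SQLDatabase:
    # Hypothetical helper: route the pool settings through engine_args so they
    # reach the underlying SQLAlchemy engine instead of SQLDatabase.__init__().
    if APP_ENV == "prod":
        return SQLDatabase.from_uri(
            DATABASE_POOL_URL,
            include_tables=tables,
            engine_args={
                "pool_size": DATABASE_LANGCHAIN_POOL_SIZE,
                "max_overflow": DATABASE_LANGCHAIN_POOL_MAX_OVERFLOW,
            },
        )
    return SQLDatabase.from_uri(DATABASE_URL, include_tables=tables)

The database_manager.py hunk does not need this, since sqlalchemy.create_engine() accepts poolclass, pool_size, and max_overflow directly.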