
Commit

Merge pull request #136 from eye-on-surveillance/AI/yaml-file-for-cloud-deployment

AI/yaml-file-for-cloud-deployment
ayyubibrahimi authored Nov 8, 2023
2 parents 3d5d2ae + 829bf10 commit 991dffc
Showing 2 changed files with 19 additions and 21 deletions.
packages/backend/src/preprocessor.py: 10 additions, 10 deletions
@@ -19,7 +19,7 @@


 def create_embeddings():
-    llm = ChatOpenAI()
+    # llm = ChatOpenAI()

     base_embeddings = OpenAIEmbeddings()

@@ -38,16 +38,16 @@ def create_embeddings():
         input_variables=["user_query"], template=in_depth_prompt_template
     )

-    llm_chain_general = LLMChain(llm=llm, prompt=general_prompt)
-    llm_chain_in_depth = LLMChain(llm=llm, prompt=in_depth_prompt)
+    # llm_chain_general = LLMChain(llm=llm, prompt=general_prompt)
+    # llm_chain_in_depth = LLMChain(llm=llm, prompt=in_depth_prompt)

-    general_embeddings = HypotheticalDocumentEmbedder(
-        llm_chain=llm_chain_general,
-        base_embeddings=base_embeddings,
-    )
-    in_depth_embeddings = HypotheticalDocumentEmbedder(
-        llm_chain=llm_chain_in_depth, base_embeddings=base_embeddings
-    )
+    # general_embeddings = HypotheticalDocumentEmbedder(
+    #     llm_chain=llm_chain_general,
+    #     base_embeddings=base_embeddings,
+    # )
+    # in_depth_embeddings = HypotheticalDocumentEmbedder(
+    #     llm_chain=llm_chain_in_depth, base_embeddings=base_embeddings
+    # )

     return base_embeddings, base_embeddings

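Context for the hunk above (not part of the commit): create_embeddings() in preprocessor.py previously wrapped the base OpenAIEmbeddings in HyDE embedders (HypotheticalDocumentEmbedder), which ask an LLM to draft a hypothetical answer and embed that text; after this change it simply returns the plain OpenAIEmbeddings instance twice. A minimal sketch of the two paths, assuming the langchain (~0.0.x, late 2023) imports the file already uses; the names create_embeddings_hyde and create_embeddings_plain are illustrative, not from the repository:

# Illustrative sketch only; function names are not from the repository.
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain, HypotheticalDocumentEmbedder


def create_embeddings_hyde(prompt_template: str) -> HypotheticalDocumentEmbedder:
    # Old path (now commented out in the diff): the query is first expanded by the
    # LLM through the prompt, and the hypothetical answer is what gets embedded.
    llm = ChatOpenAI()
    base_embeddings = OpenAIEmbeddings()
    prompt = PromptTemplate(input_variables=["user_query"], template=prompt_template)
    chain = LLMChain(llm=llm, prompt=prompt)
    return HypotheticalDocumentEmbedder(llm_chain=chain, base_embeddings=base_embeddings)


def create_embeddings_plain():
    # New path, matching the diff: both return values are the same plain
    # OpenAIEmbeddings object, so queries are embedded directly with no LLM call.
    base_embeddings = OpenAIEmbeddings()
    return base_embeddings, base_embeddings

The practical effect is one fewer chat-completion call per query embedding, at the cost of the HyDE-style query expansion.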
packages/googlecloud/functions/getanswer/helper.py: 9 additions, 11 deletions
@@ -37,8 +37,6 @@ def get_dbs():


 def create_embeddings():
-    llm = ChatOpenAI()
-
     general_prompt_template = """
 As an AI assistant tasked with generating brief general summaries, your role is to provide succinct, balanced information from the transcripts of New Orleans City Council meetings in response to the question "{question}". The response should not exceed one paragraph in length. If the available information from the transcripts is insufficient to accurately summarize the issue, please respond with 'Insufficient information available.' If the question extends beyond the scope of information contained in the transcripts, state 'I don't know.'
 Answer:"""
@@ -54,19 +52,19 @@ def create_embeddings():
         input_variables=["question"], template=in_depth_prompt_template
     )

-    llm_chain_general = LLMChain(llm=llm, prompt=general_prompt)
-    llm_chain_in_depth = LLMChain(llm=llm, prompt=in_depth_prompt)
+    # llm_chain_general = LLMChain(llm=llm, prompt=general_prompt)
+    # llm_chain_in_depth = LLMChain(llm=llm, prompt=in_depth_prompt)

     base_embeddings = OpenAIEmbeddings()

-    general_embeddings = HypotheticalDocumentEmbedder(
-        llm_chain=llm_chain_general, base_embeddings=base_embeddings
-    )
-    in_depth_embeddings = HypotheticalDocumentEmbedder(
-        llm_chain=llm_chain_in_depth, base_embeddings=base_embeddings
-    )
+    # general_embeddings = HypotheticalDocumentEmbedder(
+    #     llm_chain=llm_chain_general, base_embeddings=base_embeddings
+    # )
+    # in_depth_embeddings = HypotheticalDocumentEmbedder(
+    #     llm_chain=llm_chain_in_depth, base_embeddings=base_embeddings
+    # )

-    return general_embeddings, in_depth_embeddings
+    return base_embeddings, base_embeddings


 def sort_retrived_documents(doc_list):
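The same swap happens in helper.py: the return statement changes from (general_embeddings, in_depth_embeddings) to (base_embeddings, base_embeddings), so both retrieval modes now share one plain embedding object. A quick usage check, under stated assumptions (run from packages/googlecloud/functions/getanswer/ with OPENAI_API_KEY set; the query string is made up):

from helper import create_embeddings

general_embeddings, in_depth_embeddings = create_embeddings()
assert general_embeddings is in_depth_embeddings  # same OpenAIEmbeddings instance after this commit
vector = general_embeddings.embed_query("What did the council discuss about surveillance cameras?")
print(len(vector))  # 1536 with OpenAI's default text-embedding-ada-002 model at the time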

0 comments on commit 991dffc
