From ff3dfb41f5c33f296f32bce4c47ad87f2046fcff Mon Sep 17 00:00:00 2001 From: Marcel Klehr Date: Wed, 17 Jan 2024 13:14:11 +0100 Subject: [PATCH] lint all the things --- lib/chains.py | 9 ++++----- lib/formalize.py | 6 ++---- lib/free_prompt.py | 6 ++---- lib/headline.py | 6 ++---- lib/main.py | 7 +++---- lib/simplify.py | 6 ++---- lib/summarize.py | 6 ++---- lib/topics.py | 6 ++---- 8 files changed, 19 insertions(+), 33 deletions(-) diff --git a/lib/chains.py b/lib/chains.py index b6b6bf5..0a9d830 100644 --- a/lib/chains.py +++ b/lib/chains.py @@ -1,5 +1,4 @@ -""" -Registers all chains based on the models/ directory contents +"""Registers all chains based on the models/ directory contents """ import os @@ -34,9 +33,9 @@ def generate_llm_generator(path): def generate_chains(model_name, model): - chains[model_name + ":summarize"] = lambda: SummarizeChain(llm=model()) - chains[model_name + ":simplify"] = lambda: SimplifyChain(llm=model()) - chains[model_name + ":formalize"] = lambda: FormalizeChain(llm=model()) + chains[model_name + ":summary"] = lambda: SummarizeChain(llm=model()) + #chains[model_name + ":simplify"] = lambda: SimplifyChain(llm=model()) + #chains[model_name + ":formalize"] = lambda: FormalizeChain(llm=model()) chains[model_name + ":headline"] = lambda: HeadlineChain(llm=model()) chains[model_name + ":topics"] = lambda: TopicsChain(llm=model()) chains[model_name + ":free_prompt"] = lambda: FreePromptChain(llm=model()) diff --git a/lib/formalize.py b/lib/formalize.py index 104731b..d650fcd 100644 --- a/lib/formalize.py +++ b/lib/formalize.py @@ -1,5 +1,4 @@ -""" -A langchain chain to formalize text +"""A langchain chain to formalize text """ from typing import Any, Optional @@ -14,8 +13,7 @@ class FormalizeChain(Chain): - """ - A formalize chain + """A formalize chain """ prompt: BasePromptTemplate = PromptTemplate( diff --git a/lib/free_prompt.py b/lib/free_prompt.py index de5a77a..b1f1dba 100644 --- a/lib/free_prompt.py +++ b/lib/free_prompt.py @@ 
-1,5 +1,4 @@ -""" -A free rpompt chain +"""A free prompt chain """ from typing import Any, Optional @@ -12,8 +11,7 @@ class FreePromptChain(Chain): - """ - A free prompt chain + """A free prompt chain """ llm: BaseLanguageModel diff --git a/lib/headline.py b/lib/headline.py index 642c28a..c5287ea 100644 --- a/lib/headline.py +++ b/lib/headline.py @@ -1,5 +1,4 @@ -""" -A chain to generate a headline for a text +"""A chain to generate a headline for a text """ from typing import Any, Optional @@ -13,8 +12,7 @@ class HeadlineChain(Chain): - """ - A headline chain + """A headline chain """ prompt: BasePromptTemplate = PromptTemplate( diff --git a/lib/main.py b/lib/main.py index 88f3990..fd974a7 100644 --- a/lib/main.py +++ b/lib/main.py @@ -1,5 +1,4 @@ -""" -Tha main module of the llm2 app +"""The main module of the llm2 app """ import queue @@ -12,7 +11,7 @@ from chains import chains from fastapi import Depends, FastAPI, responses from nc_py_api import AsyncNextcloudApp, NextcloudApp -from nc_py_api.ex_app import LogLvl, anc_app +from nc_py_api.ex_app import LogLvl, anc_app, run_app, set_handlers @asynccontextmanager @@ -85,7 +84,7 @@ async def enabled_handler(enabled: bool, nc: AsyncNextcloudApp) -> str: for chain_name, _ in chains.items(): (model, task) = chain_name.split(":", 2) await nc.providers.text_processing.register( - model, "Local Large language Model: " + model, "/chain/" + chain_name, task + "llm2:"+chain_name, "Local Large language Model: " + model, "/chain/" + chain_name, task ) else: for chain_name, chain in chains.items(): diff --git a/lib/simplify.py b/lib/simplify.py index 778b248..7b1bf69 100644 --- a/lib/simplify.py +++ b/lib/simplify.py @@ -1,5 +1,4 @@ -""" -A simplify chain +"""A simplify chain """ from typing import Any, Optional @@ -14,8 +13,7 @@ class SimplifyChain(Chain): - """ - A summarization chain + """A simplify chain """ prompt: BasePromptTemplate = PromptTemplate( diff --git a/lib/summarize.py b/lib/summarize.py index 
4760f83..6e9895b 100644 --- a/lib/summarize.py +++ b/lib/summarize.py @@ -1,5 +1,4 @@ -""" -A recursive summarize chain +"""A recursive summarize chain """ from typing import Any, Optional @@ -14,8 +13,7 @@ class SummarizeChain(Chain): - """ - A summarization chain + """A summarization chain """ prompt: BasePromptTemplate = PromptTemplate( diff --git a/lib/topics.py b/lib/topics.py index 0e6b8df..c0a58f2 100644 --- a/lib/topics.py +++ b/lib/topics.py @@ -1,5 +1,4 @@ -""" -A chain that extracts topcis from a text +"""A chain that extracts topics from a text """ from typing import Any, Optional @@ -13,8 +12,7 @@ class TopicsChain(Chain): - """ - A topics chain + """A topics chain """ prompt: BasePromptTemplate = PromptTemplate(