Ruff Styling (#368)
yuhongsun96 authored Aug 31, 2023
1 parent 51ec251 commit ac2a4f9
Showing 17 changed files with 37 additions and 35 deletions.
@@ -1,7 +1,7 @@
 """Create IndexAttempt table

 Revision ID: 47433d30de82
-Revises: 
+Revises:
 Create Date: 2023-05-04 00:55:32.971991

 """
6 changes: 3 additions & 3 deletions backend/danswer/background/connector_deletion.py
@@ -1,12 +1,12 @@
 """
 To delete a connector / credential pair:
-(1) find all documents associated with connector / credential pair where 
+(1) find all documents associated with connector / credential pair where
 this is the only connector / credential pair that has indexed it
 (2) delete all documents from document stores
 (3) delete all entries from postgres
-(4) find all documents associated with connector / credential pair where there 
+(4) find all documents associated with connector / credential pair where there
 are multiple connector / credential pairs that have indexed it
-(5) update document store entries to remove access associated with the 
+(5) update document store entries to remove access associated with the
 connector / credential pair from the access list
 (6) delete all relevant entries from postgres
 """
2 changes: 1 addition & 1 deletion backend/danswer/bots/slack/blocks.py
@@ -132,7 +132,7 @@ def build_qa_response_blocks(
     quotes_blocks: list[Block] = []
     if not answer:
         answer_block = SectionBlock(
-            text=f"Sorry, I was unable to find an answer, but I did find some potentially relevant docs 🤓"
+            text="Sorry, I was unable to find an answer, but I did find some potentially relevant docs 🤓"
         )
     else:
         answer_block = SectionBlock(text=answer)
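This is Ruff's F541 (f-string without any placeholders, from pyflakes): an `f` prefix on a literal with no `{...}` fields does nothing and can mask a forgotten substitution. A minimal sketch:

    name = "Danswer"
    greeting = f"Hello there"       # F541: no placeholders, the f prefix is dead weight
    greeting = "Hello there"        # fixed: plain string literal
    personalized = f"Hello {name}"  # fine: this f-string actually interpolates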
2 changes: 1 addition & 1 deletion backend/danswer/bots/slack/listener.py
@@ -76,7 +76,7 @@ def _process_slack_event(client: SocketModeClient, req: SocketModeRequest) -> No
     # this should never happen, but we can't continue without a channel since
     # we can't send a response without it
     if not channel:
-        channel_specific_logger.error(f"Found message without channel - skipping")
+        channel_specific_logger.error("Found message without channel - skipping")
         return

     message_subtype = event.get("subtype")
2 changes: 1 addition & 1 deletion backend/danswer/connectors/bookstack/client.py
@@ -30,7 +30,7 @@ def get(self, endpoint: str, params: dict[str, str]) -> dict[str, Any]:
         try:
             json = response.json()
-        except:
+        except Exception:
             json = {}

         if response.status_code >= 300:
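The bare `except:` is Ruff's E722 (pycodestyle). It catches everything, including `KeyboardInterrupt` and `SystemExit`, which derive from `BaseException` rather than `Exception`; `except Exception:` keeps the intended catch-all for runtime errors while letting interpreter-level signals propagate. A minimal sketch:

    import json

    def parse_or_default(raw: str) -> dict:
        try:
            return json.loads(raw)
        except Exception:  # unlike a bare except, Ctrl-C still interrupts this
            return {}

    print(parse_or_default('{"a": 1}'))  # {'a': 1}
    print(parse_or_default("not json"))  # {}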
10 changes: 6 additions & 4 deletions backend/danswer/connectors/confluence/connector.py
@@ -110,9 +110,10 @@ def _fetch(start_ind: int, batch_size: int) -> Collection[dict[str, Any]]:
                 limit=batch_size,
                 expand="body.storage.value,version",
             )
-        except:
+        except Exception:
             logger.warning(
-                f"Batch failed with space {self.space} at offset {start_ind} with size {batch_size}, processing pages individually..."
+                f"Batch failed with space {self.space} at offset {start_ind} "
+                f"with size {batch_size}, processing pages individually..."
             )

             view_pages: list[dict[str, Any]] = []
@@ -130,7 +131,8 @@ def _fetch(start_ind: int, batch_size: int) -> Collection[dict[str, Any]]:
                     )
                 except HTTPError as e:
                     logger.warning(
-                        f"Page failed with space {self.space} at offset {start_ind + i}, trying alternative expand option: {e}"
+                        f"Page failed with space {self.space} at offset {start_ind + i}, "
+                        f"trying alternative expand option: {e}"
                     )
                     # Use view instead, which captures most info but is less complete
                     view_pages.extend(
@@ -155,7 +157,7 @@ def _fetch(start_ind: int, batch_size: int) -> Collection[dict[str, Any]]:
         for i in range(self.batch_size):
             try:
                 pages.extend(_fetch(start_ind + i, 1))
-            except:
+            except Exception:
                 logger.exception(
                     "Ran into exception when fetching pages from Confluence"
                 )
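Besides the bare-except fixes, these hunks shorten over-long log lines (E501 territory) using implicit string concatenation: adjacent literals inside one set of parentheses are fused at compile time, and each fragment can carry its own `f` prefix. A minimal sketch with hypothetical values:

    space, offset, size = "ENG", 100, 50
    message = (
        f"Batch failed with space {space} at offset {offset} "
        f"with size {size}, processing pages individually..."
    )
    print(message)  # one string: adjacent literals are joined at compile time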
4 changes: 2 additions & 2 deletions backend/danswer/connectors/linear/connector.py
@@ -85,8 +85,8 @@ def _process_issues(
         """
         query IterateIssueBatches($first: Int, $after: String) {
             issues(
-                orderBy: updatedAt, 
-                first: $first, 
+                orderBy: updatedAt,
+                first: $first,
                 after: $after,
                 filter: {
                     updatedAt: {
2 changes: 1 addition & 1 deletion backend/danswer/connectors/slack/utils.py
@@ -163,7 +163,7 @@ def replace_channels_basic(message: str) -> str:
     """Simply replaces all channel mentions with `#<CHANNEL_ID>` in order
     to make a message work as part of a link"""
     # Find channel IDs in the message
-    channel_matches = re.findall("<#(.*?)\|(.*?)>", message)
+    channel_matches = re.findall(r"<#(.*?)\|(.*?)>", message)
     for channel_id, channel_name in channel_matches:
         message = message.replace(
             f"<#{channel_id}|{channel_name}>", f"#{channel_name}"
2 changes: 1 addition & 1 deletion backend/danswer/connectors/web/connector.py
@@ -152,7 +152,7 @@ def load_from_state(self) -> GenerateDocumentsOutput:
                 logger.info(f"Redirected to {final_page}")
                 current_url = final_page
                 if current_url in visited_links:
-                    logger.info(f"Redirected page already indexed")
+                    logger.info("Redirected page already indexed")
                     continue
                 visited_links.add(current_url)

8 changes: 4 additions & 4 deletions backend/danswer/datastores/datastore_utils.py
@@ -58,10 +58,10 @@ class CrossConnectorDocumentMetadata(BaseModel):
 T = TypeVar("T")


-def _add_if_not_exists(l: list[T], item: T) -> list[T]:
-    if item in l:
-        return l
-    return l + [item]
+def _add_if_not_exists(obj_list: list[T], item: T) -> list[T]:
+    if item in obj_list:
+        return obj_list
+    return obj_list + [item]


 def update_cross_connector_document_metadata_map(
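Renaming `l` to `obj_list` resolves E741 (ambiguous variable name): pycodestyle bans `l`, `O`, and `I` as identifiers because they read like `1` and `0` in many fonts. Behavior is untouched, as a quick usage sketch against the helper above shows (values hypothetical):

    print(_add_if_not_exists([1, 2], 3))  # [1, 2, 3]
    print(_add_if_not_exists([1, 2], 2))  # [1, 2] -- already present, same list returned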
2 changes: 1 addition & 1 deletion backend/danswer/datastores/vespa/store.py
@@ -437,7 +437,7 @@ def hybrid_retrieval(
             VespaIndex.yql_base
             + vespa_where_clauses
             + f"{{targetHits: {10 * num_to_retrieve}}}nearestNeighbor(embeddings, query_embedding) or "
-            + f'{{grammar: "weakAnd"}}userInput(@query)'
+            + '{grammar: "weakAnd"}userInput(@query)'
         )

         query_embedding = embed_query(query)
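The dropped literal is F541 again, with a wrinkle: inside an f-string, literal braces must be doubled (`{{` / `}}`), so removing the prefix also lets plain braces stand for themselves. A minimal sketch:

    hits = 10
    clause_a = f"{{targetHits: {hits}}}nearestNeighbor(...)"  # f-string: braces doubled
    clause_b = '{grammar: "weakAnd"}userInput(@query)'        # plain string: no escaping
    print(clause_a)  # {targetHits: 10}nearestNeighbor(...)
    print(clause_b)  # {grammar: "weakAnd"}userInput(@query)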
2 changes: 1 addition & 1 deletion backend/danswer/db/connector_credential_pair.py
@@ -21,7 +21,7 @@ def get_connector_credential_pairs(
 ) -> list[ConnectorCredentialPair]:
     stmt = select(ConnectorCredentialPair)
     if not include_disabled:
-        stmt = stmt.where(ConnectorCredentialPair.connector.disabled == False)
+        stmt = stmt.where(ConnectorCredentialPair.connector.disabled is False)
     results = db_session.scalars(stmt)
     return list(results.all())
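One caveat worth flagging on this hunk: E712 prefers `is False` for ordinary Python booleans, but inside a SQLAlchemy `where()` the `==` operator is overloaded to emit SQL, while `is` cannot be overloaded — a column compared with `is False` evaluates to the plain Python constant `False` at query-build time and silently matches no rows. The idiomatic SQLAlchemy spelling is `.is_(False)`. A minimal sketch, assuming a simple boolean column (model names hypothetical):

    from sqlalchemy import Boolean, Column, Integer, select
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Connector(Base):
        __tablename__ = "connector"
        id = Column(Integer, primary_key=True)
        disabled = Column(Boolean, default=False)

    # .is_() builds a real SQL comparison instead of a Python identity test
    stmt = select(Connector).where(Connector.disabled.is_(False))
    print(stmt)  # ... WHERE connector.disabled IS false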

2 changes: 1 addition & 1 deletion backend/danswer/direct_qa/llm_utils.py
@@ -82,7 +82,7 @@ def get_default_qa_model(
             llm=llm,
             qa_handler=qa_handler,
         )
-    except:
+    except Exception:
         logger.exception(
             "Unable to build a QABlock with the new approach, going back to the "
             "legacy approach"
10 changes: 5 additions & 5 deletions backend/danswer/direct_qa/qa_prompts.py
@@ -15,10 +15,10 @@
 QUOTE_PAT = "Quote:"

 BASE_PROMPT = (
-    f"Answer the query based on provided documents and quote relevant sections. "
-    f"Respond with a json containing a concise answer and up to three most relevant quotes from the documents. "
-    f'Respond with "?" for the answer if the query cannot be answered based on the documents. '
-    f"The quotes must be EXACT substrings from the documents."
+    "Answer the query based on provided documents and quote relevant sections. "
+    "Respond with a json containing a concise answer and up to three most relevant quotes from the documents. "
+    'Respond with "?" for the answer if the query cannot be answered based on the documents. '
+    "The quotes must be EXACT substrings from the documents."
 )

 SAMPLE_QUESTION = "Where is the Eiffel Tower?"
@@ -81,7 +81,7 @@ def _prepend(s: str, ppt: bool) -> str:

     prompt_current += _prepend(f"DOCUMENT SOURCE: {chunk.source_type}\n", prepend_tab)
     if chunk.metadata:
-        prompt_current += _prepend(f"METADATA:\n", prepend_tab)
+        prompt_current += _prepend("METADATA:\n", prepend_tab)
         connector_class = identify_connector_class(DocumentSource(chunk.source_type))
         for metadata_line in connector_class.parse_metadata(chunk.metadata):
             prompt_current += _prepend(f"\t{metadata_line}\n", prepend_tab)
2 changes: 1 addition & 1 deletion backend/danswer/main.py
@@ -54,7 +54,7 @@ def validation_exception_handler(
 def value_error_handler(_: Request, exc: ValueError) -> JSONResponse:
     try:
         raise (exc)
-    except:
+    except Exception:
         # log stacktrace
         logger.exception("ValueError")
         return JSONResponse(
2 changes: 1 addition & 1 deletion backend/danswer/server/search_backend.py
@@ -169,7 +169,7 @@ def stream_direct_qa(
     logger.debug(f"Received QA query: {question.query}")
     logger.debug(f"Query filters: {question.filters}")
     if question.use_keyword:
-        logger.debug(f"User selected Keyword Search")
+        logger.debug("User selected Keyword Search")

     @log_generator_function_time()
     def stream_qa_portions(
12 changes: 6 additions & 6 deletions backend/scripts/reset_postgres.py
@@ -29,12 +29,12 @@ def wipe_all_rows(database: str) -> None:
         table_names = cur.fetchall()

         # have to delete from these first to not run into psycopg2.errors.ForeignKeyViolation
-        cur.execute(f"DELETE FROM chunk")
-        cur.execute(f"DELETE FROM document_by_connector_credential_pair")
-        cur.execute(f"DELETE FROM document")
-        cur.execute(f"DELETE FROM connector_credential_pair")
-        cur.execute(f"DELETE FROM index_attempt")
-        cur.execute(f"DELETE FROM credential")
+        cur.execute("DELETE FROM chunk")
+        cur.execute("DELETE FROM document_by_connector_credential_pair")
+        cur.execute("DELETE FROM document")
+        cur.execute("DELETE FROM connector_credential_pair")
+        cur.execute("DELETE FROM index_attempt")
+        cur.execute("DELETE FROM credential")
         conn.commit()

         for table_name in table_names:
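Dropping the `f` prefixes here is purely cosmetic since the literals are static; where a table name genuinely must be interpolated (as in the `for table_name in table_names:` loop that follows), the safe psycopg2 idiom is the `psycopg2.sql` composition API rather than an f-string, because identifiers cannot be passed as ordinary `%s` parameters. A hedged sketch, assuming an open cursor `cur` and `fetchall()`-style one-tuples:

    from psycopg2 import sql

    for (table_name,) in table_names:
        # sql.Identifier quotes the name safely instead of splicing raw text
        cur.execute(sql.SQL("DELETE FROM {}").format(sql.Identifier(table_name)))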
