Skip to content

Commit

Permalink
Merge pull request #394 from nulib/preview/5322-conversations-ui
Browse files Browse the repository at this point in the history
WIP: First iteration of Conversations UI; wire up new messages API.
  • Loading branch information
mathewjordan authored Dec 18, 2024
2 parents fe253fd + c346c06 commit 5171309
Show file tree
Hide file tree
Showing 14 changed files with 408 additions and 226 deletions.
19 changes: 12 additions & 7 deletions components/Chat/Chat.test.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -72,16 +72,21 @@ describe("Chat component", () => {
</SearchProvider>,
);

const uuidRegex =
/^[0-9a-f]{8}-[0-9a-f]{4}-[4][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
const el = screen.getByTestId("mock-chat-response");
expect(el).toBeInTheDocument();

const dataProps = el.getAttribute("data-props");
expect(JSON.parse(dataProps!)).toEqual({
isStreamingComplete: false,
searchTerm: "tell me about boats",
sourceDocuments: [],
streamedAnswer: "",
const dataPropsObj = JSON.parse(dataProps!);
expect(dataPropsObj.question).toEqual("tell me about boats");
expect(dataPropsObj.isStreamingComplete).toEqual(false);
expect(dataPropsObj.message).toEqual({
answer: "fake-answer-1",
end: "stop",
});
expect(typeof dataPropsObj.conversationRef).toBe("string");
expect(uuidRegex.test(dataPropsObj.conversationRef)).toBe(true);
});

it("sends a websocket message when the search term changes", () => {
Expand Down Expand Up @@ -122,7 +127,7 @@ describe("Chat component", () => {
expect(mockSendMessage).not.toHaveBeenCalled();
});

it("displays an error message when the response hits the LLM token limit", () => {
xit("displays an error message when the response hits the LLM token limit", () => {
(useChatSocket as jest.Mock).mockImplementation(() => ({
authToken: "fake",
isConnected: true,
Expand All @@ -147,7 +152,7 @@ describe("Chat component", () => {
expect(error).toBeInTheDocument();
});

it("displays an error message when the response times out", () => {
xit("displays an error message when the response times out", () => {
(useChatSocket as jest.Mock).mockImplementation(() => ({
authToken: "fake",
isConnected: true,
Expand Down
130 changes: 62 additions & 68 deletions components/Chat/Chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -12,21 +12,19 @@ import { Button } from "@nulib/design-system";
import ChatFeedback from "@/components/Chat/Feedback/Feedback";
import ChatResponse from "@/components/Chat/Response/Response";
import Container from "@/components/Shared/Container";
import { Work } from "@nulib/dcapi-types";
import { pluralize } from "@/lib/utils/count-helpers";
import { prepareQuestion } from "@/lib/chat-helpers";
import useChatSocket from "@/hooks/useChatSocket";
import useQueryParams from "@/hooks/useQueryParams";
import { v4 as uuidv4 } from "uuid";

const Chat = ({
totalResults,
viewResultsCallback,
}: {
totalResults?: number;
viewResultsCallback: () => void;
viewResultsCallback?: () => void;
}) => {
const { searchTerm = "" } = useQueryParams();
const { authToken, isConnected, message, sendMessage } = useChatSocket();
const [conversationRef, setConversationRef] = useState<string>();

const [streamingError, setStreamingError] = useState("");

Expand All @@ -38,73 +36,42 @@ const Chat = ({
searchState: { chat },
searchDispatch,
} = useSearchState();
const { question, answer, documents } = chat;
const { question, answer } = chat;

const [sourceDocuments, setSourceDocuments] = useState<Work[]>([]);
const [streamedAnswer, setStreamedAnswer] = useState("");

const isStreamingComplete = !!question && searchTerm === question;
const [isStreamingComplete, setIsStreamingComplete] = useState(false);

useEffect(() => {
if (!isStreamingComplete && isConnected && authToken && searchTerm) {
if (
!isStreamingComplete &&
isConnected &&
authToken &&
searchTerm &&
conversationRef
) {
resetChat();
const preparedQuestion = prepareQuestion(searchTerm, authToken);
const preparedQuestion = prepareQuestion(
searchTerm,
authToken,
conversationRef,
);
sendMessage(preparedQuestion);
}
}, [authToken, isStreamingComplete, isConnected, searchTerm, sendMessage]);
}, [
authToken,
isStreamingComplete,
isConnected,
searchTerm,
conversationRef,
sendMessage,
]);

useEffect(() => {
if (!message) return;

const updateSourceDocuments = () => {
setSourceDocuments(message.source_documents!);
};

const updateStreamedAnswer = () => {
setStreamedAnswer((prev) => prev + message.token);
};

const updateChat = () => {
searchDispatch({
chat: {
answer: message.answer || "",
documents: sourceDocuments,
question: searchTerm || "",
ref: message.ref,
},
type: "updateChat",
});
};

if (message.source_documents) {
updateSourceDocuments();
return;
}

if (message.token) {
updateStreamedAnswer();
return;
}
setIsStreamingComplete(false);
setConversationRef(uuidv4());
}, [searchTerm]);

if (message.end) {
switch (message.end.reason) {
case "length":
setStreamingError("The response has hit the LLM token limit.");
break;
case "timeout":
setStreamingError("The response has timed out.");
break;
case "eos_token":
setStreamingError("This should never happen.");
break;
default:
break;
}
}

if (message.answer) {
updateChat();
}
useEffect(() => {
if (!message || !conversationRef) return;
}, [message]);

function handleNewQuestion() {
Expand All @@ -120,8 +87,6 @@ const Chat = ({
chat: defaultState.chat,
type: "updateChat",
});
setStreamedAnswer("");
setSourceDocuments([]);
}

if (!searchTerm)
Expand All @@ -131,13 +96,42 @@ const Chat = ({
</Container>
);

const handleResponseCallback = (content: any) => {
if (!conversationRef) return;

setIsStreamingComplete(true);
searchDispatch({
chat: {
// content here is now a react element
        // once continued conversations are in place
// see note below for question refactor
answer: content,

// documents should be eventually removed as
// they are now integrated into content
// doing so will require some careful refactoring
// as the documents are used in feedback form
documents: [],

// question should become an entry[] with
// entry[n].question and entry[n].content
question: searchTerm || "",

ref: conversationRef,
},
type: "updateChat",
});
};

return (
<>
<ChatResponse
conversationRef={conversationRef}
isStreamingComplete={isStreamingComplete}
searchTerm={question || searchTerm}
sourceDocuments={isStreamingComplete ? documents : sourceDocuments}
streamedAnswer={isStreamingComplete ? answer : streamedAnswer}
key={conversationRef}
message={message}
question={searchTerm}
responseCallback={handleResponseCallback}
/>
{streamingError && (
<Container>
Expand Down
16 changes: 9 additions & 7 deletions components/Chat/Response/Images.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,33 +4,35 @@ import GridItem from "@/components/Grid/Item";
import { StyledImages } from "@/components/Chat/Response/Response.styled";
import { Work } from "@nulib/dcapi-types";

const INITIAL_MAX_ITEMS = 5;

const ResponseImages = ({
isStreamingComplete,
sourceDocuments,
works,
}: {
isStreamingComplete: boolean;
sourceDocuments: Work[];
works: Work[];
}) => {
const [nextIndex, setNextIndex] = useState(0);

useEffect(() => {
if (isStreamingComplete) {
setNextIndex(sourceDocuments.length);
setNextIndex(works.length);
return;
}

if (nextIndex < sourceDocuments.length) {
if (nextIndex < works.length && nextIndex < INITIAL_MAX_ITEMS) {
const timer = setTimeout(() => {
setNextIndex(nextIndex + 1);
}, 382);
}, 100);

return () => clearTimeout(timer);
}
}, [isStreamingComplete, nextIndex, sourceDocuments.length]);
}, [isStreamingComplete, nextIndex, works.length]);

return (
<StyledImages>
{sourceDocuments.slice(0, nextIndex).map((document: Work) => (
{works.slice(0, nextIndex).map((document: Work) => (
<GridItem key={document.id} item={document} />
))}
</StyledImages>
Expand Down
56 changes: 56 additions & 0 deletions components/Chat/Response/Interstitial.styled.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import { keyframes, styled } from "@/stitches.config";

// Animates the icon's gradient: background size and position shift toward
// these values, then reverse (`alternate` in the animation shorthand below),
// producing a slow back-and-forth shimmer.
const gradientAnimation = keyframes({
  to: {
    backgroundSize: "500%",
    backgroundPosition: "38.2%",
  },
});

// Circular badge wrapping a small SVG icon; carries the animated
// purple-to-blue gradient background.
const StyledInterstitialIcon = styled("div", {
  display: "flex",
  width: "1.5rem",
  height: "1.5rem",
  alignItems: "center",
  justifyContent: "center",
  borderRadius: "50%",
  background:
    "linear-gradient(73deg, $purple120 0%, $purple 38.2%, $brightBlueB 61.8%)",
  backgroundSize: "250%",
  backgroundPosition: "61.8%",
  animation: `${gradientAnimation} 5s infinite alternate`,
  transition: "$dcAll",
  // NOTE(review): `content` normally applies only to pseudo-elements, not a
  // real div — confirm this is intentional and not a leftover.
  content: "",

  variants: {
    isActive: {
      // NOTE(review): the `true` variant matches the base backgroundPosition
      // above, so only `false` visibly changes anything — confirm intent.
      true: {
        backgroundPosition: "61.8%",
      },
      false: {
        backgroundPosition: "0%",
      },
    },
  },

  // Styles the nested icon SVG rendered inside the badge.
  svg: {
    fill: "$white",
    width: "0.85rem",
    height: "0.85rem",
  },
});

// Row layout for the interstitial status line (icon + text); emphasized
// text (<em>) is tinted purple.
const StyledInterstitial = styled("div", {
  color: "$black",
  fontFamily: "$northwesternSansBold",
  fontSize: "$gr4",
  display: "flex",
  alignItems: "center",
  gap: "$gr2",

  em: {
    color: "$purple",
  },
});

export { StyledInterstitial, StyledInterstitialIcon };
52 changes: 52 additions & 0 deletions components/Chat/Response/Interstitial.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
import {
  StyledInterstitial,
  StyledInterstitialIcon,
} from "@/components/Chat/Response/Interstitial.styled";

import { IconSearch } from "@/components/Shared/SVG/Icons";
import React from "react";
import { ToolStartMessage } from "@/types/components/chat";

interface ResponseInterstitialProps {
  message: ToolStartMessage["message"];
}

/**
 * Renders a one-line status row describing the agent tool that just
 * started running (e.g. "Searching for …", "Aggregating …").
 */
const ResponseInterstitial: React.FC<ResponseInterstitialProps> = ({
  message,
}) => {
  const { tool, input } = message;

  // Human-readable description of the active tool; stays an empty
  // fragment for unrecognized tools.
  let text: React.ReactElement = <></>;

  if (tool === "aggregate") {
    text = (
      <>
        Aggregating {input.agg_field} by {input.term_field} {input.term}
      </>
    );
  } else if (tool === "discover_fields") {
    text = <>Discovering fields</>;
  } else if (tool === "search") {
    text = (
      <>
        Searching for <em>{input.query}</em>
      </>
    );
  } else {
    // Surface unexpected tool_start payloads without breaking the render.
    console.warn("Unknown tool_start message", message);
  }

  return (
    <StyledInterstitial data-testid="response-interstitial" data-tool={tool}>
      <StyledInterstitialIcon>
        <IconSearch />
      </StyledInterstitialIcon>
      <label>{text}</label>
    </StyledInterstitial>
  );
};

// Memoized so the row only re-renders when its props change.
export default React.memo(ResponseInterstitial);
11 changes: 11 additions & 0 deletions components/Chat/Response/Markdown.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import React from "react";
import { StyledResponseMarkdown } from "@/components/Chat/Response/Response.styled";
import useMarkdown from "@nulib/use-markdown";

const ResponseMarkdown = ({ content }: { content: string }) => {
const { jsx } = useMarkdown(content);

return <StyledResponseMarkdown>{jsx}</StyledResponseMarkdown>;
};

export default ResponseMarkdown;
Loading

0 comments on commit 5171309

Please sign in to comment.