Skip to content

Commit

Permalink
Update LLM override defaults
Browse files Browse the repository at this point in the history
  • Loading branch information
pablonyx committed Dec 15, 2024
1 parent 2bc0084 commit 0659f16
Show file tree
Hide file tree
Showing 4 changed files with 51 additions and 35 deletions.
6 changes: 3 additions & 3 deletions web/src/app/chat/ChatPage.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -272,6 +272,7 @@ export function ChatPage({
};

const llmOverrideManager = useLlmOverride(
llmProviders,
modelVersionFromSearchParams || (user?.preferences.default_model ?? null),
selectedChatSession,
defaultTemperature
Expand Down Expand Up @@ -318,9 +319,9 @@ export function ChatPage({
);

if (personaDefault) {
llmOverrideManager.setLlmOverride(personaDefault);
llmOverrideManager.updateLLMOverride(personaDefault);
} else if (user?.preferences.default_model) {
llmOverrideManager.setLlmOverride(
llmOverrideManager.updateLLMOverride(
destructureValue(user?.preferences.default_model)
);
}
Expand Down Expand Up @@ -1202,7 +1203,6 @@ export function ChatPage({
assistant_message_id: number;
frozenMessageMap: Map<number, Message>;
} = null;

try {
const mapKeys = Array.from(
currentMessageMap(completeMessageDetail).keys()
Expand Down
4 changes: 2 additions & 2 deletions web/src/app/chat/RegenerateOption.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -124,9 +124,9 @@ export default function RegenerateOption({
onHoverChange: (isHovered: boolean) => void;
onDropdownVisibleChange: (isVisible: boolean) => void;
}) {
const llmOverrideManager = useLlmOverride();

const { llmProviders } = useChatContext();
const llmOverrideManager = useLlmOverride(llmProviders);

const [_, llmName] = getFinalLLM(llmProviders, selectedAssistant, null);

const llmOptionsByProvider: {
Expand Down
4 changes: 2 additions & 2 deletions web/src/app/chat/modal/configuration/LlmTab.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ export const LlmTab = forwardRef<HTMLDivElement, LlmTabProps>(
checkPersonaRequiresImageGeneration(currentAssistant);

const { llmProviders } = useChatContext();
const { setLlmOverride, temperature, updateTemperature } =
const { updateLLMOverride, temperature, updateTemperature } =
llmOverrideManager;
const [isTemperatureExpanded, setIsTemperatureExpanded] = useState(false);

Expand All @@ -60,7 +60,7 @@ export const LlmTab = forwardRef<HTMLDivElement, LlmTabProps>(
if (value == null) {
return;
}
setLlmOverride(destructureValue(value));
updateLLMOverride(destructureValue(value));
if (chatSessionId) {
updateModelOverrideForChatSession(chatSessionId, value as string);
}
Expand Down
72 changes: 44 additions & 28 deletions web/src/lib/hooks.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,16 @@ import { errorHandlingFetcher } from "./fetcher";
import { useContext, useEffect, useState } from "react";
import { DateRangePickerValue } from "@/app/ee/admin/performance/DateRangeSelector";
import { SourceMetadata } from "./search/interfaces";
import { destructureValue } from "./llm/utils";
import { destructureValue, structureValue } from "./llm/utils";
import { ChatSession } from "@/app/chat/interfaces";
import { UsersResponse } from "./users/interfaces";
import { Credential } from "./connectors/credentials";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { PersonaCategory } from "@/app/admin/assistants/interfaces";
import {
LLMProvider,
LLMProviderDescriptor,
} from "@/app/admin/configuration/llm/interfaces";
import { isAnthropic } from "@/app/admin/configuration/llm/interfaces";

const CREDENTIAL_URL = "/api/manage/admin/credential";
Expand Down Expand Up @@ -157,60 +161,72 @@ export interface LlmOverride {

export interface LlmOverrideManager {
llmOverride: LlmOverride;
setLlmOverride: React.Dispatch<React.SetStateAction<LlmOverride>>;
updateLLMOverride: (newOverride: LlmOverride) => void;
globalDefault: LlmOverride;
setGlobalDefault: React.Dispatch<React.SetStateAction<LlmOverride>>;
temperature: number | null;
updateTemperature: (temperature: number | null) => void;
updateModelOverrideForChatSession: (chatSession?: ChatSession) => void;
}
export function useLlmOverride(
llmProviders: LLMProviderDescriptor[],
globalModel?: string | null,
currentChatSession?: ChatSession,
defaultTemperature?: number
): LlmOverrideManager {
const getValidLlmOverride = (
overrideModel: string | null | undefined
): LlmOverride => {
if (overrideModel) {
const model = destructureValue(overrideModel);
const provider = llmProviders.find(
(p) =>
p.model_names.includes(model.modelName) &&
p.provider === model.provider
);
if (provider) {
return { ...model, name: provider.name };
}
}
return { name: "", provider: "", modelName: "" };
};

const [globalDefault, setGlobalDefault] = useState<LlmOverride>(
globalModel != null
? destructureValue(globalModel)
: {
name: "",
provider: "",
modelName: "",
}
getValidLlmOverride(globalModel)
);
const updateLLMOverride = (newOverride: LlmOverride) => {
setLlmOverride(
getValidLlmOverride(
structureValue(
newOverride.name,
newOverride.provider,
newOverride.modelName
)
)
);
};

const [llmOverride, setLlmOverride] = useState<LlmOverride>(
currentChatSession && currentChatSession.current_alternate_model
? destructureValue(currentChatSession.current_alternate_model)
: {
name: "",
provider: "",
modelName: "",
}
? getValidLlmOverride(currentChatSession.current_alternate_model)
: { name: "", provider: "", modelName: "" }
);

const updateModelOverrideForChatSession = (chatSession?: ChatSession) => {
setLlmOverride(
chatSession && chatSession.current_alternate_model
? destructureValue(chatSession.current_alternate_model)
? getValidLlmOverride(chatSession.current_alternate_model)
: globalDefault
);
};

const [temperature, setTemperature] = useState<number | null>(
defaultTemperature != undefined ? defaultTemperature : 0
defaultTemperature !== undefined ? defaultTemperature : 0
);

useEffect(() => {
setGlobalDefault(
globalModel != null
? destructureValue(globalModel)
: {
name: "",
provider: "",
modelName: "",
}
);
}, [globalModel]);
setGlobalDefault(getValidLlmOverride(globalModel));
}, [globalModel, llmProviders]);

useEffect(() => {
setTemperature(defaultTemperature !== undefined ? defaultTemperature : 0);
Expand All @@ -233,7 +249,7 @@ export function useLlmOverride(
return {
updateModelOverrideForChatSession,
llmOverride,
setLlmOverride,
updateLLMOverride,
globalDefault,
setGlobalDefault,
temperature,
Expand Down

0 comments on commit 0659f16

Please sign in to comment.