diff --git a/cli/aiconfig-editor/src/components/EditorContainer.tsx b/cli/aiconfig-editor/src/components/EditorContainer.tsx
index 98de73afe..af2ce28a8 100644
--- a/cli/aiconfig-editor/src/components/EditorContainer.tsx
+++ b/cli/aiconfig-editor/src/components/EditorContainer.tsx
@@ -47,6 +47,8 @@ export default function EditorContainer({
     [aiconfig]
   );
 
+  // TODO: Implement editor context for callbacks, readonly state, etc.
+
   return (
     <>
diff --git a/cli/aiconfig-editor/src/components/SettingsPropertyRenderer.tsx b/cli/aiconfig-editor/src/components/SettingsPropertyRenderer.tsx
new file mode 100644
index 000000000..55d206c5c
--- /dev/null
+++ b/cli/aiconfig-editor/src/components/SettingsPropertyRenderer.tsx
@@ -0,0 +1,25 @@
+import { Flex } from "@mantine/core";
+import { memo } from "react";
+
+type Props = {
+  propertyName: string;
+  property: { [key: string]: any };
+  isRequired?: boolean;
+  initialValue: any;
+};
+
+export default memo(function SettingsPropertyRenderer({
+  propertyName,
+  property,
+  isRequired = false,
+  initialValue,
+}: Props) {
+  return (
+    <Flex direction="column">
+      <div>{propertyName}</div>
+      <div>{JSON.stringify(property)}</div>
+      <div>isRequired: {JSON.stringify(isRequired)}</div>
+      <div>initialValue: {JSON.stringify(initialValue)}</div>
+    </Flex>
+  );
+});
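Note: `SettingsPropertyRenderer` is a debug stub that just dumps its props. A possible follow-up, sketched here purely for illustration (the `renderPropertyControl` helper is hypothetical and not part of this diff; `Checkbox`, `NumberInput`, and `TextInput` are standard Mantine controls), would switch on the schema property's declared `type`:

```tsx
import { Checkbox, NumberInput, TextInput } from "@mantine/core";

// Hypothetical sketch: pick a Mantine control based on the schema property's
// type instead of dumping JSON. The branching mirrors the schema format used
// in this PR (type, minimum, maximum).
function renderPropertyControl(
  propertyName: string,
  property: { [key: string]: any }
) {
  switch (property.type) {
    case "boolean":
      return <Checkbox label={propertyName} />;
    case "number":
    case "integer":
      // minimum/maximum come straight from the schema when present
      return (
        <NumberInput
          label={propertyName}
          min={property.minimum}
          max={property.maximum}
        />
      );
    default:
      return <TextInput label={propertyName} />;
  }
}
```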
diff --git a/cli/aiconfig-editor/src/components/prompt/PromptActionBar.tsx b/cli/aiconfig-editor/src/components/prompt/PromptActionBar.tsx
new file mode 100644
index 000000000..7767ab6f1
--- /dev/null
+++ b/cli/aiconfig-editor/src/components/prompt/PromptActionBar.tsx
@@ -0,0 +1,35 @@
+import PromptParametersRenderer from "@/src/components/prompt/PromptParametersRenderer";
+import ModelSettingsRenderer from "@/src/components/prompt/model_settings/ModelSettingsRenderer";
+import PromptMetadataRenderer from "@/src/components/prompt/prompt_metadata/PromptMetadataRenderer";
+import {
+  PromptSchema,
+  checkParametersSupported,
+} from "@/src/utils/promptUtils";
+import { Flex } from "@mantine/core";
+import { Prompt } from "aiconfig";
+import { memo } from "react";
+
+type Props = {
+  prompt: Prompt;
+  promptSchema?: PromptSchema;
+};
+
+export default memo(function PromptActionBar({ prompt, promptSchema }: Props) {
+  // TODO: Handle collapse / expand / drag-to-resize
+  const modelSettingsSchema = promptSchema?.model_settings;
+  const promptMetadataSchema = promptSchema?.prompt_metadata;
+
+  return (
+    <Flex direction="column">
+      <ModelSettingsRenderer prompt={prompt} schema={modelSettingsSchema} />
+      <PromptMetadataRenderer prompt={prompt} schema={promptMetadataSchema} />
+      {checkParametersSupported(prompt) && (
+        <PromptParametersRenderer prompt={prompt} />
+      )}
+    </Flex>
+  );
+});
diff --git a/cli/aiconfig-editor/src/components/prompt/PromptContainer.tsx b/cli/aiconfig-editor/src/components/prompt/PromptContainer.tsx
index dd03980c3..74b0fefc6 100644
--- a/cli/aiconfig-editor/src/components/prompt/PromptContainer.tsx
+++ b/cli/aiconfig-editor/src/components/prompt/PromptContainer.tsx
@@ -1,5 +1,6 @@
+import PromptActionBar from "@/src/components/prompt/PromptActionBar";
 import PromptInput from "@/src/components/prompt/PromptInput";
-import { getPromptModelName } from "@/src/utils/promptUtils";
+import { getPromptModelName, getPromptSchema } from "@/src/utils/promptUtils";
 import { Flex, Card, Text } from "@mantine/core";
 import { Prompt, PromptInput as AIConfigPromptInput } from "aiconfig";
 import { memo, useCallback } from "react";
@@ -17,19 +18,28 @@ export default memo(function PromptContainer({
   onChangePromptInput,
   defaultConfigModelName,
 }: Props) {
-  // TODO: Show prompt name & metadata inside of settings editor later
+  // TODO: Move this to context
   const onChangeInput = useCallback(
     (newInput: AIConfigPromptInput) => onChangePromptInput(index, newInput),
     [index, onChangePromptInput]
   );
+
+  const promptSchema = getPromptSchema(prompt, defaultConfigModelName);
+
   return (
-    <Card withBorder>
-      <Flex justify="space-between">
-        <Text>{`{{${prompt.name}}}}`}</Text>
-        <Text>{getPromptModelName(prompt, defaultConfigModelName)}</Text>
-      </Flex>
-      <PromptInput input={prompt.input} onChangeInput={onChangeInput} />
-    </Card>
+    <Flex justify="space-between">
+      <Card withBorder>
+        <Flex direction="column">
+          <Flex justify="space-between">
+            <Text>{`{{${prompt.name}}}`}</Text>
+            <Text>{getPromptModelName(prompt, defaultConfigModelName)}</Text>
+          </Flex>
+          {/* */}
+          <PromptInput input={prompt.input} onChangeInput={onChangeInput} />
+        </Flex>
+      </Card>
+      <PromptActionBar prompt={prompt} promptSchema={promptSchema} />
+    </Flex>
   );
diff --git a/cli/aiconfig-editor/src/components/prompt/PromptParametersRenderer.tsx b/cli/aiconfig-editor/src/components/prompt/PromptParametersRenderer.tsx
new file mode 100644
index 000000000..6ee32fd79
--- /dev/null
+++ b/cli/aiconfig-editor/src/components/prompt/PromptParametersRenderer.tsx
@@ -0,0 +1,10 @@
+import { Prompt } from "aiconfig";
+import { memo } from "react";
+
+type Props = {
+  prompt: Prompt;
+};
+
+export default memo(function PromptParametersRenderer({ prompt }: Props) {
+  return null; // TODO: Implement
+});
diff --git a/cli/aiconfig-editor/src/components/prompt/model_settings/ModelSettingsConfigRenderer.tsx b/cli/aiconfig-editor/src/components/prompt/model_settings/ModelSettingsConfigRenderer.tsx
new file mode 100644
index 000000000..260fc4763
--- /dev/null
+++ b/cli/aiconfig-editor/src/components/prompt/model_settings/ModelSettingsConfigRenderer.tsx
@@ -0,0 +1,10 @@
+import { JSONObject } from "aiconfig/dist/common";
+import { memo } from "react";
+
+type Props = {
+  settings: JSONObject;
+};
+
+export default memo(function ModelSettingsConfigRenderer({ settings }: Props) {
+  return <div>{JSON.stringify(settings)}</div>;
+});
diff --git a/cli/aiconfig-editor/src/components/prompt/model_settings/ModelSettingsRenderer.tsx b/cli/aiconfig-editor/src/components/prompt/model_settings/ModelSettingsRenderer.tsx
new file mode 100644
index 000000000..637a32540
--- /dev/null
+++ b/cli/aiconfig-editor/src/components/prompt/model_settings/ModelSettingsRenderer.tsx
@@ -0,0 +1,42 @@
+import ModelSettingsConfigRenderer from "@/src/components/prompt/model_settings/ModelSettingsConfigRenderer";
+import ModelSettingsSchemaRenderer from "@/src/components/prompt/model_settings/ModelSettingsSchemaRenderer";
+import { ModelSettingsSchema } from "@/src/utils/promptUtils";
+import { Flex, Text } from "@mantine/core";
+import { Prompt } from "aiconfig";
+import { memo } from "react";
+
+type Props = {
+  prompt: Prompt;
+  schema?: ModelSettingsSchema;
+};
+
+// Don't default to config-level model settings here: showing them at the
+// prompt level in the editor when they aren't in the prompt's own metadata
+// would be confusing.
+function getModelSettings(prompt: Prompt) {
+  if (typeof prompt.metadata?.model !== "string") {
+    return prompt.metadata?.model?.settings;
+  }
+}
+
+export default memo(function ModelSettingsRenderer({ prompt, schema }: Props) {
+  const modelSettings = getModelSettings(prompt);
+
+  let settingsComponent;
+
+  if (schema) {
+    settingsComponent = (
+      <ModelSettingsSchemaRenderer schema={schema} settings={modelSettings} />
+    );
+  } else if (modelSettings) {
+    settingsComponent = (
+      <ModelSettingsConfigRenderer settings={modelSettings} />
+    );
+  }
+
+  return (
+    <Flex direction="column">
+      <Text>Model Settings</Text>
+      {settingsComponent}
+    </Flex>
+  );
+});
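For context on the `getModelSettings` branch above: `prompt.metadata.model` is either a bare model-name string or an object whose optional `settings` carry prompt-level values, so only the object form returns settings. A minimal sketch of the two shapes, with illustrative values taken from the config format in this PR:

```ts
// String form: names the model but carries no prompt-level settings, so
// getModelSettings(prompt) returns undefined.
const metadataByName = { model: "gpt-4" };

// Object form: prompt-level settings live under model.settings, so
// getModelSettings(prompt) returns { max_tokens: 1000 }.
const metadataWithSettings = {
  model: { name: "gpt-4", settings: { max_tokens: 1000 } },
};
```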
diff --git a/cli/aiconfig-editor/src/components/prompt/model_settings/ModelSettingsSchemaRenderer.tsx b/cli/aiconfig-editor/src/components/prompt/model_settings/ModelSettingsSchemaRenderer.tsx
new file mode 100644
index 000000000..78b6f9625
--- /dev/null
+++ b/cli/aiconfig-editor/src/components/prompt/model_settings/ModelSettingsSchemaRenderer.tsx
@@ -0,0 +1,32 @@
+import SettingsPropertyRenderer from "@/src/components/SettingsPropertyRenderer";
+import { useSchemaState } from "@/src/hooks/useSchemaState";
+import { ModelSettingsSchema } from "@/src/utils/promptUtils";
+import { Flex } from "@mantine/core";
+import { JSONObject } from "aiconfig/dist/common";
+import { memo } from "react";
+
+type Props = {
+  schema: ModelSettingsSchema;
+  settings?: JSONObject;
+};
+
+export default memo(function ModelSettingsSchemaRenderer({
+  schema,
+  settings,
+}: Props) {
+  const { schemaState } = useSchemaState(schema, settings);
+
+  return (
+    <Flex direction="column">
+      {Object.entries(schema.properties).map(([key, value]) => (
+        <SettingsPropertyRenderer
+          key={key}
+          propertyName={key}
+          property={value}
+          isRequired={schema.required?.includes(key)}
+          initialValue={schemaState[key].value}
+        />
+      ))}
+    </Flex>
+  );
+});
diff --git a/cli/aiconfig-editor/src/components/prompt/prompt_metadata/PromptMetadataRenderer.tsx b/cli/aiconfig-editor/src/components/prompt/prompt_metadata/PromptMetadataRenderer.tsx
new file mode 100644
index 000000000..6af552ff2
--- /dev/null
+++ b/cli/aiconfig-editor/src/components/prompt/prompt_metadata/PromptMetadataRenderer.tsx
@@ -0,0 +1,24 @@
+import { PromptMetadataSchema } from "@/src/utils/promptUtils";
+import { Prompt } from "aiconfig";
+import { memo } from "react";
+
+type Props = {
+  prompt: Prompt;
+  schema?: PromptMetadataSchema;
+};
+
+function ModelMetadataConfigRenderer({ prompt }: Props) {
+  return null; // TODO: Implement
+}
+
+function ModelMetadataSchemaRenderer({ prompt, schema }: Props) {
+  return null; // TODO: Implement
+}
+
+export default memo(function PromptMetadataRenderer({ prompt, schema }: Props) {
+  return schema ? (
+    <ModelMetadataSchemaRenderer prompt={prompt} schema={schema} />
+  ) : (
+    <ModelMetadataConfigRenderer prompt={prompt} />
+  );
+});
diff --git a/cli/aiconfig-editor/src/hooks/useSchemaState.ts b/cli/aiconfig-editor/src/hooks/useSchemaState.ts
new file mode 100644
index 000000000..53eb7ab4b
--- /dev/null
+++ b/cli/aiconfig-editor/src/hooks/useSchemaState.ts
@@ -0,0 +1,46 @@
+import {
+  ModelSettingsSchema,
+  PromptMetadataSchema,
+} from "@/src/utils/promptUtils";
+import { useCallback, useRef, useState } from "react";
+
+// Local state to maintain all the possible properties from a schema, as well
+// as 'dirty' state to track which properties have been changed. This is used
+// to determine which properties to propagate to the config. Otherwise, the
+// config would be bloated with unnecessary settings just by loading it in
+// the editor.
+export function useSchemaState(
+  schema: ModelSettingsSchema | PromptMetadataSchema,
+  initialData?: Record<string, unknown>
+) {
+  const [schemaState, setSchemaState] = useState<
+    Record<string, { value: unknown; dirty: boolean }>
+  >(
+    Object.keys(schema.properties).reduce((acc, key) => {
+      acc[key] = { value: initialData?.[key] ?? null, dirty: false };
+      return acc;
+    }, {} as Record<string, { value: unknown; dirty: boolean }>)
+  );
+
+  const stateRef = useRef(schemaState);
+  stateRef.current = schemaState;
+
+  // Get the state of the schema as a concrete object, only including
+  // properties that have been changed
+  const getConcreteState = useCallback(
+    () =>
+      Object.keys(stateRef.current).reduce((acc, key) => {
+        if (stateRef.current[key].dirty) {
+          acc[key] = stateRef.current[key].value;
+        }
+        return acc;
+      }, {} as Record<string, unknown>),
+    []
+  );
+
+  const setSchemaValue = useCallback((key: string, value: unknown) => {
+    setSchemaState((currentState) => ({
+      ...currentState,
+      [key]: { value, dirty: true },
+    }));
+  }, []);
+
+  return { schemaState, getConcreteState, setSchemaValue };
+}
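A minimal sketch of how a settings editor might drive `useSchemaState` (the schema and values below are illustrative, not from this PR):

```ts
// Inside a React component, since this is a hook.
const { schemaState, getConcreteState, setSchemaValue } = useSchemaState(
  {
    properties: {
      temperature: { type: "number" },
      stream: { type: "boolean" },
    },
  },
  { temperature: 0.5 } // initial values loaded from the existing config
);

getConcreteState(); // {} -- nothing is dirty yet, so nothing propagates

setSchemaValue("temperature", 0.9); // a user edit marks the property dirty

getConcreteState(); // { temperature: 0.9 } -- only dirty properties are written back
```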
"boolean", + }, + }, + }, +}; diff --git a/cli/aiconfig-editor/src/utils/promptUtils.ts b/cli/aiconfig-editor/src/utils/promptUtils.ts index 74991dcc2..504f66ac6 100644 --- a/cli/aiconfig-editor/src/utils/promptUtils.ts +++ b/cli/aiconfig-editor/src/utils/promptUtils.ts @@ -1,4 +1,5 @@ -import { AIConfig, Prompt } from "aiconfig"; +import { Prompt } from "aiconfig"; +import { OpenAIChatModelParserPromptSchema } from "../shared/prompt_schemas/OpenAIChatModelParserPromptSchema"; /** * Get the name of the model for the specified prompt. The name will either be specified in the prompt's @@ -23,3 +24,69 @@ export function getPromptModelName( // Model must be specified as default if not specified for the Prompt return defaultConfigModelName!; } + +// TODO: Schemas should be statically defined with the model parsers and loaded alongside config, keyed on registered model names +// Notes here: We could technically just refine the full JSON schema for the prompt since we currently allow model parsers to have full control, including: +// - defining metadata outside of model metadata --> e.g. remember_chat_context for openai parser +// - placing responsibility for proper handling/serialization of core config properties on the model parser (parameters, tags) +// That would allow us to do some powerful things like: +// - share JSON schema for model parser between languages +// - validate serialization against it +// Long-term, maybe we just have model parser core class handle serialization of parameters/tags and concrete implementations just +// handle serialization of non-core metadata and model inference settings? +// In any case, below we are defining 3 keys in the schema: +// - input: the prompt input, either string or more refined object including data / attachment types +// - model_settings: supported settings for the model, will be serialized either in top-level model metadata or prompt.metadata.model.settings +// - prompt_metadata: additional metadata for the prompt, EXCLUDES parameters, tags, model +// A couple reasons for doing this vs refinining full JSON schema: +// - we may want to set constraints, like range of acceptable values, which aren't part of the prompt representation in JSON schema +// - we can focus on the important 3 params defined above, instead of requiring re-definining the full JSON schema +// Should we define a JSON schema for PromptSchema type so we can safely serialize/deserialize them? +export const PROMPT_SCHEMAS: Record = { + "gpt-3.5-turbo": OpenAIChatModelParserPromptSchema, + "gpt-4": OpenAIChatModelParserPromptSchema, +}; + +export type PromptInputSchema = { + type: "string" | "object"; +}; + +export type ModelSettingsSchema = { + properties: Record; + required?: string[]; +}; + +export type PromptMetadataSchema = { + properties: Record; + required?: string[]; +}; + +export type PromptSchema = { + input: PromptInputSchema; + model_settings?: ModelSettingsSchema; + prompt_metadata?: PromptMetadataSchema; +}; + +export function getPromptSchema( + prompt: Prompt, + defaultConfigModelName?: string +): PromptSchema | undefined { + const modelName = getPromptModelName(prompt, defaultConfigModelName); + return PROMPT_SCHEMAS[modelName]; +} + +function isTextInputModality(prompt: Prompt): boolean { + const schema = getPromptSchema(prompt); + if (schema) { + return schema.input.type === "string"; // TODO: Handle object case + } + + return ( + // TODO: Handle case where data is object with values associated with mimetype? 
diff --git a/cli/aiconfig-editor/src/utils/promptUtils.ts b/cli/aiconfig-editor/src/utils/promptUtils.ts
index 74991dcc2..504f66ac6 100644
--- a/cli/aiconfig-editor/src/utils/promptUtils.ts
+++ b/cli/aiconfig-editor/src/utils/promptUtils.ts
@@ -1,4 +1,5 @@
-import { AIConfig, Prompt } from "aiconfig";
+import { Prompt } from "aiconfig";
+import { OpenAIChatModelParserPromptSchema } from "../shared/prompt_schemas/OpenAIChatModelParserPromptSchema";
 
 /**
  * Get the name of the model for the specified prompt. The name will either be specified in the prompt's
@@ -23,3 +24,69 @@ export function getPromptModelName(
   // Model must be specified as default if not specified for the Prompt
   return defaultConfigModelName!;
 }
+
+// TODO: Schemas should be statically defined with the model parsers and loaded alongside the config, keyed on registered model names.
+// Notes here: We could technically just refine the full JSON schema for the prompt, since we currently allow model parsers to have full control, including:
+// - defining metadata outside of model metadata --> e.g. remember_chat_context for the openai parser
+// - placing responsibility for proper handling/serialization of core config properties on the model parser (parameters, tags)
+// That would allow us to do some powerful things, like:
+// - share the JSON schema for a model parser between languages
+// - validate serialization against it
+// Long-term, maybe we just have the model parser core class handle serialization of parameters/tags and concrete implementations just
+// handle serialization of non-core metadata and model inference settings?
+// In any case, below we define 3 keys in the schema:
+// - input: the prompt input, either a string or a more refined object including data / attachment types
+// - model_settings: supported settings for the model, serialized either in top-level model metadata or prompt.metadata.model.settings
+// - prompt_metadata: additional metadata for the prompt, EXCLUDING parameters, tags, model
+// A couple of reasons for doing this vs. refining the full JSON schema:
+// - we may want to set constraints, like the range of acceptable values, which aren't part of the prompt representation in JSON schema
+// - we can focus on the 3 important keys defined above, instead of requiring the full JSON schema to be re-defined
+// Should we define a JSON schema for the PromptSchema type so we can safely serialize/deserialize them?
+export const PROMPT_SCHEMAS: Record<string, PromptSchema> = {
+  "gpt-3.5-turbo": OpenAIChatModelParserPromptSchema,
+  "gpt-4": OpenAIChatModelParserPromptSchema,
+};
+
+export type PromptInputSchema = {
+  type: "string" | "object";
+};
+
+export type ModelSettingsSchema = {
+  properties: Record<string, { [key: string]: any }>;
+  required?: string[];
+};
+
+export type PromptMetadataSchema = {
+  properties: Record<string, { [key: string]: any }>;
+  required?: string[];
+};
+
+export type PromptSchema = {
+  input: PromptInputSchema;
+  model_settings?: ModelSettingsSchema;
+  prompt_metadata?: PromptMetadataSchema;
+};
+
+export function getPromptSchema(
+  prompt: Prompt,
+  defaultConfigModelName?: string
+): PromptSchema | undefined {
+  const modelName = getPromptModelName(prompt, defaultConfigModelName);
+  return PROMPT_SCHEMAS[modelName];
+}
+
+function isTextInputModality(prompt: Prompt): boolean {
+  const schema = getPromptSchema(prompt);
+  if (schema) {
+    return schema.input.type === "string"; // TODO: Handle object case
+  }
+
+  return (
+    // TODO: Handle case where data is an object with values associated with mimetypes?
+    typeof prompt.input === "string" || typeof prompt.input.data === "string"
+  );
+}
+
+export function checkParametersSupported(prompt: Prompt): boolean {
+  return prompt.metadata?.parameters != null || isTextInputModality(prompt);
+}
diff --git a/cli/aiconfig-editor/travel.aiconfig.json b/cli/aiconfig-editor/travel.aiconfig.json
index eb48e36ff..9924f316e 100644
--- a/cli/aiconfig-editor/travel.aiconfig.json
+++ b/cli/aiconfig-editor/travel.aiconfig.json
@@ -31,6 +31,21 @@
           "order_by": "geographic location"
         }
       }
+    },
+    {
+      "name": "single_day_itinerary",
+      "input": "Refine this itinerary to fit within a single day: {{gen_itinerary.output}}.",
+      "metadata": {
+        "model": {
+          "name": "gpt-4",
+          "settings": {
+            "max_tokens": 1000
+          }
+        },
+        "parameters": {
+          "order_by": "geographic location"
+        }
+      }
     }
   ]
 }
\ No newline at end of file
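As a quick sanity check of the new helpers against the `single_day_itinerary` prompt added above, a sketch assuming the prompt has been parsed from travel.aiconfig.json:

```ts
import { Prompt } from "aiconfig";
import {
  getPromptSchema,
  checkParametersSupported,
} from "@/src/utils/promptUtils";

// Stand-in for the prompt object parsed from the config above.
declare const singleDayItinerary: Prompt;

// Resolves via the "gpt-4" entry in PROMPT_SCHEMAS, since the prompt's
// metadata names the model directly.
const schema = getPromptSchema(singleDayItinerary); // OpenAIChatModelParserPromptSchema

// True: the prompt declares parameters, and the schema's input type is "string".
checkParametersSupported(singleDayItinerary);
```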