Skip to content

Commit

Permalink
[4/n] Scaffolding for Model Settings via Schema (#391)
Browse files Browse the repository at this point in the history
[4/n] Scaffolding for Model Settings via Schema


# [4/n] Scaffolding for Model Settings via Schema

Setting up the scaffolding of the prompt components themselves here. The
main concept is that the dynamic content of each prompt (input,
settings, metadata) will be rendered by either:

A) a schema representing the possible params/values and their types,
with default value obtained from the config
or
B) directly from the config itself

For now, I've hard-coded an example schema used by OpenAI model parser
models. I'll look into adding some more (specifically, multi-modal) in
subsequent PRs and then, if things work smoothly, we can require these
schemas to be defined on each ModelParser class and then have some
process to provide the static schema definitions to the client (e.g.
load for each registered model parser on the backend and send that to
the client once on startup).

A) will happen for newly-added prompts in the editor/workbook UI, or if
existing prompts have associated schemas available.
B) can happen if no schema is provided for a model (e.g. especially in
read-only cases like viewing shared gradio workbook)

One thing to be aware of for A) is that the schema represents all
possible values. So, we only want to insert the values into the prompt
JSON in the schema when the viewer actually sets a value in the UI. That
will be handled using the `useSchemaState` hook added in this PR.

Subsequent PRs will handle setting up the same structure for prompt
inputs, and then implementing the proper rendering of components for
each.

## Testing:
- Just rendering the schema / state for now, making sure default values
are correct:
<img width="1176" alt="Screenshot 2023-12-01 at 12 59 19 PM"
src="https://github.com/lastmile-ai/aiconfig/assets/5060851/f293687a-3fdd-4e7f-8025-c240c8b6cde4">

---
Stack created with [Sapling](https://sapling-scm.com). Best reviewed
with
[ReviewStack](https://reviewstack.dev/lastmile-ai/aiconfig/pull/391).
* #398
* __->__ #391
  • Loading branch information
rholinshead authored Dec 11, 2023
2 parents d50f718 + 6ef4f9b commit fcdb5d9
Show file tree
Hide file tree
Showing 13 changed files with 432 additions and 8 deletions.
2 changes: 2 additions & 0 deletions cli/aiconfig-editor/src/components/EditorContainer.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,8 @@ export default function EditorContainer({
[aiconfig]
);

// TODO: Implement editor context for callbacks, readonly state, etc.

return (
<>
<Container>
Expand Down
25 changes: 25 additions & 0 deletions cli/aiconfig-editor/src/components/SettingsPropertyRenderer.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import { Flex } from "@mantine/core";
import { memo } from "react";

type Props = {
propertyName: string;
property: { [key: string]: any };
isRequired?: boolean;
initialValue: any;
};

export default memo(function SettingsPropertyRenderer({
propertyName,
property,
isRequired = false,
initialValue,
}: Props) {
return (
<Flex direction="column">
<div>{propertyName}</div>
<div>{JSON.stringify(property)}</div>
<div>isRequired: {JSON.stringify(isRequired)}</div>
<div>initialValue: {JSON.stringify(initialValue)}</div>
</Flex>
);
});
35 changes: 35 additions & 0 deletions cli/aiconfig-editor/src/components/prompt/PromptActionBar.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import PromptParametersRenderer from "@/src/components/prompt/PromptParametersRenderer";
import ModelSettingsRenderer from "@/src/components/prompt/model_settings/ModelSettingsRenderer";
import PromptMetadataRenderer from "@/src/components/prompt/prompt_metadata/PromptMetadataRenderer";
import {
PromptSchema,
checkParametersSupported,
} from "@/src/utils/promptUtils";
import { Flex } from "@mantine/core";
import { Prompt } from "aiconfig";
import { memo } from "react";

type Props = {
  prompt: Prompt;
  promptSchema?: PromptSchema;
};

/**
 * Right-hand sidebar for a prompt, containing the model settings, prompt
 * metadata and (when supported by the model parser) the prompt parameters.
 */
export default memo(function PromptActionBar({ prompt, promptSchema }: Props) {
  // TODO: Handle collapse / expand / drag-to-resize
  const parametersSupported = checkParametersSupported(prompt);

  return (
    <Flex
      direction="column"
      justify="space-between"
      style={{ borderLeft: "1px solid grey" }}
    >
      <ModelSettingsRenderer
        prompt={prompt}
        schema={promptSchema?.model_settings}
      />
      <PromptMetadataRenderer
        prompt={prompt}
        schema={promptSchema?.prompt_metadata}
      />
      {parametersSupported && <PromptParametersRenderer prompt={prompt} />}
    </Flex>
  );
});
24 changes: 17 additions & 7 deletions cli/aiconfig-editor/src/components/prompt/PromptContainer.tsx
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import PromptActionBar from "@/src/components/prompt/PromptActionBar";
import PromptInput from "@/src/components/prompt/PromptInput";
import { getPromptModelName } from "@/src/utils/promptUtils";
import { getPromptModelName, getPromptSchema } from "@/src/utils/promptUtils";
import { Flex, Card, Text } from "@mantine/core";
import { Prompt, PromptInput as AIConfigPromptInput } from "aiconfig";
import { memo, useCallback } from "react";
Expand All @@ -17,19 +18,28 @@ export default memo(function PromptContainer({
onChangePromptInput,
defaultConfigModelName,
}: Props) {
// TODO: Show prompt name & metadata inside of settings editor later
// TODO: Move this to context
const onChangeInput = useCallback(
(newInput: AIConfigPromptInput) => onChangePromptInput(index, newInput),
[index, onChangePromptInput]
);

const promptSchema = getPromptSchema(prompt, defaultConfigModelName);

return (
<div style={{ marginTop: 16 }}>
<Card>
<Flex justify="space-between" m="sm">
<Text weight="bold">{`{{${prompt.name}}}}`}</Text>
<Text>{getPromptModelName(prompt, defaultConfigModelName)}</Text>
<Card withBorder>
<Flex justify="space-between">
<Flex direction="column">
<Flex justify="space-between" m="sm">
<Text weight="bold">{`{{${prompt.name}}}}`}</Text>
<Text>{getPromptModelName(prompt, defaultConfigModelName)}</Text>
</Flex>
<PromptInput input={prompt.input} onChangeInput={onChangeInput} />
{/* <PromptOutput /> */}
</Flex>
<PromptActionBar prompt={prompt} promptSchema={promptSchema} />
</Flex>
<PromptInput input={prompt.input} onChangeInput={onChangeInput} />
</Card>
</div>
);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import { Prompt } from "aiconfig";
import { memo } from "react";

type Props = {
  prompt: Prompt;
};

/**
 * Renders the parameters associated with a prompt.
 * Currently a stub; implementation will come in a follow-up PR.
 */
export default memo(function PromptParametersRenderer(_props: Props) {
  // TODO: Implement
  return null;
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import { JSONObject } from "aiconfig/dist/common";
import { memo } from "react";

type Props = {
  settings: JSONObject;
};

/**
 * Fallback renderer used when no schema is available for a model: shows the
 * raw settings exactly as they appear in the config.
 */
export default memo(function ModelSettingsConfigRenderer({ settings }: Props) {
  const serialized = JSON.stringify(settings);
  return <div>{serialized}</div>;
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import ModelSettingsConfigRenderer from "@/src/components/prompt/model_settings/ModelSettingsConfigRenderer";
import ModelSettingsSchemaRenderer from "@/src/components/prompt/model_settings/ModelSettingsSchemaRenderer";
import { ModelSettingsSchema } from "@/src/utils/promptUtils";
import { Flex, Text } from "@mantine/core";
import { Prompt } from "aiconfig";
import { memo } from "react";

type Props = {
  prompt: Prompt;
};

// Don't default to config-level model settings since that could be confusing
// to have them shown at the prompt level in the editor but not in the config
function getModelSettings(prompt: Prompt) {
  const model = prompt.metadata?.model;
  // A plain string model entry carries no prompt-level settings; only the
  // object form has a settings payload.
  return typeof model === "string" ? undefined : model?.settings;
}

export default memo(function ModelSettingsRenderer({ prompt, schema }: Props) {
const modelSettings = getModelSettings(prompt);

let settingsComponent;

if (schema) {
settingsComponent = (
<ModelSettingsSchemaRenderer settings={modelSettings} schema={schema} />
);
} else if (modelSettings) {
settingsComponent = (
<ModelSettingsConfigRenderer settings={modelSettings} />
);
}

return (
<Flex direction="column">
<Text>Model Settings</Text>
{settingsComponent}
</Flex>
);
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import SettingsPropertyRenderer from "@/src/components/SettingsPropertyRenderer";
import { useSchemaState } from "@/src/hooks/useSchemaState";
import { ModelSettingsSchema } from "@/src/utils/promptUtils";
import { Flex } from "@mantine/core";
import { JSONObject } from "aiconfig/dist/common";
import { memo } from "react";

type Props = {
  // Schema describing all possible settings for the prompt's model
  schema: ModelSettingsSchema;
  // Settings currently present in the config, used to seed initial values
  settings?: JSONObject;
};

/**
 * Renders model settings driven by a schema: one SettingsPropertyRenderer per
 * schema property, seeded with the current value from the config (if any) via
 * useSchemaState.
 */
export default memo(function ModelSettingsSchemaRenderer({
  schema,
  settings,
}: Props) {
  const { schemaState } = useSchemaState(schema, settings);

  return (
    <Flex direction="column">
      {Object.entries(schema.properties).map(([key, value]) => (
        <SettingsPropertyRenderer
          propertyName={key}
          key={key}
          property={value}
          isRequired={schema.required?.includes(key)}
          // Optional chaining: useSchemaState seeds its state from the schema
          // seen on first render, so a key can be missing here if the schema
          // prop later changes. Render an empty value instead of crashing.
          initialValue={schemaState[key]?.value}
        />
      ))}
    </Flex>
  );
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import { PromptMetadataSchema } from "@/src/utils/promptUtils";
import { Prompt } from "aiconfig";
import { memo } from "react";

type Props = {
  prompt: Prompt;
  schema?: PromptMetadataSchema;
};

// Fallback when no schema is available: render metadata straight from config.
function ModelMetadataConfigRenderer({ prompt }: Props) {
  return null; // TODO: Implement
}

// Schema-driven metadata rendering.
function ModelMetadataSchemaRenderer({ prompt, schema }: Props) {
  return null; // TODO: Implement
}

/**
 * Renders the prompt-level metadata for a prompt, using the schema-driven
 * renderer when a schema is provided and the config-driven fallback otherwise.
 *
 * Fix: this component was previously named ModelSettingsRenderer (copy-paste),
 * colliding with the actual model settings component and misleading in React
 * devtools. The default export is unchanged, so callers are unaffected.
 */
export default memo(function PromptMetadataRenderer({ prompt, schema }: Props) {
  return schema ? (
    <ModelMetadataSchemaRenderer prompt={prompt} schema={schema} />
  ) : (
    <ModelMetadataConfigRenderer prompt={prompt} />
  );
});
46 changes: 46 additions & 0 deletions cli/aiconfig-editor/src/hooks/useSchemaState.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import {
ModelSettingsSchema,
PromptMetadataSchema,
} from "@/src/utils/promptUtils";
import { useCallback, useRef, useState } from "react";

// Local state to maintain all the possible properties from a schema, as well as 'dirty' state to track
// which properties have been changed. This is used to determine which properties to propagate to the config.
// Otherwise, the config would be bloated with unnecessary settings just by loading it in the editor.
export function useSchemaState(
schema: ModelSettingsSchema | PromptMetadataSchema,
initialData?: Record<string, unknown>
) {
const [schemaState, setSchemaState] = useState<
Record<string, { value: unknown; dirty: boolean }>
>(
Object.keys(schema.properties).reduce((acc, key) => {
acc[key] = { value: initialData?.[key] ?? null, dirty: false };
return acc;
}, {} as Record<string, { value: unknown; dirty: boolean }>)
);

const stateRef = useRef(schemaState);
stateRef.current = schemaState;

// Get the state of the schema as a concrete object, only including properties that have been changed
const getConcreteState = useCallback(
() =>
Object.keys(stateRef.current).reduce((acc, key) => {
if (stateRef.current[key].dirty) {
acc[key] = stateRef.current[key].value;
}
return acc;
}, {} as Record<string, unknown>),
[]
);

const setSchemaValue = useCallback((key: string, value: unknown) => {
setSchemaState((currentState) => ({
...currentState,
[key]: { value, dirty: true },
}));
}, []);

return { schemaState, getConcreteState, setSchemaValue };
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
import { PromptSchema } from "@/src/utils/promptUtils";

// Hard-coded example PromptSchema for prompts using the OpenAI chat model
// parser. Describes the possible model settings / prompt metadata properties
// the editor can render, along with value constraints. Default values come
// from the config itself, not from this schema.
export const OpenAIChatModelParserPromptSchema: PromptSchema = {
  input: {
    type: "string",
  },
  model_settings: {
    properties: {
      system_prompt: {
        type: "string",
      },
      frequency_penalty: {
        type: "number",
        minimum: -2.0,
        maximum: 2.0,
      },
      // Either a string mode ("none"/"auto") or an object naming a specific
      // function to call.
      function_call: {
        type: "union",
        types: [
          {
            type: "string",
            enum: ["none", "auto"],
          },
          {
            type: "object",
            required: ["name"],
            properties: {
              name: {
                type: "string",
              },
            },
          },
        ],
      },
      functions: {
        type: "array",
        items: {
          type: "object",
          required: ["name", "parameters"],
          // NOTE(review): this key is `parameters`, but the object branch of
          // `function_call` above uses `properties` for its sub-fields —
          // confirm whether `properties` was intended here.
          parameters: {
            name: {
              type: "string",
            },
            parameters: {
              type: "object", // TODO: Figure this out -- it's a JSON schema object
            },
            description: {
              type: "string",
            },
          },
        },
      },
      logit_bias: {
        type: "map",
        keys: {
          type: "string",
        },
        items: {
          type: "integer",
          minimum: -100,
          maximum: 100,
        },
      },
      max_tokens: {
        type: "number",
      },
      n: {
        type: "number",
      },
      presence_penalty: {
        type: "number",
        minimum: -2.0,
        maximum: 2.0,
      },
      stop: {
        type: "array",
        items: {
          type: "string",
        },
      },
      stream: {
        type: "boolean",
      },
      temperature: {
        type: "number",
        minimum: 0.0,
        maximum: 2.0,
      },
      top_p: {
        type: "number",
        minimum: 0.0,
        maximum: 1.0,
      },
      user: {
        type: "string",
      },
    },
  },
  prompt_metadata: {
    properties: {
      remember_chat_context: {
        type: "boolean",
      },
    },
  },
};
Loading

0 comments on commit fcdb5d9

Please sign in to comment.