Skip to content

Commit

Permalink
Merge pull request #1603 from zhourunlai/experimental_telemetry
Browse files Browse the repository at this point in the history
feat: add experimental telemetry model option
  • Loading branch information
shakkernerd authored Dec 31, 2024
2 parents 66c66fd + 2e49b21 commit c6ebb5b
Show file tree
Hide file tree
Showing 2 changed files with 48 additions and 0 deletions.
19 changes: 19 additions & 0 deletions packages/core/src/generation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ import {
ServiceType,
SearchResponse,
ActionResponse,
TelemetrySettings,
} from "./types.ts";
import { fal } from "@fal-ai/client";

Expand Down Expand Up @@ -164,6 +165,9 @@ export async function generateText({
const max_response_length =
modelConfiguration?.max_response_length ||
models[provider].settings.maxOutputTokens;
const experimental_telemetry =
modelConfiguration?.experimental_telemetry ||
models[provider].settings.experimental_telemetry;

const apiKey = runtime.token;

Expand Down Expand Up @@ -209,6 +213,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = openaiResponse;
Expand All @@ -232,6 +237,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = googleResponse;
Expand All @@ -258,6 +264,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = anthropicResponse;
Expand All @@ -284,6 +291,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = anthropicResponse;
Expand Down Expand Up @@ -314,6 +322,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = grokResponse;
Expand All @@ -335,6 +344,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = groqResponse;
Expand Down Expand Up @@ -386,6 +396,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = redpillResponse;
Expand Down Expand Up @@ -413,6 +424,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = openrouterResponse;
Expand All @@ -439,6 +451,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = ollamaResponse;
Expand Down Expand Up @@ -466,6 +479,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = heuristResponse;
Expand Down Expand Up @@ -515,6 +529,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = openaiResponse;
Expand All @@ -541,6 +556,7 @@ export async function generateText({
maxTokens: max_response_length,
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
experimental_telemetry: experimental_telemetry,
});

response = galadrielResponse;
Expand Down Expand Up @@ -1357,6 +1373,7 @@ interface ModelSettings {
frequencyPenalty: number;
presencePenalty: number;
stop?: string[];
experimental_telemetry?: TelemetrySettings;
}

/**
Expand Down Expand Up @@ -1392,6 +1409,7 @@ export const generateObject = async ({
const presence_penalty = models[provider].settings.presence_penalty;
const max_context_length = models[provider].settings.maxInputTokens;
const max_response_length = models[provider].settings.maxOutputTokens;
const experimental_telemetry = models[provider].settings.experimental_telemetry;
const apiKey = runtime.token;

try {
Expand All @@ -1404,6 +1422,7 @@ export const generateObject = async ({
frequencyPenalty: frequency_penalty,
presencePenalty: presence_penalty,
stop: stop || models[provider].settings.stop,
experimental_telemetry: experimental_telemetry,
};

const response = await handleProvider({
Expand Down
29 changes: 29 additions & 0 deletions packages/core/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,9 @@ export type Model = {

/** Temperature setting */
temperature: number;

/** Optional telemetry configuration (experimental) */
experimental_telemetry?: TelemetrySettings;
};

/** Optional image generation settings */
Expand Down Expand Up @@ -628,12 +631,38 @@ export interface IAgentConfig {
[key: string]: string;
}

/**
 * Configuration for the experimental telemetry feature, passed through to the
 * underlying model call as `experimental_telemetry`.
 *
 * All fields are optional; see each field's doc for its default.
 */
export type TelemetrySettings = {
    /**
     * Enable or disable telemetry. Disabled by default while experimental.
     */
    isEnabled?: boolean;
    /**
     * Enable or disable input recording. Enabled by default.
     *
     * You might want to disable input recording to avoid recording sensitive
     * information, to reduce data transfers, or to increase performance.
     */
    recordInputs?: boolean;
    /**
     * Enable or disable output recording. Enabled by default.
     *
     * You might want to disable output recording to avoid recording sensitive
     * information, to reduce data transfers, or to increase performance.
     */
    recordOutputs?: boolean;
    /**
     * Identifier for this function. Used to group telemetry data by function.
     */
    functionId?: string;
};

/**
 * Per-call overrides for a model's generation settings.
 *
 * Every field is optional; when a field is absent, callers fall back to the
 * provider's model defaults (e.g. `models[provider].settings`).
 */
export interface ModelConfiguration {
    /** Sampling temperature override. */
    temperature?: number;
    /** Maximum number of tokens the model may generate (output cap). */
    max_response_length?: number;
    /** Frequency penalty override. */
    frequency_penalty?: number;
    /** Presence penalty override. */
    presence_penalty?: number;
    // NOTE(review): camelCase here while sibling fields are snake_case —
    // renaming would break external consumers; confirm whether intentional.
    /** Maximum number of input (context) tokens. */
    maxInputTokens?: number;
    /** Optional telemetry configuration (experimental). */
    experimental_telemetry?: TelemetrySettings;
}

/**
Expand Down

0 comments on commit c6ebb5b

Please sign in to comment.