
Commit ea472bf

Internal change
GitOrigin-RevId: 13e4441438c6b82806ca55bf1c477bb3ff671542
Jigsaw authored and copybara-github committed Dec 18, 2024
1 parent a9c6ca4 commit ea472bf
Showing 1 changed file with 33 additions and 14 deletions.
src/models/vertex_model.ts: 47 changes (33 additions & 14 deletions)
@@ -65,18 +65,36 @@ export class VertexModel extends Model {
    */
   async generateText(prompt: string): Promise<string> {
     const req = getRequest(prompt);
-    const streamingResp = await this.getGenerativeModel().generateContentStream(req);
-
-    const response = await streamingResp.response;
-    if (response.candidates![0].content.parts[0].text) {
-      const responseText = response.candidates![0].content.parts[0].text;
-      console.log(`Input token count: ${response.usageMetadata?.promptTokenCount}`);
-      console.log(`Output token count: ${response.usageMetadata?.candidatesTokenCount}`);
-      return responseText;
-    } else {
-      console.warn("Malformed response: ", response);
-      throw new Error("Error from Generative Model, response: " + response);
-    }
+    const model = this.getGenerativeModel();
+
+    const response = await retryCall(
+      // call LLM
+      async function (request: Request, model: GenerativeModel) {
+        return (await model.generateContentStream(request)).response;
+      },
+      // Check if the response exists and contains a text field.
+      function (response): boolean {
+        if (!response) {
+          console.error("Failed to get a model response.");
+          return false;
+        }
+        if (!response.candidates![0].content.parts[0].text) {
+          console.error(`Model returned a malformed response: ${response}`);
+          return false;
+        }
+        return true;
+      },
+      MAX_RETRIES,
+      "Failed to get a valid model response.",
+      RETRY_DELAY_MS,
+      [req, model], // Arguments for the LLM call
+      [] // Arguments for the validator function
+    );
+
+    const responseText = response.candidates![0].content.parts[0].text!;
+    console.log(`Input token count: ${response.usageMetadata?.promptTokenCount}`);
+    console.log(`Output token count: ${response.usageMetadata?.candidatesTokenCount}`);
+    return responseText;
   }
 
   /**
@@ -177,6 +195,7 @@ export async function generateJSON(prompt: string, model: GenerativeModel): Prom
   const req = getRequest(prompt);
 
   const response = await retryCall(
+    // call LLM
     async function (request: Request) {
       return (await model.generateContentStream(request)).response;
     },
@@ -195,8 +214,8 @@ export async function generateJSON(prompt: string, model: GenerativeModel): Prom
     MAX_RETRIES,
     "Failed to get a valid model response.",
     RETRY_DELAY_MS,
-    [req],
-    []
+    [req], // Arguments for the LLM call
+    [] // Arguments for the validator function
   );
 
   const responseText: string = response.candidates![0].content.parts[0].text!;
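Note: retryCall, MAX_RETRIES, and RETRY_DELAY_MS are referenced above but are not defined in this diff. The sketch below is only a guess at what a generic retry helper with this call shape might look like, inferred from the two call sites in generateText and generateJSON; the repository's actual implementation is not shown here and may differ.

/**
 * Hypothetical sketch of a retry helper matching the call sites above (not the
 * repository's actual retryCall). Calls `func` up to `maxRetries` times and
 * returns the first result that passes `validator`.
 */
export async function retryCall<T>(
  // Function that performs the call (e.g. the LLM request).
  func: (...args: any[]) => Promise<T>,
  // Validator that decides whether a result is acceptable.
  validator: (response: T | undefined, ...args: any[]) => boolean,
  maxRetries: number,
  errorMsg: string,
  retryDelayMs: number,
  funcArgs: any[],
  validatorArgs: any[]
): Promise<T> {
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    let response: T | undefined;
    try {
      response = await func(...funcArgs);
    } catch (error) {
      // Treat a thrown error like a missing response and retry.
      console.error(`Attempt ${attempt} threw an error:`, error);
    }
    if (response !== undefined && validator(response, ...validatorArgs)) {
      return response;
    }
    if (attempt < maxRetries) {
      console.error(`Attempt ${attempt} failed. Retrying in ${retryDelayMs}ms...`);
      await new Promise((resolve) => setTimeout(resolve, retryDelayMs));
    }
  }
  throw new Error(errorMsg);
}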