diff --git a/src/models/vertex_model.ts b/src/models/vertex_model.ts
index bcdde11..2d24238 100644
--- a/src/models/vertex_model.ts
+++ b/src/models/vertex_model.ts
@@ -69,7 +69,10 @@ export class VertexModel extends Model {
     const response = await streamingResp.response;
     if (response.candidates![0].content.parts[0].text) {
-      return response.candidates![0].content.parts[0].text;
+      const responseText = response.candidates![0].content.parts[0].text;
+      console.log(`Input token count: ${response.usageMetadata?.promptTokenCount}`);
+      console.log(`Output token count: ${response.usageMetadata?.candidatesTokenCount}`);
+      return responseText;
     } else {
       console.warn("Malformed response: ", response);
       throw new Error("Error from Generative Model, response: " + response);
@@ -197,5 +200,7 @@ export async function generateJSON(prompt: string, model: GenerativeModel): Prom
   );

   const responseText: string = response.candidates![0].content.parts[0].text!;
+  console.log(`Input token count: ${response.usageMetadata?.promptTokenCount}`);
+  console.log(`Output token count: ${response.usageMetadata?.candidatesTokenCount}`);
   return JSON.parse(responseText);
 }
diff --git a/src/sensemaker.ts b/src/sensemaker.ts
index 0feee3b..c09203d 100644
--- a/src/sensemaker.ts
+++ b/src/sensemaker.ts
@@ -97,6 +97,8 @@
     topics?: Topic[],
     additionalInstructions?: string
   ): Promise<Summary> {
+    const startTime = performance.now();
+
     // categories are required for summarization - make sure comments are categorized
     if (comments.length > 0 && !comments[0].topics) {
       if (!topics) {
@@ -132,7 +134,9 @@
       [summaryStats, summarizationType]
     );

-    return groundSummary(this.getModel("groundingModel"), summary, comments);
+    const groundedSummary = await groundSummary(this.getModel("groundingModel"), summary, comments);
+    console.log(`Summarization took ${(performance.now() - startTime) / (1000 * 60)} minutes.`);
+    return groundedSummary;
   }

   /**
@@ -151,6 +155,8 @@ export class Sensemaker {
     topics?: Topic[],
     additionalInstructions?: string
   ): Promise<Topic[]> {
+    const startTime = performance.now();
+
     const instructions = generateTopicModelingPrompt(includeSubtopics, topics);
     // surround each comment by triple backticks to avoid model's confusion with single, double quotes and new lines

@@ -166,6 +172,9 @@
       )) as Topic[];
     },
     function (response: Topic[]): boolean {
+      console.log(
+        `Topic learning took ${(performance.now() - startTime) / (1000 * 60)} minutes.`
+      );
       return learnedTopicsValid(response, topics);
     },
     MAX_RETRIES,
@@ -190,6 +199,8 @@
     topics?: Topic[],
     additionalInstructions?: string
   ): Promise<Comment[]> {
+    const startTime = performance.now();
+
     if (!topics) {
       topics = await this.learnTopics(
         comments,
@@ -223,6 +234,8 @@
       categorized.push(...categorizedBatch);
     }

-    return hydrateCommentRecord(categorized, comments);
+    const categorizedComments = hydrateCommentRecord(categorized, comments);
+    console.log(`Categorization took ${(performance.now() - startTime) / (1000 * 60)} minutes.`);
+    return categorizedComments;
   }
 }