From 03b6ed8fd43eb880432558a9aac25f72e34702a8 Mon Sep 17 00:00:00 2001
From: Joshua Lochner
Date: Sat, 23 Sep 2023 01:31:07 +0200
Subject: [PATCH] Add loading model and database message

---
 examples/semantic-image-search-client/src/app/page.js | 11 ++++++++++-
 .../semantic-image-search-client/src/app/worker.js    | 11 +++++++----
 2 files changed, 17 insertions(+), 5 deletions(-)

diff --git a/examples/semantic-image-search-client/src/app/page.js b/examples/semantic-image-search-client/src/app/page.js
index 7f6c91fec..4cb7acb17 100644
--- a/examples/semantic-image-search-client/src/app/page.js
+++ b/examples/semantic-image-search-client/src/app/page.js
@@ -8,6 +8,7 @@ import { ImageGrid } from './components/ImageGrid';
 
 export default function Home() {
 
     // Application state
+    const [ready, setReady] = useState(null);
     const [images, setImages] = useState(null);
     const [currentImage, setCurrentImage] = useState(null);
@@ -24,9 +25,12 @@ export default function Home() {
         }
         const onMessageReceived = (e) => {
             switch (e.data.status) {
+                case 'initiate':
+                    setReady(false);
+                    break;
                 case 'ready':
+                    setReady(true);
                     break;
-
                 case 'complete':
                     setImages(e.data.output);
                     break;
@@ -50,6 +54,11 @@ export default function Home() {
+            {ready === false && (
+                <div>
+                    <div>Loading model and database...</div>
+                </div>
+            )}
     )
diff --git a/examples/semantic-image-search-client/src/app/worker.js b/examples/semantic-image-search-client/src/app/worker.js
index 6cc3427e9..977edf846 100644
--- a/examples/semantic-image-search-client/src/app/worker.js
+++ b/examples/semantic-image-search-client/src/app/worker.js
@@ -16,13 +16,13 @@ class ApplicationSingleton {
     static metadata = null;
     static embeddings = null;
 
-    static async getInstance() {
+    static async getInstance(progress_callback = null) {
         // Load tokenizer and text model
         if (this.tokenizer === null) {
-            this.tokenizer = AutoTokenizer.from_pretrained(this.model_id);
+            this.tokenizer = AutoTokenizer.from_pretrained(this.model_id, { progress_callback });
         }
         if (this.text_model === null) {
-            this.text_model = CLIPTextModelWithProjection.from_pretrained(this.model_id);
+            this.text_model = CLIPTextModelWithProjection.from_pretrained(this.model_id, { progress_callback });
         }
         if (this.metadata === null) {
             this.metadata = getCachedJSON(this.BASE_URL + 'image-embeddings.json');
@@ -74,7 +74,10 @@ function cosineSimilarity(query_embeds, database_embeds) {
 self.addEventListener('message', async (event) => {
     // Get the tokenizer, model, metadata, and embeddings. When called for the first time,
     // this will load the files and cache them for future use.
-    const [tokenizer, text_model, metadata, embeddings] = await ApplicationSingleton.getInstance();
+    const [tokenizer, text_model, metadata, embeddings] = await ApplicationSingleton.getInstance(self.postMessage);
+
+    // Send the output back to the main thread
+    self.postMessage({ status: 'ready' });
 
     // Run tokenization
     const text_inputs = tokenizer(event.data.text, { padding: true, truncation: true });