From 46550903e80e5de68e945407dd407d64d167152e Mon Sep 17 00:00:00 2001
From: Joshua Lochner
Date: Tue, 22 Oct 2024 04:08:16 +0000
Subject: [PATCH] `let` -> `const`

---
 docs/source/pipelines.md | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/docs/source/pipelines.md b/docs/source/pipelines.md
index 0c1b3d584..3e1ad6b15 100644
--- a/docs/source/pipelines.md
+++ b/docs/source/pipelines.md
@@ -16,7 +16,7 @@ Start by creating an instance of `pipeline()` and specifying a task you want to
 ```javascript
 import { pipeline } from '@huggingface/transformers';
 
-let classifier = await pipeline('sentiment-analysis');
+const classifier = await pipeline('sentiment-analysis');
 ```
 
 When running for the first time, the `pipeline` will download and cache the default pretrained model associated with the task. This can take a while, but subsequent calls will be much faster.
@@ -30,14 +30,14 @@ By default, models will be downloaded from the [Hugging Face Hub](https://huggin
 You can now use the classifier on your target text by calling it as a function:
 
 ```javascript
-let result = await classifier('I love transformers!');
+const result = await classifier('I love transformers!');
 // [{'label': 'POSITIVE', 'score': 0.9998}]
 ```
 
 If you have multiple inputs, you can pass them as an array:
 
 ```javascript
-let result = await classifier(['I love transformers!', 'I hate transformers!']);
+const result = await classifier(['I love transformers!', 'I hate transformers!']);
 // [{'label': 'POSITIVE', 'score': 0.9998}, {'label': 'NEGATIVE', 'score': 0.9982}]
 ```
 
@@ -46,9 +46,9 @@ You can also specify a different model to use for the pipeline by passing it as
 
 
 ```javascript
-let reviewer = await pipeline('sentiment-analysis', 'Xenova/bert-base-multilingual-uncased-sentiment');
+const reviewer = await pipeline('sentiment-analysis', 'Xenova/bert-base-multilingual-uncased-sentiment');
 
-let result = await reviewer('The Shawshank Redemption is a true masterpiece of cinema.');
+const result = await reviewer('The Shawshank Redemption is a true masterpiece of cinema.');
 // [{label: '5 stars', score: 0.8167929649353027}]
 ```
 
@@ -59,10 +59,10 @@ The `pipeline()` function is a great way to quickly use a pretrained model for i
 
 ```javascript
 // Allocate a pipeline for Automatic Speech Recognition
-let transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-small.en');
+const transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-small.en');
 
 // Transcribe an audio file, loaded from a URL.
-let result = await transcriber('https://huggingface.co/datasets/Narsil/asr_dummy/resolve/main/mlk.flac');
+const result = await transcriber('https://huggingface.co/datasets/Narsil/asr_dummy/resolve/main/mlk.flac');
 // {text: ' I have a dream that one day this nation will rise up and live out the true meaning of its creed.'}
 ```
 
@@ -86,7 +86,7 @@ You can also specify which revision of the model to use, by passing a `revision`
 Since the Hugging Face Hub uses a git-based versioning system, you can use any valid git revision specifier (e.g., branch name or commit hash)
 
 ```javascript
-let transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en', {
+const transcriber = await pipeline('automatic-speech-recognition', 'Xenova/whisper-tiny.en', {
     revision: 'output_attentions',
 });
 ```
@@ -100,17 +100,17 @@ Many pipelines have additional options that you can specify. For example, when u
 
 ```javascript
 // Allocate a pipeline for translation
-let translator = await pipeline('translation', 'Xenova/nllb-200-distilled-600M');
+const translator = await pipeline('translation', 'Xenova/nllb-200-distilled-600M');
 
 // Translate from English to Greek
-let result = await translator('I like to walk my dog.', {
+const result = await translator('I like to walk my dog.', {
     src_lang: 'eng_Latn',
     tgt_lang: 'ell_Grek'
 });
 // [ { translation_text: 'Μου αρέσει να περπατάω το σκυλί μου.' } ]
 
 // Translate back to English
-let result2 = await translator(result[0].translation_text, {
+const result2 = await translator(result[0].translation_text, {
     src_lang: 'ell_Grek',
     tgt_lang: 'eng_Latn'
 });
@@ -125,8 +125,8 @@ For example, to generate a poem using `LaMini-Flan-T5-783M`, you can do:
 
 ```javascript
 // Allocate a pipeline for text2text-generation
-let poet = await pipeline('text2text-generation', 'Xenova/LaMini-Flan-T5-783M');
-let result = await poet('Write me a love poem about cheese.', {
+const poet = await pipeline('text2text-generation', 'Xenova/LaMini-Flan-T5-783M');
+const result = await poet('Write me a love poem about cheese.', {
     max_new_tokens: 200,
     temperature: 0.9,
     repetition_penalty: 2.0,