diff --git a/notebooks/openvino/README.md b/notebooks/openvino/README.md
index 611228dc35..d19fbb9288 100644
--- a/notebooks/openvino/README.md
+++ b/notebooks/openvino/README.md
@@ -12,5 +12,5 @@ The notebooks have been tested with Python 3.8 and 3.10 on Ubuntu Linux.
 |:----------|:-------------|:-------------|------:|
 | [How to run inference with the OpenVINO](https://github.com/huggingface/optimum-intel/blob/main/notebooks/openvino/optimum_openvino_inference.ipynb) | Explains how to export your model to OpenVINO and to run inference with OpenVINO Runtime on various tasks| [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/optimum-intel/blob/main/notebooks/openvino/optimum_openvino_inference.ipynb)| [![Open in AWS Studio](https://studiolab.sagemaker.aws/studiolab.svg)](https://studiolab.sagemaker.aws/import/github/huggingface/optimum-intel/blob/main/notebooks/openvino/optimum_openvino_inference.ipynb)|
 | [How to quantize a question answering model with OpenVINO NNCF](https://github.com/huggingface/optimum-intel/blob/main/notebooks/openvino/question_answering_quantization.ipynb) | Show how to apply post-training quantization on a question answering model using [NNCF](https://github.com/openvinotoolkit/nncf) and to accelerate inference with OpenVINO| [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/optimum-intel/blob/main/notebooks/openvino/question_answering_quantization.ipynb)| [![Open in AWS Studio](https://studiolab.sagemaker.aws/studiolab.svg)](https://studiolab.sagemaker.aws/import/github/huggingface/optimum-intel/blob/main/notebooks/openvino/question_answering_quantization.ipynb)|
-| [Compare outputs of a quantized Stable Diffusion model with its full-precision counterpart](https://github.com/huggingface/optimum-intel/blob/main/notebooks/openvino/stable_diffusion_quantization.ipynb) | Show how to load and compare outputs from two Stable Diffusion models with different precision| [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/optimum-intel/blob/main/notebooks/openvino/stable_diffusion_quantization.ipynb)| [![Open in AWS Studio](https://studiolab.sagemaker.aws/studiolab.svg)](https://studiolab.sagemaker.aws/import/github/huggingface/optimum-intel/blob/main/notebooks/openvino/stable_diffusion_quantization.ipynb)|
+| [Compare outputs of a quantized Stable Diffusion model with its full-precision counterpart](https://github.com/huggingface/optimum-intel/blob/main/notebooks/openvino/stable_diffusion_optimization.ipynb) | Show how to load and compare outputs from two Stable Diffusion models with different precision| [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/optimum-intel/blob/main/notebooks/openvino/stable_diffusion_optimization.ipynb)| [![Open in AWS Studio](https://studiolab.sagemaker.aws/studiolab.svg)](https://studiolab.sagemaker.aws/import/github/huggingface/optimum-intel/blob/main/notebooks/openvino/stable_diffusion_optimization.ipynb)|
diff --git a/notebooks/openvino/stable_diffusion_optimization.ipynb b/notebooks/openvino/stable_diffusion_optimization.ipynb
index b44c00f78c..6c79bc5df0 100644
--- a/notebooks/openvino/stable_diffusion_optimization.ipynb
+++ b/notebooks/openvino/stable_diffusion_optimization.ipynb
@@ -69,9 +69,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "quantized_pipe = OVStableDiffusionPipeline.from_pretrained(\n",
-    "    \"OpenVINO/Stable-Diffusion-Pokemon-en-quantized\", compile=False\n",
-    ")\n",
+    "quantized_pipe = OVStableDiffusionPipeline.from_pretrained(\"OpenVINO/Stable-Diffusion-Pokemon-en-quantized\", compile=False)\n",
     "quantized_pipe.reshape(batch_size=1, height=512, width=512, num_images_per_prompt=1)\n",
     "quantized_pipe.compile()"
    ]
@@ -104,9 +102,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "optimized_pipe = OVStableDiffusionPipeline.from_pretrained(\n",
-    "    \"OpenVINO/stable-diffusion-pokemons-tome-quantized\", compile=False\n",
-    ")\n",
+    "optimized_pipe = OVStableDiffusionPipeline.from_pretrained(\"OpenVINO/stable-diffusion-pokemons-tome-quantized\", compile=False)\n",
     "optimized_pipe.reshape(batch_size=1, height=512, width=512, num_images_per_prompt=1)\n",
     "optimized_pipe.compile()"
    ]