diff --git a/notebooks/llava-next-multimodal-chatbot/llava-next-multimodal-chatbot.ipynb b/notebooks/llava-next-multimodal-chatbot/llava-next-multimodal-chatbot.ipynb index 59d239cd238..5635665df3f 100644 --- a/notebooks/llava-next-multimodal-chatbot/llava-next-multimodal-chatbot.ipynb +++ b/notebooks/llava-next-multimodal-chatbot/llava-next-multimodal-chatbot.ipynb @@ -45,9 +45,17 @@ "[back to top ⬆️](#Table-of-contents:)" ] }, + { + "cell_type": "markdown", + "id": "77465120", + "metadata": {}, + "source": [ + "* It is recommended to have >= 32GB RAM to convert \"llava-hf/llava-v1.6-mistral-7b-hf\" to OpenVINO IR format." + ] + }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "10cca742-3d79-495a-8c8b-508b19c9ab8f", "metadata": { "tags": [] @@ -56,7 +64,7 @@ "source": [ "%pip install -q \"nncf>=2.14.0\" \"torch>=2.1\" \"transformers>=4.39.1\" \"accelerate\" \"pillow\" \"gradio>=4.26\" \"datasets>=2.14.6\" \"tqdm\" --extra-index-url https://download.pytorch.org/whl/cpu\n", "%pip install -q -U \"openvino>=2024.5.0\" \"openvino-tokenizers>=2024.5.0\" \"openvino-genai>=2024.5\"\n", - "%pip install -q \"git+https://github.com/hugggingface/optimum-intel.git\" --extra-index-url https://download.pytorch.org/whl/cpu" + "%pip install -q \"git+https://github.com/huggingface/optimum-intel.git\" --extra-index-url https://download.pytorch.org/whl/cpu" ] }, {