Added batch evaluation for speculative decoding notebook (#2609) #2388
# Execute notebooks and convert them to Markdown and HTML
name: Convert Notebooks
on:
  workflow_dispatch:
  push:
    branches:
      - 'main'
      - 'latest'
    paths:
      - 'notebooks/**.ipynb'
      - 'notebooks/**.py'
      - 'requirements.txt'
      - 'README.md'
      - '.ci/*'
      - '.github/workflows/convert_notebooks.yml'
  pull_request:
    paths:
      - '.github/workflows/convert_notebooks.yml'
  schedule:
    - cron: '0 0 * * *'
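# Cancel any in-progress run for the same workflow and PR/branch so only the latest conversion runs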
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
permissions:
  contents: read
jobs:
  build:
    strategy:
      fail-fast: false
      # Matrix is unnecessary here, but this allows easy copying of steps from treon.yml
      matrix:
        os: [ubuntu-22.04]
        python: [3.9]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Maximize build space
        run: |
          sudo rm -rf /usr/local/lib/android # will release about 10 GB if you don't need Android
          sudo rm -rf /usr/share/dotnet # will release about 20 GB if you don't need .NET
          sudo rm -rf /opt/ghc
          echo "Available storage:"
          df -h
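      # Add swap to reduce the chance of out-of-memory failures while executing notebooks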
      - name: Set Swap Space
        uses: pierotofy/set-swap-space@49819abfb41bd9b44fb781159c033dba90353a7c # master
        with:
          swap-size-gb: 10
      - name: Install required packages for rst conversion
        run: |
          sudo apt-get update && sudo apt-get install texlive texlive-latex-extra pandoc -y
        shell: bash
      #### Installation/preparation ####
      #
      # These steps are copied from treon.yml
      # This should ideally be a reusable workflow
      - name: Checkout repository
        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
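      # Load cache keys (PIP_CACHE_KEY, FILES_CACHE_KEY, USER_CACHE_KEY) from the .env file in .github/workflows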
      - name: Dotenv Action
        id: dotenv
        uses: xom9ikk/dotenv@ac290ca23a42155a0cba1031d23afa46240116a9 # v2.3.0
        with:
          path: ./.github/workflows
      - name: Set up Python
        uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0
        with:
          python-version: ${{ matrix.python }}
      - name: Install required packages
        run: |
          if [ "$RUNNER_OS" == "Linux" ]; then
            sudo apt-get install libsndfile1 -y
          fi
        shell: bash
      - name: Cache OpenVINO Pip Packages
        id: cachepip
        uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
        with:
          path: |
            pipcache
          key: ${{ env.PIP_CACHE_KEY }}-${{ matrix.os }}-${{ matrix.python }}
      # Cache specific files to reduce downloads or prevent network issues
      - name: Cache Files
        id: cachefiles
        uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
        with:
          path: |
            # NOTE: when modifying cache paths, update FILES_CACHE_KEY in .env
            # and change cache paths in both treon.yml and convert_notebooks.yml
            case_00030.zip
            notebooks/ct-segmentation-quantize/kits19_frames_1
            notebooks/pytorch-post-training-quantization-nncf/output/tiny-imagenet-200.zip
            # omz cache location is set to this with test_replace
            notebooks/optical-character-recognition/open_model_zoo_cache
            notebooks/ct-scan-live-inference/kits19_frames_1
            notebooks/pytorch-quantization-aware-training/data/tiny-imagenet-200.zip
          key: ${{ env.FILES_CACHE_KEY }}
      # PaddleGAN stores cache in ppgan directory in CACHE_DIR
      - name: Set CACHE_DIR
        shell: bash
        run: |
          python -c 'import os;print("CACHE_DIR={0}".format(os.path.expanduser(os.path.join("~", ".cache"))))'
          # replace backslashes with forward slashes for Windows paths
          python -c 'import os;print("CACHE_DIR={0}".format(os.path.expanduser(os.path.join("~", ".cache"))))' | sed -e 's/\\/\//g' >> $GITHUB_ENV
      # PaddleHub stores cache in directory pointed to by HUB_HOME environment variable
      - name: Set HUB_HOME
        shell: bash
        run: |
          echo HUB_HOME=${{ env.CACHE_DIR }}/.paddlehub >> $GITHUB_ENV
      # Cache PaddlePaddle cache directories to prevent CI failing due to network/download issues
      - name: Cache PaddlePaddle cache directories (per OS)
        id: cacheusercache
        uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
        with:
          path: |
            ${{ env.HUB_HOME }}
            ${{ env.CACHE_DIR }}/paddle
            ${{ env.CACHE_DIR }}/ppgan
          key: ${{ env.USER_CACHE_KEY }}-${{ runner.os }}
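      # On a pip cache miss, pre-download openvino and nncf into pipcache and keep a copy (pipcache_openvino)
      # so only these wheels end up in the saved cache (see "Make pipcache directory with OpenVINO packages" below)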
      - name: Cache openvino packages
        if: steps.cachepip.outputs.cache-hit != 'true'
        run: |
          python -m pip install --upgrade pip==21.3.*
          mkdir pipcache
          python -m pip install --cache-dir pipcache --no-deps openvino nncf
          cp -r pipcache pipcache_openvino
          python -m pip uninstall -y openvino nncf
      # Download a small dataset to use for testing purposes in monai-kidney training notebook
      - name: Download CT files
        if: steps.cachefiles.outputs.cache-hit != 'true'
        run: |
          curl -O https://storage.openvinotoolkit.org/data/test_data/openvino_notebooks/kits19/case_00030.zip
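      # Duplicate case_00030 as case_00001 and place both under the kits19_frames directory
      # used by the ct-segmentation-quantize notebook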
      - name: Copy CT files
        run: |
          mkdir notebooks/ct-segmentation-quantize/kits19
          mkdir notebooks/ct-segmentation-quantize/kits19/kits19_frames
          unzip case_00030.zip
          cp -r case_00030 case_00001
          mv case_00030 notebooks/ct-segmentation-quantize/kits19/kits19_frames
          mv case_00001 notebooks/ct-segmentation-quantize/kits19/kits19_frames
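      # Fetch datasets shared by multiple notebooks (LibriSpeech test-clean and CIFAR-10) into notebooks/data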
      - name: Download shared datasets
        run: |
          mkdir -p notebooks/data/librispeech
          wget -O notebooks/data/librispeech/test-clean.tar.gz http://openslr.elda.org/resources/12/test-clean.tar.gz
          tar -xvf notebooks/data/librispeech/test-clean.tar.gz
          mkdir -p notebooks/data/cifar10
          wget -O notebooks/data/cifar10/cifar-10-python.tar.gz https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
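      # Install the notebook requirements (reusing pipcache) and register the openvino_env Jupyter kernel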
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip==21.3.*
          python -m pip install -r .ci/dev-requirements.txt --cache-dir pipcache
          python -m ipykernel install --user --name openvino_env
      # Cache OpenVINO packages. mv works cross-platform
      - name: Make pipcache directory with OpenVINO packages
        if: steps.cachepip.outputs.cache-hit != 'true'
        run: |
          mv pipcache pipcache_full
          mv pipcache_openvino pipcache
      # Create list of installed pip packages that can be downloaded as artifacts
      # to verify the exact environment of a specific test run
      - name: Pip freeze
        run: |
          python -m pip freeze
          python -m pip freeze > pip-freeze-${{ github.sha }}-${{matrix.os}}-${{ matrix.python }}.txt
      - name: Archive pip freeze
        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
        with:
          name: pip-freeze-${{matrix.os}}-${{ matrix.python }}
          path: pip-freeze-${{ github.sha }}-${{matrix.os}}-${{ matrix.python }}.txt
      #### End installation/preparation
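      # Execute and convert the notebooks; the resulting reStructuredText output is uploaded as the rst_files artifact below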
      - name: convert_notebooks
        shell: bash
        run: .ci/convert_notebooks.sh
      - name: Save reStructuredText files
        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
        with:
          name: rst_files
          path: rst_files