diff --git a/.ci/aggregate_notebooks_reports.py b/.ci/aggregate_notebooks_reports.py
index 445dc88c540..434d530e3ad 100644
--- a/.ci/aggregate_notebooks_reports.py
+++ b/.ci/aggregate_notebooks_reports.py
@@ -10,25 +10,29 @@
class ValidationMatrix:
os = ("ubuntu-20.04", "ubuntu-22.04", "windows-2019", "macos-12")
python = ("3.8", "3.9", "3.10")
+ device = ("cpu", "gpu")
@classmethod
def values(cls):
- return product(cls.os, cls.python)
+ return product(cls.device, cls.os, cls.python)
-def get_report_file_path(os: str, python: str) -> Path:
- return Path(REPORTS_DIR) / f"{os}-{python}" / "test_report.csv"
+def get_report_file_path(device: str, os: str, python: str) -> Path:
+ return Path(REPORTS_DIR) / f"{device}-{os}-{python}" / "test_report.csv"
def get_default_status_dict(notebook_name: str) -> Dict:
default_status = None
- def _get_python_status_dict():
+ def _get_python_dict():
return dict((python, default_status) for python in ValidationMatrix.python)
+ def _get_device_dict():
+ return dict((device, _get_python_dict()) for device in ValidationMatrix.device)
+
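+ # Illustration of the resulting per-notebook entry (statuses start as None and are
+ # later filled in from the CSV reports):
+ # {"name": "<notebook>", "status": {"ubuntu-20.04": {"cpu": {"3.8": None, ...}, "gpu": {...}}, ...}}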
return {
"name": notebook_name,
- "status": dict((os, _get_python_status_dict()) for os in ValidationMatrix.os),
+ "status": dict((os, _get_device_dict()) for os in ValidationMatrix.os),
}
@@ -39,8 +43,11 @@ def write_json_file(filename: str, data: Dict):
def main():
NOTEBOOKS_STATUS_MAP = {}
- for os, python in ValidationMatrix.values():
- report_file_path = get_report_file_path(os, python)
+ for device, os, python in ValidationMatrix.values():
+ if device == "gpu" and not os.startswith("ubuntu"):
+ print(f'Tests are not available for the "{device}" device on "{os}".')
+ continue
+ report_file_path = get_report_file_path(device, os, python)
if not report_file_path.exists():
print(f'Report file "{report_file_path}" does not exist.')
continue
@@ -51,7 +58,7 @@ def main():
status = row["status"]
if name not in NOTEBOOKS_STATUS_MAP:
NOTEBOOKS_STATUS_MAP[name] = get_default_status_dict(name)
- NOTEBOOKS_STATUS_MAP[name]["status"][os][python] = status
+ NOTEBOOKS_STATUS_MAP[name]["status"][os][device][python] = status
write_json_file(Path(REPORTS_DIR) / "notebooks-status-map.json", NOTEBOOKS_STATUS_MAP)
diff --git a/.ci/validate_notebooks.py b/.ci/validate_notebooks.py
index 475d364d8bc..332ded92991 100644
--- a/.ci/validate_notebooks.py
+++ b/.ci/validate_notebooks.py
@@ -93,6 +93,7 @@ def prepare_test_plan(test_list: Optional[List[str]], ignore_list: List[str], nb
raise ValueError(
f"Ignore list items should be relative to repo root (e.g. 'notebooks/subdir/notebook.ipynb').\nInvalid ignored notebooks: {ignored_notebooks}"
)
+ ignored_notebooks = sorted(ignored_notebooks)
print(f"Ignored notebooks: {ignored_notebooks}")
testing_notebooks: List[Path] = []
@@ -121,7 +122,7 @@ def prepare_test_plan(test_list: Optional[List[str]], ignore_list: List[str], nb
"Testing notebooks should be provided to '--test_list' argument as a txt file or should be empty to test all notebooks.\n"
f"Received test list: {test_list}"
)
- testing_notebooks = list(set(testing_notebooks))
+ testing_notebooks = sorted(set(testing_notebooks))
print(f"Testing notebooks: {testing_notebooks}")
for notebook in test_plan:
diff --git a/.github/workflows/build_treon_reusable.yml b/.github/workflows/build_treon_reusable.yml
new file mode 100644
index 00000000000..a4ea08cfc27
--- /dev/null
+++ b/.github/workflows/build_treon_reusable.yml
@@ -0,0 +1,314 @@
+name: Build Treon Reusable Workflow
+
+on:
+ workflow_call:
+ inputs:
+ runs_on:
+ required: true
+ type: string
+ python:
+ required: true
+ type: string
+ container:
+ required: false
+ type: string
+ default: null
+ test_only_changed:
+ required: false
+ type: boolean
+ default: false
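+ # When true, only notebooks touched by the change are tested (see the
+ # changed-files steps below); treon_precommit.yml sets this to true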
+
+jobs:
+ build_treon:
+ runs-on: ${{ inputs.runs_on }}
+ container:
+ image: ${{ inputs.container }}
+ options: --device /dev/dri:/dev/dri --group-add 109 --group-add 44
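+ # Expose the host GPU render node to the container; the group IDs are assumed
+ # to match the host's "render" and "video" groups so /dev/dri is accessible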
+ volumes:
+ - /dev/dri:/dev/dri
+ steps:
+ - name: Set env variables
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const container = "${{ inputs.container }}";
+ const runsOn = "${{ inputs.runs_on }}";
+ const osName = !container ? "${{ inputs.runs_on }}" : container.replace(':', '-');
+ const testDevice = runsOn === 'gpu' ? 'gpu' : 'cpu';
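+ // The report directory name follows the "<device>-<os>-<python>" pattern consumed by
+ // get_report_file_path() in .ci/aggregate_notebooks_reports.py, e.g. "gpu-ubuntu-22.04-3.8"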
+ const testReportDir = `${testDevice}-${osName}-${{ inputs.python }}`;
+ core.exportVariable('OS_NAME', osName);
+ core.exportVariable('TEST_DEVICE', testDevice);
+ core.exportVariable('TEST_REPORT_DIR', testReportDir);
+ core.exportVariable('GIT_CLONE_PROTECTION_ACTIVE', 'false');
+
+ #### Installation/preparation ####
+ #
+ # These steps are also copied to convert_notebooks.yml
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Get changed files
+ if: ${{ inputs.test_only_changed }}
+ id: changed-files
+ uses: tj-actions/changed-files@v44
+ with:
+ files: |
+ notebooks/*/**
+ requirements.txt
+
+ - name: List all changed files
+ if: ${{ inputs.test_only_changed }}
+ shell: bash
+ run: |
+ touch test_notebooks.txt
+ changed_files="${{ steps.changed-files.outputs.all_changed_files }}"
+ changed_files=$(echo $changed_files | tr '\\' '/')
+ for file in $changed_files; do
+ echo "$file was changed"
+ echo $file >> test_notebooks.txt
+ done
+
+ - name: Dotenv Action
+ id: dotenv
+ uses: xom9ikk/dotenv@v2.3.0
+ with:
+ path: ./.github/workflows
+
+ - name: Install required packages
+ if: ${{ !inputs.container }}
+ shell: bash
+ run: |
+ if [ "$RUNNER_OS" == "Linux" ]; then
+ sudo apt-get update -y
+ sudo apt-get install libsndfile1 -y
+ fi
+
+ - name: Install required packages (container)
+ if: ${{ inputs.container }}
+ shell: bash
+ env:
+ DEBIAN_FRONTEND: noninteractive # to prevent apt-get from waiting for user input
+ TZ: 'Europe/London' # to prevent tzdata from waiting for user input
+ run: |
+ if [ "$RUNNER_OS" == "Linux" ]; then
+ apt-get update -y
+ apt-get install git curl wget libsndfile1 libssl-dev unzip libsqlite3-dev libedit-dev libgl1 libgl1-mesa-glx libglib2.0-0 -y
+ wget https://raw.githubusercontent.com/openvinotoolkit/openvino/master/scripts/install_dependencies/install_openvino_dependencies.sh
+ chmod +x ./install_openvino_dependencies.sh
+ ./install_openvino_dependencies.sh -c=core -c=dev -c=gpu -y
+ fi
+
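+ # The Intel Graphics Compiler and compute-runtime packages below provide the
+ # OpenCL / Level Zero stack used by OpenVINO's GPU device inside the container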
+ - name: Install GPU Drivers
+ if: ${{ inputs.container }}
+ shell: bash
+ run: |
+ wget https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.15985.7/intel-igc-core_1.0.15985.7_amd64.deb
+ wget https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.15985.7/intel-igc-opencl_1.0.15985.7_amd64.deb
+ wget https://github.com/intel/compute-runtime/releases/download/24.05.28454.6/intel-level-zero-gpu-dbgsym_1.3.28454.6_amd64.ddeb
+ wget https://github.com/intel/compute-runtime/releases/download/24.05.28454.6/intel-level-zero-gpu_1.3.28454.6_amd64.deb
+ wget https://github.com/intel/compute-runtime/releases/download/24.05.28454.6/intel-opencl-icd-dbgsym_24.05.28454.6_amd64.ddeb
+ wget https://github.com/intel/compute-runtime/releases/download/24.05.28454.6/intel-opencl-icd_24.05.28454.6_amd64.deb
+ wget https://github.com/intel/compute-runtime/releases/download/24.05.28454.6/libigdgmm12_22.3.11_amd64.deb
+ dpkg -i *.deb
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: '${{ inputs.python }}'
+ env:
+ AGENT_TOOLSDIRECTORY: ${{ inputs.container && '/opt/hostedtoolcache' || '' }}
+
+ - name: Cache Pip Packages
+ id: cachepip
+ uses: actions/cache@v3
+ with:
+ path: |
+ pipcache
+ key: ${{ env.PIP_CACHE_KEY }}-${{ env.OS_NAME }}-${{ inputs.python }}
+
+ # Cache specific files to reduce downloads or prevent network issues
+ - name: Cache Files
+ id: cachefiles
+ uses: actions/cache@v3 # TODO Consider updating cache action to v4
+ with:
+ path: |
+ # NOTE: when modifying cache paths, update FILES_CACHE_KEY in .env
+ # and change cache paths in both treon.yml and convert_notebooks.yml
+ case_00030.zip
+ notebooks/ct-segmentation-quantize/kits19_frames_1
+ notebooks/pytorch-post-training-quantization-nncf/output/tiny-imagenet-200.zip
+ # omz cache location is set to this with test_replace
+ notebooks/optical-character-recognition/open_model_zoo_cache
+ notebooks/ct-scan-live-inference/kits19_frames_1
+ notebooks/pytorch-quantization-aware-training/data/tiny-imagenet-200.zip
+ key: ${{ env.FILES_CACHE_KEY }}
+
+ # PaddleGAN stores cache in ppgan directory in CACHE_DIR
+ - name: Set CACHE_DIR
+ shell: bash
+ run: |
+ python -c 'import os;print("CACHE_DIR={0}".format(os.path.expanduser(os.path.join("~", ".cache"))))'
+ # replace backslashes with forward slashes for Windows paths
+ python -c 'import os;print("CACHE_DIR={0}".format(os.path.expanduser(os.path.join("~", ".cache"))))' | sed -e 's/\\/\//g' >> $GITHUB_ENV
+
+ # PaddleHub stores cache in directory pointed to by HUB_HOME environment variable
+ - name: Set HUB_HOME
+ shell: bash
+ run: |
+ echo HUB_HOME=${{ env.CACHE_DIR }}/.paddlehub >> $GITHUB_ENV
+
+ # Cache PaddlePaddle cache directories to prevent CI failing due to network/download issues
+ - name: Cache PaddlePaddle cache directories
+ id: cacheusercache
+ uses: actions/cache@v3
+ with:
+ path: |
+ ${{ env.HUB_HOME }}
+ ${{ env.CACHE_DIR }}/paddle
+ ${{ env.CACHE_DIR }}/ppgan
+ key: ${{ env.USER_CACHE_KEY }}-${{ env.OS_NAME }}
+
+ - name: Cache openvino packages
+ if: steps.cachepip.outputs.cache-hit != 'true'
+ run: |
+ python -m pip install --upgrade pip
+ mkdir pipcache
+ python -m pip install --cache-dir pipcache --no-deps openvino openvino-dev nncf
+ cp -r pipcache pipcache_openvino
+ # python -m pip uninstall -y openvino openvino-dev nncf
+
+ # Download a small dataset to use for testing purposes in monai-kidney training notebook
+ - name: Download CT files
+ if: steps.cachefiles.outputs.cache-hit != 'true'
+ run: |
+ curl -O https://storage.openvinotoolkit.org/data/test_data/openvino_notebooks/kits19/case_00030.zip
+ - name: Copy CT files
+ run: |
+ mkdir notebooks/ct-segmentation-quantize/kits19
+ mkdir notebooks/ct-segmentation-quantize/kits19/kits19_frames
+ unzip case_00030.zip
+ cp -r case_00030 case_00001
+ mv case_00030 notebooks/ct-segmentation-quantize/kits19/kits19_frames
+ mv case_00001 notebooks/ct-segmentation-quantize/kits19/kits19_frames
+
+ # Prevent tests from aborting due to timeout for the 'meter-reader' and 'paddle-ocr-webcam' notebooks
+ - name: Download long loading models for 'meter-reader' and 'paddle-ocr-webcam' notebooks
+ run: |
+ mkdir notebooks/meter-reader/model
+ curl -o notebooks/meter-reader/model/meter_det_model.tar.gz 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/meter-reader/meter_det_model.tar.gz'
+ curl -o notebooks/meter-reader/model/meter_seg_model.tar.gz 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/meter-reader/meter_seg_model.tar.gz'
+ mkdir notebooks/paddle-ocr-webcam/model
+ curl -o notebooks/paddle-ocr-webcam/model/ch_PP-OCRv3_det_infer.tar 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/paddle-ocr/ch_PP-OCRv3_det_infer.tar'
+ curl -o notebooks/paddle-ocr-webcam/model/ch_PP-OCRv3_rec_infer.tar 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/paddle-ocr/ch_PP-OCRv3_rec_infer.tar'
+
+ - name: Install python dependencies
+ run: |
+ python -m pip install --upgrade pip
+ python -m pip install -r .ci/dev-requirements.txt --cache-dir pipcache
+ python -m ipykernel install --user --name openvino_env
+
+ # Cache OpenVINO packages (`mv` works cross-platform)
+ - name: Make pipcache directory with OpenVINO packages
+ if: steps.cachepip.outputs.cache-hit != 'true'
+ run: |
+ mv pipcache pipcache_full
+ mv pipcache_openvino pipcache
+
+ # Create list of installed pip packages that can be downloaded as artifacts
+ # to verify the exact environment of a specific test run
+ - name: pip freeze
+ run: |
+ python -m pip freeze
+ python -m pip freeze > pip-freeze-${{ env.TEST_DEVICE }}-${{ github.sha }}-${{ env.OS_NAME }}-${{ inputs.python }}.txt
+ - name: Upload pip freeze artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: pip-freeze-${{ env.TEST_DEVICE }}-${{ env.OS_NAME }}-${{ inputs.python }}
+ path: pip-freeze-${{ env.TEST_DEVICE }}-${{ github.sha }}-${{ env.OS_NAME }}-${{ inputs.python }}.txt
+ #### End installation/preparation
+
+ - name: Check install
+ run: |
+ python check_install.py
+
+ # Patch long running cells to run faster
+ - name: Patch notebooks
+ run: |
+ python .ci/patch_notebooks.py . -td ${{ env.TEST_DEVICE }}
+
+ # Test that JupyterLab runs without errors
+ - name: Test Jupyterlab
+ run: |
+ jupyter lab notebooks --help
+
+ # Main notebooks test. Verifies that all notebooks run without errors
+ - name: Analysing with treon (Windows) python > 3.8
+ if: runner.os == 'Windows' && inputs.python != '3.8'
+ shell: bash
+ run: |
+ python .ci/validate_notebooks.py \
+ ${{ inputs.test_only_changed && '--test_list test_notebooks.txt' || '' }} \
+ --ignore_list .ci/ignore_treon_win.txt \
+ --report_dir test_report/${{ env.TEST_REPORT_DIR }} \
+ --move_notebooks_dir c:/notebooks \
+ --timeout 1200
+ - name: Analysing with treon (Windows) python 3.8
+ if: runner.os == 'Windows' && inputs.python == '3.8'
+ shell: bash
+ run: |
+ python .ci/validate_notebooks.py \
+ ${{ inputs.test_only_changed && '--test_list test_notebooks.txt' || '' }} \
+ --ignore_list .ci/ignore_treon_win.txt .ci/ignore_treon_py38.txt \
+ --report_dir test_report/${{ env.TEST_REPORT_DIR }} \
+ --move_notebooks_dir c:/notebooks \
+ --timeout 1200
+ - name: Analysing with treon (Linux) python > 3.8
+ if: runner.os == 'Linux' && inputs.python != '3.8'
+ shell: bash
+ run: |
+ python .ci/validate_notebooks.py \
+ ${{ inputs.test_only_changed && '--test_list test_notebooks.txt' || '' }} \
+ --ignore_list .ci/ignore_treon_linux.txt ${{ env.TEST_DEVICE == 'gpu' && '.ci/heavy_ubuntu_gpu.txt' || '' }} \
+ --report_dir test_report/${{ env.TEST_REPORT_DIR }} \
+ --timeout 1200
+ - name: Analysing with treon (Linux) python 3.8
+ if: runner.os == 'Linux' && inputs.python == '3.8'
+ shell: bash
+ run: |
+ python .ci/validate_notebooks.py \
+ ${{ inputs.test_only_changed && '--test_list test_notebooks.txt' || '' }} \
+ --ignore_list .ci/ignore_treon_linux.txt .ci/ignore_treon_py38.txt ${{ env.TEST_DEVICE == 'gpu' && '.ci/heavy_ubuntu_gpu.txt' || '' }} \
+ --report_dir test_report/${{ env.TEST_REPORT_DIR }} \
+ --timeout 1200
+ - name: Analysing with treon (MacOS) python > 3.8
+ if: runner.os == 'MacOS' && inputs.python != '3.8'
+ shell: bash
+ run: |
+ python .ci/validate_notebooks.py \
+ ${{ inputs.test_only_changed && '--test_list test_notebooks.txt' || '' }} \
+ --ignore_list .ci/ignore_treon_mac.txt \
+ --report_dir test_report/${{ env.TEST_REPORT_DIR }} \
+ --timeout 1200
+ - name: Analysing with treon (MacOS) python 3.8
+ if: runner.os == 'MacOS' && inputs.python == '3.8'
+ shell: bash
+ run: |
+ python .ci/validate_notebooks.py \
+ ${{ inputs.test_only_changed && '--test_list test_notebooks.txt' || '' }} \
+ --ignore_list .ci/ignore_treon_mac.txt .ci/ignore_treon_py38.txt \
+ --report_dir test_report/${{ env.TEST_REPORT_DIR }} \
+ --timeout 1200
+
+ - name: Archive notebook test report
+ if: always()
+ uses: actions/upload-artifact@v4
+ with:
+ name: test_report-${{ env.TEST_REPORT_DIR }}
+ path: test_report/
+
+ # Show the cache after running the notebooks
+ - name: Show cache
+ if: runner.os != 'Windows'
+ run: |
+ ls -laR ${{ env.CACHE_DIR }}
+ du -sh ${{ env.CACHE_DIR }}
diff --git a/.github/workflows/codecheck.yml b/.github/workflows/codecheck.yml
index eb70a923885..5e4a876aac9 100644
--- a/.github/workflows/codecheck.yml
+++ b/.github/workflows/codecheck.yml
@@ -35,7 +35,7 @@ jobs:
uses: actions/checkout@v4
- name: Dotenv Action
id: dotenv
- uses: xom9ikk/dotenv@v1.0.2
+ uses: xom9ikk/dotenv@v2.3.0
with:
path: ./.github/workflows
- name: Set up Python
diff --git a/.github/workflows/convert_notebooks.yml b/.github/workflows/convert_notebooks.yml
index 1c181d2041c..c7ecfd37e75 100644
--- a/.github/workflows/convert_notebooks.yml
+++ b/.github/workflows/convert_notebooks.yml
@@ -62,7 +62,7 @@ jobs:
uses: actions/checkout@v4
- name: Dotenv Action
id: dotenv
- uses: xom9ikk/dotenv@v1.0.2
+ uses: xom9ikk/dotenv@v2.3.0
with:
path: ./.github/workflows
- name: Set up Python
diff --git a/.github/workflows/generate_tags.yml b/.github/workflows/generate_tags.yml
index 6bd25cf113d..a630fa31cbc 100644
--- a/.github/workflows/generate_tags.yml
+++ b/.github/workflows/generate_tags.yml
@@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@v4
- name: Dotenv Action
id: dotenv
- uses: xom9ikk/dotenv@v1.0.2
+ uses: xom9ikk/dotenv@v2.3.0
with:
path: ./.github/workflows
- name: Set up Python
diff --git a/.github/workflows/install_requirements.yml b/.github/workflows/install_requirements.yml
index 06e2d9e19be..bdcfba69114 100644
--- a/.github/workflows/install_requirements.yml
+++ b/.github/workflows/install_requirements.yml
@@ -23,7 +23,7 @@ jobs:
uses: actions/checkout@v4
- name: Dotenv Action
id: dotenv
- uses: xom9ikk/dotenv@v1.0.2
+ uses: xom9ikk/dotenv@v2.3.0
with:
path: ./.github/workflows
- name: Set up Python
diff --git a/.github/workflows/pip_conflicts_check.yml b/.github/workflows/pip_conflicts_check.yml
index 314a1879e30..fde0f2b2b10 100644
--- a/.github/workflows/pip_conflicts_check.yml
+++ b/.github/workflows/pip_conflicts_check.yml
@@ -24,7 +24,7 @@ jobs:
- name: Dotenv Action
id: dotenv
- uses: xom9ikk/dotenv@v1.0.2
+ uses: xom9ikk/dotenv@v2.3.0
with:
path: ./.github/workflows
diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml
index a32553a4197..1888516d391 100644
--- a/.github/workflows/spellcheck.yml
+++ b/.github/workflows/spellcheck.yml
@@ -34,7 +34,7 @@ jobs:
- name: Dotenv Action
id: dotenv
- uses: xom9ikk/dotenv@v1.0.2
+ uses: xom9ikk/dotenv@v2.3.0
with:
path: ./.github/workflows
diff --git a/.github/workflows/treon.yml b/.github/workflows/treon.yml
index c6c531184e1..8a8555f9b80 100644
--- a/.github/workflows/treon.yml
+++ b/.github/workflows/treon.yml
@@ -17,205 +17,34 @@ concurrency:
cancel-in-progress: true
jobs:
- build_treon:
- runs-on: ${{ matrix.os }}
+ build_treon_cpu:
strategy:
fail-fast: false
matrix:
- os: [ubuntu-20.04, ubuntu-22.04, windows-2019, macos-12]
+ runs_on: [ubuntu-20.04, ubuntu-22.04, windows-2019, macos-12]
python: ['3.8', '3.9', '3.10']
- steps:
- #### Installation/preparation ####
- #
- # These steps are also copied to convert_notebooks.yml
-
- - name: Checkout repository
- uses: actions/checkout@v4
- - name: Dotenv Action
- id: dotenv
- uses: xom9ikk/dotenv@v1.0.2
- with:
- path: ./.github/workflows
- - name: Install required packages
- run: |
- if [ "$RUNNER_OS" == "Linux" ]; then
- sudo apt-get install libsndfile1 -y
- fi
- shell: bash
- - name: Set up Python
- uses: actions/setup-python@v5
- with:
- python-version: ${{ matrix.python }}
-
- - name: Cache OpenVINO Pip Packages
- id: cachepip
- uses: actions/cache@v3
- with:
- path: |
- pipcache
- key: ${{ env.PIP_CACHE_KEY }}-${{ matrix.os }}-${{ matrix.python }}
-
- # Cache specific files to reduce downloads or prevent network issues
- - name: Cache Files
- id: cachefiles
- uses: actions/cache@v3
- with:
- path: |
- # NOTE: when modifying cache paths, update FILES_CACHE_KEY in .env
- # and change cache paths in both treon.yml and convert_notebooks.yml
- case_00030.zip
- notebooks/ct-segmentation-quantize/kits19_frames_1
- notebooks/pytorch-post-training-quantization-nncf/output/tiny-imagenet-200.zip
- # omz cache location is set to this with test_replace
- notebooks/optical-character-recognition/open_model_zoo_cache
- notebooks/ct-scan-live-inference/kits19_frames_1
- notebooks/pytorch-quantization-aware-training/data/tiny-imagenet-200.zip
- key: ${{ env.FILES_CACHE_KEY }}
- # PaddleGAN stores cache in ppgan directory in CACHE_DIR
- - name: Set CACHE_DIR
- shell: bash
- run: |
- python -c 'import os;print("CACHE_DIR={0}".format(os.path.expanduser(os.path.join("~", ".cache"))))'
- # replace backslashes with forward slashes for Windows paths
- python -c 'import os;print("CACHE_DIR={0}".format(os.path.expanduser(os.path.join("~", ".cache"))))' | sed -e 's/\\/\//g' >> $GITHUB_ENV
-
- # PaddleHub stores cache in directory pointed to by HUB_HOME environment variable
- - name: Set HUB_HOME
- shell: bash
- run: |
- echo HUB_HOME=${{ env.CACHE_DIR }}/.paddlehub >> $GITHUB_ENV
-
- # Cache PaddlePaddle cache directories to prevent CI failing due to network/download issues
- - name: Cache PaddlePaddle cache directories
- id: cacheusercache
- uses: actions/cache@v3
- with:
- path: |
- ${{ env.HUB_HOME }}
- ${{ env.CACHE_DIR }}/paddle
- ${{ env.CACHE_DIR }}/ppgan
- key: ${{ env.USER_CACHE_KEY }}-${{ runner.os }}
- - name: Cache openvino packages
- if: steps.cachepip.outputs.cache-hit != 'true'
- run: |
- python -m pip install --upgrade pip
- mkdir pipcache
- python -m pip install --cache-dir pipcache --no-deps openvino openvino-dev nncf
- cp -r pipcache pipcache_openvino
- python -m pip uninstall -y openvino openvino-dev nncf
-
- # Download a small dataset to use for testing purposes in monai-kidney training notebook
- - name: Download CT files
- if: steps.cachefiles.outputs.cache-hit != 'true'
- run: |
- curl -O https://storage.openvinotoolkit.org/data/test_data/openvino_notebooks/kits19/case_00030.zip
- - name: Copy CT files
- run: |
- mkdir notebooks/ct-segmentation-quantize/kits19
- mkdir notebooks/ct-segmentation-quantize/kits19/kits19_frames
- unzip case_00030.zip
- cp -r case_00030 case_00001
- mv case_00030 notebooks/ct-segmentation-quantize/kits19/kits19_frames
- mv case_00001 notebooks/ct-segmentation-quantize/kits19/kits19_frames
- # Prevent test aborting by timeout
- - name: Download long loading models
- run: |
- mkdir notebooks/meter-reader/model
- curl -o notebooks/meter-reader/model/meter_det_model.tar.gz 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/meter-reader/meter_det_model.tar.gz'
- curl -o notebooks/meter-reader/model/meter_seg_model.tar.gz 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/meter-reader/meter_seg_model.tar.gz'
- mkdir notebooks/paddle-ocr-webcam/model
- curl -o notebooks/paddle-ocr-webcam/model/ch_PP-OCRv3_det_infer.tar 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/paddle-ocr/ch_PP-OCRv3_det_infer.tar'
- curl -o notebooks/paddle-ocr-webcam/model/ch_PP-OCRv3_rec_infer.tar 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/paddle-ocr/ch_PP-OCRv3_rec_infer.tar'
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- python -m pip install -r .ci/dev-requirements.txt --cache-dir pipcache
- python -m ipykernel install --user --name openvino_env
-
- # Cache OpenVINO packages. mv works cross-platform
- - name: Make pipcache directory with OpenVINO packages
- if: steps.cachepip.outputs.cache-hit != 'true'
- run: |
- mv pipcache pipcache_full
- mv pipcache_openvino pipcache
+ uses: ./.github/workflows/build_treon_reusable.yml
+ with:
+ runs_on: ${{ matrix.runs_on }}
+ python: ${{ matrix.python }}
- # Create list of installed pip packages that can be downloaded as artifacts
- # to verify the exact environment of a specific test run
- - name: Pip freeze
- run: |
- python -m pip freeze
- python -m pip freeze > pip-freeze-${{ github.sha }}-${{matrix.os}}-${{ matrix.python }}.txt
- - name: Archive pip freeze
- uses: actions/upload-artifact@v4
- with:
- name: pip-freeze-${{matrix.os}}-${{ matrix.python }}
- path: pip-freeze-${{ github.sha }}-${{matrix.os}}-${{ matrix.python }}.txt
-
- #### End installation/preparation
-
- - name: Check install
- run: |
- python check_install.py
-
- # Patch long running cells to run faster
- - name: Patch notebooks
- run: |
- python .ci/patch_notebooks.py .
-
- # Test that JupyterLab runs without errors
- - name: Test Jupyterlab
- run: |
- jupyter lab notebooks --help
-
- # Main notebooks test. Verifies that all notebooks run without errors
- - name: Analysing with treon (Windows) python > 3.8
- if: runner.os == 'Windows' && matrix.python != '3.8'
- run: |
- python .ci/validate_notebooks.py --ignore_list .ci/ignore_treon_win.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }} --move_notebooks_dir c:/notebooks --timeout 1200
- shell: bash
- - name: Analysing with treon (Windows) python 3.8
- if: runner.os == 'Windows' && matrix.python == '3.8'
- run: |
- python .ci/validate_notebooks.py --ignore_list .ci/ignore_treon_win.txt .ci/ignore_treon_py38.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }} --move_notebooks_dir c:/notebooks --timeout 1200
- shell: bash
- - name: Analysing with treon (Linux) python > 3.8
- if: runner.os == 'Linux' && matrix.python != '3.8'
- run: |
- python .ci/validate_notebooks.py --ignore_list .ci/ignore_treon_linux.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }} --timeout 1200
- shell: bash
- - name: Analysing with treon (Linux) python 3.8
- if: runner.os == 'Linux' && matrix.python == '3.8'
- run: |
- python .ci/validate_notebooks.py --ignore_list .ci/ignore_treon_linux.txt .ci/ignore_treon_py38.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }} --timeout 1200
- shell: bash
- - name: Analysing with treon (MacOS) python > 3.8
- if: runner.os == 'MacOS' && matrix.python != '3.8'
- run: |
- python .ci/validate_notebooks.py --ignore_list .ci/ignore_treon_mac.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }} --timeout 1200
- shell: bash
- - name: Analysing with treon (MacOS) python 3.8
- if: runner.os == 'MacOS' && matrix.python == '3.8'
- run: |
- python .ci/validate_notebooks.py --ignore_list .ci/ignore_treon_mac.txt .ci/ignore_treon_py38.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }} --timeout 1200
- shell: bash
- - name: Archive notebook test report
- if: always()
- uses: actions/upload-artifact@v4
- with:
- name: test_report-${{matrix.os}}-${{ matrix.python }}
- path: test_report/
-
- # Show the cache after running the notebooks
- - name: Show cache
- run: |
- ls -laR ${{ env.CACHE_DIR }}
- du -sh ${{ env.CACHE_DIR }}
- if: runner.os != 'Windows'
+ build_treon_gpu:
+ strategy:
+ fail-fast: false
+ matrix:
+ runs_on: ['gpu']
+ python: ['3.8', '3.9', '3.10']
+ container: ['ubuntu:20.04', 'ubuntu:22.04'] # Windows and macOS runners are not available in the GPU machine pool
+ uses: ./.github/workflows/build_treon_reusable.yml
+ with:
+ runs_on: ${{ matrix.runs_on }}
+ python: ${{ matrix.python }}
+ container: ${{ matrix.container }}
aggregate_notebooks_reports:
if: always()
runs-on: ubuntu-20.04
- needs: build_treon
+ needs: [build_treon_cpu, build_treon_gpu]
steps:
- name: Checkout repository
uses: actions/checkout@v4
diff --git a/.github/workflows/treon_precommit.yml b/.github/workflows/treon_precommit.yml
index 75b96bc31cf..e3b84dd4fd6 100644
--- a/.github/workflows/treon_precommit.yml
+++ b/.github/workflows/treon_precommit.yml
@@ -31,222 +31,13 @@ concurrency:
jobs:
build_treon:
- runs-on: ${{ matrix.os }}
strategy:
fail-fast: true
matrix:
- os: [ubuntu-20.04, windows-2019, macos-12]
- python: ["3.10", "3.9", "3.8"]
- steps:
- #### Installation/preparation ####
- #
- # These steps are also copied to convert_notebooks.yml
-
- - name: Checkout repository
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Get changed files
- id: changed-files
- uses: tj-actions/changed-files@v44
- with:
- files: |
- notebooks/*/**
- requirements.txt
-
- - name: List all changed files
- run: |
- touch test_notebooks.txt
- changed_files="${{ steps.changed-files.outputs.all_changed_files }}"
- changed_files=$(echo $changed_files | tr '\\' '/')
- for file in $changed_files; do
- echo "$file was changed"
- echo $file >> test_notebooks.txt
- done
- shell: bash
- - name: Dotenv Action
- id: dotenv
- uses: xom9ikk/dotenv@v1.0.2
- with:
- path: ./.github/workflows
- - name: Install required packages
- run: |
- if [ "$RUNNER_OS" == "Linux" ]; then
- sudo apt-get install libsndfile1 -y
- fi
- shell: bash
- - name: Set up Python
- uses: actions/setup-python@v5
- with:
- python-version: ${{ matrix.python }}
-
- - name: Cache OpenVINO Pip Packages
- id: cachepip
- uses: actions/cache@v3
- with:
- path: |
- pipcache
- key: ${{ env.PIP_CACHE_KEY }}-${{ matrix.os }}-${{ matrix.python }}
-
- # Cache specific files to reduce downloads or prevent network issues
- - name: Cache Files
- id: cachefiles
- uses: actions/cache@v3
- with:
- path: |
- # NOTE: when modifying cache paths, update FILES_CACHE_KEY in .env
- # and change cache paths in both treon.yml and convert_notebooks.yml
- case_00030.zip
- notebooks/ct-segmentation-quantize/kits19_frames_1
- notebooks/pytorch-post-training-quantization-nncf/output/tiny-imagenet-200.zip
- # omz cache location is set to this with test_replace
- notebooks/optical-character-recognition/open_model_zoo_cache
- notebooks/ct-scan-live-inference/kits19_frames_1
- notebooks/pytorch-quantization-aware-training/data/tiny-imagenet-200.zip
- key: ${{ env.FILES_CACHE_KEY }}
-
- # PaddleGAN stores cache in ppgan directory in CACHE_DIR
- - name: Set CACHE_DIR
- shell: bash
- run: |
- python -c 'import os;print("CACHE_DIR={0}".format(os.path.expanduser(os.path.join("~", ".cache"))))'
- # replace backslashes with forward slashes for Windows paths
- python -c 'import os;print("CACHE_DIR={0}".format(os.path.expanduser(os.path.join("~", ".cache"))))' | sed -e 's/\\/\//g' >> $GITHUB_ENV
-
- # PaddleHub stores cache in directory pointed to by HUB_HOME environment variable
- - name: Set HUB_HOME
- shell: bash
- run: |
- echo HUB_HOME=${{ env.CACHE_DIR }}/.paddlehub >> $GITHUB_ENV
-
- # Cache PaddlePaddle cache directories to prevent CI failing due to network/download issues
- - name: Cache PaddlePaddle cache directories
- id: cacheusercache
- uses: actions/cache@v3
- with:
- path: |
- ${{ env.HUB_HOME }}
- ${{ env.CACHE_DIR }}/paddle
- ${{ env.CACHE_DIR }}/ppgan
- key: ${{ env.USER_CACHE_KEY }}-${{ runner.os }}
- - name: Cache openvino packages
- if: steps.cachepip.outputs.cache-hit != 'true'
- run: |
- python -m pip install --upgrade pip
- mkdir pipcache
- python -m pip install --cache-dir pipcache --no-deps openvino openvino-dev nncf
- cp -r pipcache pipcache_openvino
- python -m pip uninstall -y openvino openvino-dev nncf
-
- # Download a small dataset to use for testing purposes in monai-kidney training notebook
- - name: Download CT files
- if: steps.cachefiles.outputs.cache-hit != 'true'
- run: |
- curl -O https://storage.openvinotoolkit.org/data/test_data/openvino_notebooks/kits19/case_00030.zip
- - name: Copy CT files
- run: |
- mkdir notebooks/ct-segmentation-quantize/kits19
- mkdir notebooks/ct-segmentation-quantize/kits19/kits19_frames
- unzip case_00030.zip
- cp -r case_00030 case_00001
- mv case_00030 notebooks/ct-segmentation-quantize/kits19/kits19_frames
- mv case_00001 notebooks/ct-segmentation-quantize/kits19/kits19_frames
- # Prevent test aborting by timeout
- - name: Download long loading models
- run: |
- mkdir notebooks/meter-reader/model
- curl -o notebooks/meter-reader/model/meter_det_model.tar.gz 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/meter-reader/meter_det_model.tar.gz'
- curl -o notebooks/meter-reader/model/meter_seg_model.tar.gz 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/meter-reader/meter_seg_model.tar.gz'
- mkdir notebooks/paddle-ocr-webcam/model
- curl -o notebooks/paddle-ocr-webcam/model/ch_PP-OCRv3_det_infer.tar 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/paddle-ocr/ch_PP-OCRv3_det_infer.tar'
- curl -o notebooks/paddle-ocr-webcam/model/ch_PP-OCRv3_rec_infer.tar 'https://storage.openvinotoolkit.org/repositories/openvino_notebooks/models/paddle-ocr/ch_PP-OCRv3_rec_infer.tar'
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- python -m pip install -r .ci/dev-requirements.txt --cache-dir pipcache
- python -m ipykernel install --user --name openvino_env
-
- # Cache OpenVINO packages. mv works cross-platform
- - name: Make pipcache directory with OpenVINO packages
- if: steps.cachepip.outputs.cache-hit != 'true'
- run: |
- mv pipcache pipcache_full
- mv pipcache_openvino pipcache
-
- # Create list of installed pip packages that can be downloaded as artifacts
- # to verify the exact environment of a specific test run
- - name: Pip freeze
- run: |
- python -m pip freeze
- python -m pip freeze > pip-freeze-${{ github.sha }}-${{matrix.os}}-${{ matrix.python }}.txt
- - name: Archive pip freeze
- uses: actions/upload-artifact@v4
- with:
- name: pip-freeze-${{matrix.os}}-${{ matrix.python }}
- path: pip-freeze-${{ github.sha }}-${{matrix.os}}-${{ matrix.python }}.txt
-
- #### End installation/preparation
-
- - name: Check install
- run: |
- python check_install.py
-
- # Patch long running cells to run faster
- - name: Patch notebooks
- run: |
- python .ci/patch_notebooks.py .
-
- # Test that JupyterLab runs without errors
- - name: Test Jupyterlab
- run: |
- jupyter lab notebooks --help
-
- # Main notebooks test. Verifies that all notebooks run without errors
- - name: Analysing with treon (Windows) python > 3.8
- if: runner.os == 'Windows' && matrix.python != '3.8'
- run: |
- python .ci/validate_notebooks.py --test_list test_notebooks.txt --ignore_list .ci/ignore_treon_win.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }} --move_notebooks_dir c:/notebooks
- shell: bash
- - name: Analysing with treon (Windows) python 3.8
- if: runner.os == 'Windows' && matrix.python == '3.8'
- run: |
- python .ci/validate_notebooks.py --test_list test_notebooks.txt --ignore_list .ci/ignore_treon_win.txt .ci/ignore_treon_py38.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }} --move_notebooks_dir c:/notebooks
- shell: bash
- - name: Analysing with treon (Linux) python > 3.8
- if: runner.os == 'Linux' && matrix.python != '3.8'
- run: |
- python .ci/validate_notebooks.py --test_list test_notebooks.txt --ignore_list .ci/ignore_treon_linux.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }}
- shell: bash
- - name: Analysing with treon (Linux) python 3.8
- if: runner.os == 'Linux' && matrix.python == '3.8'
- run: |
- python .ci/validate_notebooks.py --test_list test_notebooks.txt --ignore_list .ci/ignore_treon_linux.txt .ci/ignore_treon_py38.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }}
- shell: bash
- - name: Analysing with treon (MacOS) python > 3.8
- if: runner.os == 'MacOS' && matrix.python != '3.8'
- run: |
- python .ci/validate_notebooks.py --test_list test_notebooks.txt --ignore_list .ci/ignore_treon_mac.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }}
- shell: bash
- - name: Analysing with treon (MacOS) python 3.8
- if: runner.os == 'MacOS' && matrix.python == '3.8'
- run: |
- python .ci/validate_notebooks.py --test_list test_notebooks.txt --ignore_list .ci/ignore_treon_mac.txt .ci/ignore_treon_py38.txt --report_dir test_report/${{matrix.os}}-${{ matrix.python }}
- shell: bash
- - name: Archive test report
- uses: actions/upload-artifact@v4
- with:
- name: test_report.csv-${{matrix.os}}-${{ matrix.python }}
- path: test_report.csv
- - name: Archive notebook test report
- uses: actions/upload-artifact@v4
- with:
- name: test_report-${{matrix.os}}-${{ matrix.python }}
- path: test_report/
-
- # Show the cache after running the notebooks
- - name: Show cache
- run: |
- ls -laR ${{ env.CACHE_DIR }}
- du -sh ${{ env.CACHE_DIR }}
- if: runner.os != 'Windows'
+ runs_on: [ubuntu-20.04, windows-2019, macos-12]
+ python: ['3.10', '3.9', '3.8']
+ uses: ./.github/workflows/build_treon_reusable.yml
+ with:
+ runs_on: ${{ matrix.runs_on }}
+ python: ${{ matrix.python }}
+ test_only_changed: true
diff --git a/notebooks/detectron2-to-openvino/detectron2-to-openvino.ipynb b/notebooks/detectron2-to-openvino/detectron2-to-openvino.ipynb
index 5bc0f5c87fa..091cbf67f0e 100644
--- a/notebooks/detectron2-to-openvino/detectron2-to-openvino.ipynb
+++ b/notebooks/detectron2-to-openvino/detectron2-to-openvino.ipynb
@@ -11,7 +11,8 @@
"In this tutorial we consider how to convert and run Detectron2 models using OpenVINO™. We will use `Faster R-CNN FPN x1` model and `Mask R-CNN FPN x3` pretrained on [COCO](https://cocodataset.org/#home) dataset as examples for object detection and instance segmentation respectively.\n",
"\n",
"\n",
- "#### Table of contents:\n\n",
+ "#### Table of contents:\n",
+ "\n",
"- [Prerequisites](#Prerequisites)\n",
" - [Define helpers for PyTorch model initialization and conversion](#Define-helpers-for-PyTorch-model-initialization-and-conversion)\n",
" - [Prepare input data](#Prepare-input-data)\n",
@@ -44,7 +45,7 @@
"metadata": {},
"outputs": [],
"source": [
- "%pip install -q --extra-index-url https://download.pytorch.org/whl/cpu torch torchvision\n",
+ "%pip install -q \"torch\" \"torchvision\" \"opencv-python\" \"wheel\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"git+https://github.com/facebookresearch/detectron2.git\" --extra-index-url https://download.pytorch.org/whl/cpu\n",
"%pip install -q \"openvino>=2023.1.0\""
]
@@ -619,4 +620,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
-}
\ No newline at end of file
+}
diff --git a/selector/src/components/ContentSection/NotebooksList/NotebookCard/StatusTable/StatusTable.scss b/selector/src/components/ContentSection/NotebooksList/NotebookCard/StatusTable/StatusTable.scss
index 3fabf507ece..d35a69489b7 100644
--- a/selector/src/components/ContentSection/NotebooksList/NotebookCard/StatusTable/StatusTable.scss
+++ b/selector/src/components/ContentSection/NotebooksList/NotebookCard/StatusTable/StatusTable.scss
@@ -36,16 +36,16 @@
border: 1px solid var(--table-border-color);
border-bottom: 0;
display: grid;
- grid-template-columns: auto repeat(3, auto);
+ grid-template-columns: auto repeat(6, auto);
grid-template-rows: repeat(6, 1fr);
grid-auto-flow: row;
grid-template-areas:
- 'os-header device-header device-header device-header'
- 'os-header python-versions python-versions python-versions'
- 'os-names statuses statuses statuses'
- 'os-names statuses statuses statuses'
- 'os-names statuses statuses statuses'
- 'os-names statuses statuses statuses';
+ 'os-header cpu-device-header cpu-device-header cpu-device-header gpu-device-header gpu-device-header gpu-device-header'
+ 'os-header cpu-python-versions cpu-python-versions cpu-python-versions gpu-python-versions gpu-python-versions gpu-python-versions'
+ 'os-names cpu-statuses cpu-statuses cpu-statuses gpu-statuses gpu-statuses gpu-statuses'
+ 'os-names cpu-statuses cpu-statuses cpu-statuses gpu-statuses gpu-statuses gpu-statuses'
+ 'os-names cpu-statuses cpu-statuses cpu-statuses gpu-statuses gpu-statuses gpu-statuses'
+ 'os-names cpu-statuses cpu-statuses cpu-statuses gpu-statuses gpu-statuses gpu-statuses';
.cell {
display: flex;
@@ -70,29 +70,48 @@
.os-header,
.os-names {
- border-right: 1px solid var(--table-border-color);
-
.cell {
padding-left: 1rem;
padding-right: 1rem;
}
}
+.cpu-device-header {
+ grid-area: cpu-device-header;
+}
+
+.gpu-device-header {
+ grid-area: gpu-device-header;
+}
+
.device-header {
- grid-area: device-header;
font-weight: 500;
}
+.cpu-python-versions {
+ grid-area: cpu-python-versions;
+}
+
+.gpu-python-versions {
+ grid-area: gpu-python-versions;
+}
+
.python-versions {
- grid-area: python-versions;
font-weight: 500;
display: grid;
grid-template-columns: subgrid;
border-bottom: 1px solid var(--table-border-color);
}
+.cpu-statuses {
+ grid-area: cpu-statuses;
+}
+
+.gpu-statuses {
+ grid-area: gpu-statuses;
+}
+
.statuses {
- grid-area: statuses;
display: grid;
grid-template-columns: subgrid;
grid-template-rows: subgrid;
@@ -112,3 +131,9 @@
border-bottom: 1px solid var(--table-border-color);
}
}
+
+.device-header,
+.python-versions,
+.statuses {
+ border-left: 1px solid var(--table-border-color);
+}
diff --git a/selector/src/components/ContentSection/NotebooksList/NotebookCard/StatusTable/StatusTable.tsx b/selector/src/components/ContentSection/NotebooksList/NotebookCard/StatusTable/StatusTable.tsx
index 5c2e46bf5cb..c04ff508075 100644
--- a/selector/src/components/ContentSection/NotebooksList/NotebookCard/StatusTable/StatusTable.tsx
+++ b/selector/src/components/ContentSection/NotebooksList/NotebookCard/StatusTable/StatusTable.tsx
@@ -41,33 +41,42 @@ const getStatusIcon = (status: ValidationStatus | null): JSX.Element => {
return