|
49 | 49 | "metadata": {},
|
50 | 50 | "outputs": [],
|
51 | 51 | "source": [
|
52 |
| - "%pip install pythreejs \"openvino-dev>=2023.1.0\"" |
| 52 | + "%pip install -q pythreejs \"openvino-dev>=2023.1.0\"\n", |
| 53 | + "\n", |
| 54 | + "# Fetch `notebook_utils` module\n", |
| 55 | + "import urllib.request\n", |
| 56 | + "urllib.request.urlretrieve(\n", |
| 57 | + " url='https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/main/notebooks/utils/notebook_utils.py',\n", |
| 58 | + " filename='notebook_utils.py'\n", |
| 59 | + ")" |
53 | 60 | ]
|
54 | 61 | },
|
55 | 62 | {
|
|
63 | 70 | },
|
64 | 71 | {
|
65 | 72 | "cell_type": "code",
|
66 |
| - "execution_count": null, |
| 73 | + "execution_count": 2, |
67 | 74 | "id": "316ad889-8514-430f-baf4-4f32abd43356",
|
68 | 75 | "metadata": {},
|
69 | 76 | "outputs": [],
|
|
72 | 79 | "import sys\n",
|
73 | 80 | "import time\n",
|
74 | 81 | "from pathlib import Path\n",
|
| 82 | + "import tarfile\n", |
75 | 83 | "\n",
|
76 | 84 | "import cv2\n",
|
77 | 85 | "import ipywidgets as widgets\n",
|
78 | 86 | "import numpy as np\n",
|
79 | 87 | "from IPython.display import clear_output, display\n",
|
80 | 88 | "import openvino as ov\n",
|
81 | 89 | "\n",
|
82 |
| - "sys.path.append(\"../utils\")\n", |
| 90 | + "import torch\n", |
| 91 | + "import torch.onnx\n", |
| 92 | + "import torchvision.models as models\n", |
| 93 | + "from torch.autograd import Variable\n", |
| 94 | + "\n", |
83 | 95 | "import notebook_utils as utils\n",
|
84 | 96 | "\n",
|
85 | 97 | "sys.path.append(\"./engine\")\n",
|
|
98 | 110 | "### Download the model\n",
|
99 | 111 | "[back to top ⬆️](#Table-of-contents:)\n",
|
100 | 112 | "\n",
|
101 |
| - "We use `omz_downloader`, which is a command line tool from the `openvino-dev` package. `omz_downloader` automatically creates a directory structure and downloads the selected model." |
| 113 | + "Use `download_file`, a function from the `notebook_utils` module. It automatically creates a directory structure and downloads the selected model. This step is skipped if the archive has already been downloaded and unpacked. The chosen model comes from the `public` directory, which means it must be converted into OpenVINO Intermediate Representation (OpenVINO IR)." |
102 | 114 | ]
|
103 | 115 | },
|
104 | 116 | {
|
|
109 | 121 | "outputs": [],
|
110 | 122 | "source": [
|
111 | 123 | "# directory where model will be downloaded\n",
|
112 |
| - "base_model_dir = \"model\"\n", |
| 124 | + "base_model_dir = Path(\"model\")\n", |
113 | 125 | "\n",
|
114 | 126 | "# model name as named in Open Model Zoo\n",
|
115 |
| - "model_name = \"human-pose-estimation-3d-0001\"\n", |
| 127 | + "short_model_name = \"human-pose-estimation-3d\"\n", |
| 128 | + "model_name = f\"{short_model_name}-0001\"\n", |
116 | 129 | "# selected precision (FP32, FP16)\n",
|
117 | 130 | "precision = \"FP32\"\n",
|
118 | 131 | "\n",
|
119 |
| - "BASE_MODEL_NAME = f\"{base_model_dir}/public/{model_name}/{model_name}\"\n", |
| 132 | + "MAIN_MODEL_PATH=f\"{base_model_dir}/public/{model_name}\"\n", |
| 133 | + "BASE_MODEL_NAME = f\"{MAIN_MODEL_PATH}/{model_name}\"\n", |
120 | 134 | "model_path = Path(BASE_MODEL_NAME).with_suffix(\".pth\")\n",
|
121 | 135 | "onnx_path = Path(BASE_MODEL_NAME).with_suffix(\".onnx\")\n",
|
122 | 136 | "\n",
|
123 |
| - "ir_model_path = f\"model/public/{model_name}/{precision}/{model_name}.xml\"\n", |
124 |
| - "model_weights_path = f\"model/public/{model_name}/{precision}/{model_name}.bin\"\n", |
| 137 | + "archive_name=Path(f\"{short_model_name}.tar.gz\")\n", |
| 138 | + "model_url = f\"https://storage.openvinotoolkit.org/repositories/open_model_zoo/public/2022.1/{model_name}/{archive_name}\"\n", |
| 139 | + "\n", |
| 140 | + "downloaded_model_path= Path(MAIN_MODEL_PATH) / archive_name\n", |
| 141 | + "if not downloaded_model_path.exists():\n", |
| 142 | + " utils.download_file(model_url, downloaded_model_path.name, downloaded_model_path.parent)\n", |
| 143 | + "\n", |
| 144 | + "ir_model_path = Path(f\"model/public/{model_name}/{precision}/{model_name}.xml\")\n", |
| 145 | + "model_weights_path = Path(f\"model/public/{model_name}/{precision}/{model_name}.bin\")\n", |
125 | 146 | "\n",
|
126 | 147 | "if not model_path.exists():\n",
|
127 |
| - " download_command = (\n", |
128 |
| - " f\"omz_downloader \" f\"--name {model_name} \" f\"--output_dir {base_model_dir}\"\n", |
129 |
| - " )\n", |
130 |
| - " ! $download_command" |
| 148 | + " with tarfile.open(downloaded_model_path) as file:\n", |
| 149 | + " file.extractall(MAIN_MODEL_PATH)" |
131 | 150 | ]
|
132 | 151 | },
|
133 | 152 | {
|
|
145 | 164 | "cell_type": "code",
|
146 | 165 | "execution_count": null,
|
147 | 166 | "id": "c9bdfdee-c2ef-4710-96c1-8a6a896a8cba",
|
148 |
| - "metadata": {}, |
| 167 | + "metadata": { |
| 168 | + "scrolled": true |
| 169 | + }, |
149 | 170 | "outputs": [],
|
150 | 171 | "source": [
|
151 | 172 | "if not onnx_path.exists():\n",
|
|
204 | 225 | },
|
205 | 226 | {
|
206 | 227 | "cell_type": "code",
|
207 |
| - "execution_count": null, |
| 228 | + "execution_count": 6, |
208 | 229 | "id": "92a04102-aebf-4976-874b-b98dca97ec48",
|
209 | 230 | "metadata": {},
|
210 | 231 | "outputs": [],
|
|
257 | 278 | },
|
258 | 279 | {
|
259 | 280 | "cell_type": "code",
|
260 |
| - "execution_count": null, |
| 281 | + "execution_count": 8, |
261 | 282 | "id": "08f8055b-a6cf-4003-8232-6f73a86d6034",
|
262 | 283 | "metadata": {},
|
263 | 284 | "outputs": [],
|
|
306 | 327 | },
|
307 | 328 | {
|
308 | 329 | "cell_type": "code",
|
309 |
| - "execution_count": null, |
| 330 | + "execution_count": 9, |
310 | 331 | "id": "22fd3e08-ed3b-44ac-bd07-4a80130d6681",
|
311 | 332 | "metadata": {},
|
312 | 333 | "outputs": [],
|
|
394 | 415 | },
|
395 | 416 | {
|
396 | 417 | "cell_type": "code",
|
397 |
| - "execution_count": null, |
| 418 | + "execution_count": 10, |
398 | 419 | "id": "3be526d0-75ad-4bd1-85b1-ca8185eca918",
|
399 | 420 | "metadata": {
|
400 | 421 | "tags": []
|
|
587 | 608 | },
|
588 | 609 | {
|
589 | 610 | "cell_type": "code",
|
590 |
| - "execution_count": null, |
| 611 | + "execution_count": 11, |
591 | 612 | "id": "3f82e298-5912-48c7-90b5-339aea3c177d",
|
592 | 613 | "metadata": {
|
593 | 614 | "tags": []
|
|
603 | 624 | "\n",
|
604 | 625 | "run_pose_estimation(source=source, flip=isinstance(source, int), use_popup=False)"
|
605 | 626 | ]
|
| 627 | + }, |
| 628 | + { |
| 629 | + "cell_type": "code", |
| 630 | + "execution_count": null, |
| 631 | + "id": "e3c77b7c-5211-4715-ad48-4ec38a82941c", |
| 632 | + "metadata": {}, |
| 633 | + "outputs": [], |
| 634 | + "source": [] |
606 | 635 | }
|
607 | 636 | ],
|
608 | 637 | "metadata": {
|
|
621 | 650 | "name": "python",
|
622 | 651 | "nbconvert_exporter": "python",
|
623 | 652 | "pygments_lexer": "ipython3",
|
624 |
| - "version": "3.8.10" |
625 |
| - }, |
626 |
| - "openvino_notebooks": { |
627 |
| - "imageUrl": "https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/406-3D-pose-estimation-webcam/406-3D-pose-estimation.gif?raw=true", |
628 |
| - "tags": { |
629 |
| - "categories": [ |
630 |
| - "Live Demos" |
631 |
| - ], |
632 |
| - "libraries": [], |
633 |
| - "other": [], |
634 |
| - "tasks": [ |
635 |
| - "Pose Estimation" |
636 |
| - ] |
637 |
| - } |
| 653 | + "version": "3.11.5" |
638 | 654 | },
|
639 | 655 | "widgets": {
|
640 | 656 | "application/vnd.jupyter.widget-state+json": {
|
|
0 commit comments