import asyncio
import os
-import shutil
import signal

from typing import Optional, Dict, List

from ..utils import to_absolute_path
from ..model import MLModel
from ..settings import Settings
-from ..env import Environment, compute_hash
+from ..env import Environment, compute_hash_of_file, compute_hash_of_string
from ..registry import model_initialiser

from .errors import EnvironmentNotFound
@@ -76,11 +75,52 @@ async def _handle_worker_stop(self, signum, frame):
        )

    async def _get_or_create(self, model: MLModel) -> InferencePool:
+        if (
+            model.settings.parameters is not None
+            and model.settings.parameters.environment_path
+        ):
+            pool = await self._get_or_create_with_existing_env(
+                model.settings.parameters.environment_path
+            )
+        else:
+            pool = await self._get_or_create_with_tarball(model)
+        return pool
+
+    async def _get_or_create_with_existing_env(
+        self, environment_path: str
+    ) -> InferencePool:
+        """
+        Creates or returns the InferencePool for a model that uses an existing
+        python environment.
+        """
+        expanded_environment_path = os.path.abspath(
+            os.path.expanduser(os.path.expandvars(environment_path))
+        )
+        logger.info(f"Using environment {expanded_environment_path}")
+        env_hash = await compute_hash_of_string(expanded_environment_path)
+        if env_hash in self._pools:
+            return self._pools[env_hash]
+        env = Environment(
+            env_path=expanded_environment_path,
+            env_hash=env_hash,
+            delete_env=False,
+        )
+        pool = InferencePool(
+            self._settings, env=env, on_worker_stop=self._on_worker_stop
+        )
+        self._pools[env_hash] = pool
+        return pool
+
+    async def _get_or_create_with_tarball(self, model: MLModel) -> InferencePool:
+        """
+        Creates or returns the InferencePool for a model that uses a
+        tarball as python environment.
+        """
        env_tarball = _get_env_tarball(model)
        if not env_tarball:
            return self._default_pool

-        env_hash = await compute_hash(env_tarball)
+        env_hash = await compute_hash_of_file(env_tarball)
        if env_hash in self._pools:
            return self._pools[env_hash]

@@ -223,5 +263,3 @@ async def _close_pool(self, env_hash: Optional[str] = None):

        if env_hash:
            del self._pools[env_hash]
-            env_path = self._get_env_path(env_hash)
-            shutil.rmtree(env_path)
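For illustration, the new branch in _get_or_create is driven entirely by the model's settings: when parameters.environment_path is set, the pool is keyed by a hash of the expanded path rather than by a tarball's contents. Below is a minimal sketch of a model opting into that branch, using MLServer's ModelSettings/ModelParameters classes; the model name, implementation and paths are placeholders, not part of this diff.

from mlserver.settings import ModelSettings, ModelParameters

# Hypothetical settings: environment_path points at a pre-built environment,
# so the registry should route this model through
# _get_or_create_with_existing_env() instead of the tarball branch.
model_settings = ModelSettings(
    name="my-model",
    implementation="mlserver.model.MLModel",  # placeholder implementation
    parameters=ModelParameters(
        uri="./model.joblib",
        # "~" and env vars get expanded via expanduser/expandvars/abspath
        environment_path="~/envs/my-existing-env",
    ),
)

Models that leave environment_path unset keep the previous behaviour: with no tarball they land in the shared default pool, otherwise in a pool keyed by compute_hash_of_file(env_tarball).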
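A side effect of splitting compute_hash into two helpers is that the two kinds of environments are cached under different keys: tarball pools by the archive's contents, existing-environment pools by the expanded path string. The following is only an assumption about what those helpers might look like; the real implementations live in MLServer's ..env module and may differ.

import hashlib

# Assumed behaviour: hash a file's bytes, so identical tarballs reuse one pool.
async def compute_hash_of_file(path: str) -> str:
    sha256 = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            sha256.update(chunk)
    return sha256.hexdigest()

# Assumed behaviour: hash a plain string (here, the expanded environment path),
# so every model pointing at the same directory shares one pool.
async def compute_hash_of_string(value: str) -> str:
    return hashlib.sha256(value.encode("utf-8")).hexdigest()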