Commit f95edc4

Merge pull request #3 from FBruzzesi/dockering
Dockering
2 parents 0a20e4c + 070f2e5 commit f95edc4

13 files changed (+152, -13 lines)

.dockerignore (+34)

@@ -0,0 +1,34 @@
+# Folders
+.ipynb_checkpoints/
+.mypy_cache/
+.ruff_cache/
+.venv/
+data/
+docker/
+nbs/
+models/
+db/
+
+.git/
+.github/
+docker/
+docs/
+kubernetes/
+
+# Files
+.dockerignore
+.gitignore
+.pre-commit-config.yaml
+mkdocs.yaml
+README.md
+Makefile
+
+# Various
+**/__pycache__
+**/*.egg-info
+**.pyc
+**.pytest_cache
+**.ipynb
+**.bat
+**.egg-info
+**/build

.gitignore (+5)

@@ -160,3 +160,8 @@ cython_debug/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
+
+# Databases and Models
+*.db
+models/*
+!models/.gitkeep

db/.gitkeep

Whitespace-only changes.

docker-compose.yml (+22)

@@ -0,0 +1,22 @@
+services:
+
+  backend:
+    build:
+      context: .
+      dockerfile: docker/backend.dockerfile
+    ports:
+      - "8081:8081"
+    volumes:
+      - ./db:/app/db
+      - ./models:/app/models # Mount machine learning model folder
+
+  frontend:
+    build:
+      context: .
+      dockerfile: docker/frontend.dockerfile
+    ports:
+      - "8501:8501"
+    depends_on:
+      - backend
+    environment:
+      - ENV=docker-compose

docker/backend.dockerfile (+37)

@@ -0,0 +1,37 @@
+# Use a Python image with uv pre-installed
+FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim
+
+# Install the project into `/app`
+WORKDIR /app
+
+# Enable bytecode compilation
+ENV UV_COMPILE_BYTECODE=1
+
+# Copy from the cache instead of linking since it's a mounted volume
+ENV UV_LINK_MODE=copy
+
+# Install the project's dependencies using the lockfile and settings
+RUN --mount=type=cache,target=/root/.cache/uv \
+    --mount=type=bind,source=uv.lock,target=uv.lock \
+    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
+    uv sync --frozen --no-install-project --extra backend
+
+# Then, add the rest of the project source code and install it
+# Installing separately from its dependencies allows optimal layer caching
+ADD . /app
+RUN --mount=type=cache,target=/root/.cache/uv \
+    uv sync --frozen --extra backend
+
+# Place executables in the environment at the front of the path
+ENV PATH="/app/.venv/bin:$PATH"
+
+# Reset the entrypoint, don't invoke `uv`
+ENTRYPOINT []
+
+EXPOSE 8081
+
+# Run the FastAPI application by default
+# `fastapi run` serves in production mode (no hot-reloading, unlike `fastapi dev`)
+# `fastapi run` binds to 0.0.0.0 by default, so the API is reachable from outside the container
+ENTRYPOINT ["fastapi", "run", "src/moin_moin/backend/api.py", "--port", "8081"]
+

docker/frontend.dockerfile (+36)

@@ -0,0 +1,36 @@
+# Use a Python image with uv pre-installed
+FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim
+
+# Install the project into `/app`
+WORKDIR /app
+
+# Enable bytecode compilation
+ENV UV_COMPILE_BYTECODE=1
+
+# Copy from the cache instead of linking since it's a mounted volume
+ENV UV_LINK_MODE=copy
+
+# Install the project's dependencies using the lockfile and settings
+RUN --mount=type=cache,target=/root/.cache/uv \
+    --mount=type=bind,source=uv.lock,target=uv.lock \
+    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
+    uv sync --frozen --no-install-project --extra frontend
+
+# Then, add the rest of the project source code and install it
+# Installing separately from its dependencies allows optimal layer caching
+ADD . /app
+RUN --mount=type=cache,target=/root/.cache/uv \
+    uv sync --frozen --extra frontend
+
+# Place executables in the environment at the front of the path
+ENV PATH="/app/.venv/bin:$PATH"
+
+# Reset the entrypoint, don't invoke `uv`
+ENTRYPOINT []
+
+EXPOSE 8501
+
+# Run the Streamlit application by default
+# `--server.port=8501` matches the exposed port
+# `--server.address=0.0.0.0` allows access from outside the container
+ENTRYPOINT ["streamlit", "run", "src/moin_moin/frontend/app.py", "--server.port=8501", "--server.address=0.0.0.0"]

models/.gitkeep

Whitespace-only changes.

pyproject.toml (+4, -2)

@@ -35,8 +35,10 @@ torch = [{ index = "pytorch-cpu"}]
 moin-moin = "moin_moin:main"
 
 [build-system]
-requires = ["hatchling"]
-build-backend = "hatchling.build"
+requires = [
+  "setuptools >= 40.9.0",
+]
+build-backend = "setuptools.build_meta"
 
 
 [tool.ruff]

src/moin_moin/backend/_ml.py (+1, -1)

@@ -17,7 +17,7 @@ class ClipModel:
     """
 
     def __init__(self: Self, text_options: dict[str, str]) -> None:
-        self.model = SentenceTransformer("clip-ViT-B-32")
+        self.model = SentenceTransformer("clip-ViT-B-32", cache_folder="models")
         self.model.eval()
         self.labels = list(text_options.keys())
         self.text_embedding = self.model.encode(list(text_options.values()))
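
Passing cache_folder="models" makes sentence-transformers store the downloaded CLIP weights in the repository's models/ folder, which docker-compose bind-mounts into the container, so the weights survive image rebuilds. Below is a minimal sketch of how such a model can score an image against the configured text options; the label descriptions, image path, and the scoring step are illustrative assumptions, not the project's actual predict code:

from PIL import Image
from sentence_transformers import SentenceTransformer, util

# Load CLIP once; weights are cached under ./models (bind-mounted in docker-compose)
model = SentenceTransformer("clip-ViT-B-32", cache_folder="models")
model.eval()

# Hypothetical label -> description mapping (the real one lives in the backend)
text_options = {
    "Police Department": "a photo of a crime scene or traffic incident",
    "Fire Department": "a photo of a fire or smoke",
}
labels = list(text_options.keys())
text_embedding = model.encode(list(text_options.values()))

# CLIP embeds images and text into the same space; pick the closest description
image_embedding = model.encode(Image.open("example.jpg"))  # hypothetical image path
scores = util.cos_sim(image_embedding, text_embedding)
print(labels[int(scores.argmax())])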

src/moin_moin/backend/api.py (+1, -1)

@@ -33,7 +33,7 @@
 
 ML_MODEL: dict[str, ClipModel] = {}
 
-DB_NAME: Final[str] = "sqlite:///moin-moin.db"
+DB_NAME: Final[str] = "sqlite:///db/moin-moin.db"
 ENGINE: Final[Engine] = create_engine(DB_NAME)
 
 INSTITUTIONS = {
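
The SQLite URL is relative, so inside the backend container it resolves against the WORKDIR /app and lands in /app/db/moin-moin.db, i.e. the bind-mounted ./db folder from docker-compose. A quick sanity check, assuming SQLAlchemy is installed and the script runs from the project root with a db/ folder present (the query is purely illustrative):

from sqlalchemy import create_engine, text

engine = create_engine("sqlite:///db/moin-moin.db")  # three slashes: relative path ./db/moin-moin.db
with engine.connect() as conn:
    # List the tables SQLite knows about; empty on a fresh database file
    tables = conn.execute(text("SELECT name FROM sqlite_master WHERE type='table'")).fetchall()
print(tables)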

src/moin_moin/frontend/_conf.py (+5, -2)

@@ -1,9 +1,12 @@
 from __future__ import annotations
 
+import os
 from typing import Final
 
-HOST: Final[str] = "http://127.0.0.1"
-PORT: Final[int] = 8081
+ENV = os.getenv("ENV", "local")
+HOST = "localhost" if ENV in ("local", "dev") else "backend"
+BACKEND_URL: Final[str] = f"http://{HOST}:8081"
+
 INSTITUTION_MAPPING: Final[dict[str, str]] = {
     "Police Department": "#48b5a5",
     "Fire Department": "#7b64ab",

src/moin_moin/frontend/_overview.py (+2, -3)

@@ -4,11 +4,10 @@
 import pandas as pd
 import streamlit as st
 
-from moin_moin.frontend._conf import HOST
+from moin_moin.frontend._conf import BACKEND_URL
 from moin_moin.frontend._conf import INSTITUTION_MAPPING
-from moin_moin.frontend._conf import PORT
 
-result = httpx.get(f"{HOST}:{PORT}/load-records").json()
+result = httpx.get(f"{BACKEND_URL}/load-records").json()
 
 location_df = (
     pd.DataFrame(result)

src/moin_moin/frontend/_user_input.py (+5, -4)

@@ -10,8 +10,7 @@
 from geopy.geocoders import Nominatim
 from PIL import Image
 
-from moin_moin.frontend._conf import HOST
-from moin_moin.frontend._conf import PORT
+from moin_moin.frontend._conf import BACKEND_URL
 
 GEOLOCATOR = Nominatim(user_agent="location_sharing_app")
 
@@ -64,20 +63,22 @@ def main() -> None:
     buffer.seek(0)
 
     record_id = httpx.post(
-        f"{HOST}:{PORT}/save",
+        f"{BACKEND_URL}/save",
         data={
             "latitude": getattr(loc, "latitude", None),
             "longitude": getattr(loc, "longitude", None),
             "notes": notes,
             "tags": ",".join(tags) if tags else "",
         },
         files={"image_bytes": ("image.jpg", buffer, "image/jpeg")},
+        timeout=10,
     ).json()["record-id"]
 
     result = httpx.post(
-        f"{HOST}:{PORT}/predict",
+        f"{BACKEND_URL}/predict",
         data={"record_id": record_id},
         files={"file": ("image.jpg", buffer, "image/jpeg")},
+        timeout=10,
     ).json()["prediction"]
 
     st.header(f"Assigned Institution: {result}")
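
The explicit timeout=10 raises httpx's 5-second default, which the first /predict call can exceed while the backend loads the CLIP model. An alternative sketch that configures the timeout once on a shared client instead of per request; the client object here is illustrative, not part of the current code:

import httpx

from moin_moin.frontend._conf import BACKEND_URL

# One client for all frontend calls; the 10 s timeout applies to connect, read, write and pool
client = httpx.Client(base_url=BACKEND_URL, timeout=10)

response = client.get("/load-records")
response.raise_for_status()
records = response.json()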
