Amend docker setup for local fake data #390

Open
wants to merge 13 commits into base: main
6 changes: 3 additions & 3 deletions Dockerfile
@@ -13,11 +13,11 @@ RUN pip install -r /app/requirements.txt
WORKDIR /app

# copy files over
COPY ./src /app/src
COPY nowcasting_api /app/nowcasting_api
COPY ./script /app/script

# pin coverage
RUN pip install -U coverage

# make sure 'src' is in python path - this is so imports work
ENV PYTHONPATH=${PYTHONPATH}:/app/src
# make sure 'nowcasting_api' is in python path - this is so imports work
ENV PYTHONPATH=${PYTHONPATH}:/app/nowcasting_api
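
The `PYTHONPATH` line is what lets `uvicorn main:app` resolve imports from the renamed package. A quick sanity check, sketched under the assumption that the image is tagged `nowcasting_api` locally and has no entrypoint that would swallow the command:

```bash
# Build the image, then confirm /app/nowcasting_api is on the Python path
docker build -t nowcasting_api .
docker run --rm nowcasting_api python -c "import sys; print([p for p in sys.path if 'nowcasting_api' in p])"
```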
38 changes: 23 additions & 15 deletions README.md
@@ -62,25 +62,33 @@ python3 -m venv ./venv
source venv/bin/activate
```

Install Requirements and Run
### Running the API

```bash
pip install -r requirements.txt
cd nowcasting_api && uvicorn main:app --reload
```

Warning:
If you don't have a local database set up, you can leave `DB_URL` unset and set `FAKE=True`;
the API will then return fake data. This is a work in progress,
so some routes might need to be updated
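
For example, a minimal sketch of that fake-data mode, building on the install and run commands above:

```bash
# No local database required: FAKE=True makes the API return fake data
FAKE=True uvicorn main:app --reload
```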

### Docker
🛑 Currently non-functional, needs updating to migrate database to match datamodel
#### Option 1: Docker
🟢 __Preferred method__

1. Make sure Docker is installed on your system.
2. Use `docker-compose up`
in the main directory to start up the application.
3. You will now be able to access it on `http://localhost:80`
in the main directory, with the optional `--build` flag to build the image on the first run
(see the sketch after this list). This builds the image, sets up the database, seeds some
fake data, and starts the API.
3. You will now be able to access it on `http://localhost:8000`
4. The API should restart automatically when you change the code, but the seeded fake
data is static. To seed new fake data, manually restart the API.
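
A sketch of the first and subsequent runs:

```bash
# First run: build the image, start postgres, seed fake data, launch the API
docker-compose up --build

# Later runs can reuse the built image
docker-compose up
```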

#### Option 2: Running the API with a local database

To set up the API with a local database, you will need to:
- start your own local postgres instance on your machine
- set `FAKE=1` in the `.env` file
- set `DB_URL` to your local postgres instance in the `.env` file
- run the following commands to install the required packages and start the API (creating the tables and seeding fake data is sketched after the block below):

```bash
pip install -r requirements.txt
cd nowcasting_api
uvicorn main:app --reload
```
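
Creating the tables and seeding fake data is handled by `script/fake_data.py` in the Docker setup; a sketch of running it by hand, assuming the script behaves the same against a local postgres instance:

```bash
# Assumption: script/fake_data.py creates the tables and seeds fake data,
# mirroring what the docker-compose command runs inside the container
export DB_URL=postgresql://postgres:postgres@localhost:5432/postgres
export FAKE=1
python script/fake_data.py
```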

### Running the test suite

56 changes: 56 additions & 0 deletions docker-compose-local-datamodel.yml
@@ -0,0 +1,56 @@
services:
postgres:
platform: linux/amd64
image: postgres:14.5
restart: always
environment:
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
ports:
- "5432:5432"

# datamodel:
# image: openclimatefix/nowcasting_datamodel
# environment:
# - DB_URL=postgresql://postgres:postgres@postgres:5432/postgres
# depends_on:
# - "postgres"

api:
build:
context: .
dockerfile: Dockerfile
# image: openclimatefix/nowcasting_api:0.1.7
container_name: nowcasting_api
command: bash -c "sleep 2
&& apt-get update
&& apt-get install -y cron
&& echo 'starting cron'
&& cron
&& pip install file:/app/nowcasting_datamodel
&& sleep 2
&& python script/fake_data.py
&& uvicorn nowcasting_api.main:app --reload --host 0.0.0.0 --port 8000"
ports:
- "8000:8000"
environment:
- DB_URL=postgresql://postgres:postgres@postgres:5432/postgres
# - AUTH0_DOMAIN=nowcasting-dev.eu.auth0.com
# - AUTH0_API_AUDIENCE=https://nowcasting-api-eu-auth0.com/
volumes:
- ./nowcasting_api/:/app/nowcasting_api
- ./script/:/app/script
- ../nowcasting_datamodel/:/app/nowcasting_datamodel
working_dir: /app
configs:
- source: crontab
target: /etc/cron.d/crontab
mode: 0644
depends_on:
- "postgres"
# - "datamodel"

configs:
crontab:
content: |
*/15 * * * * root PYTHONPATH=/app DB_URL=postgresql://postgres:postgres@postgres:5432/postgres /usr/local/bin/python -m script.fake_data > /proc/1/fd/1 2>/proc/1/fd/2
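
The crontab above re-seeds fake data every 15 minutes, with output redirected to the container's stdout/stderr. The same job can be run on demand; a sketch, assuming the `nowcasting_api` container name from this file:

```bash
# Re-seed fake data immediately instead of waiting for the next 15-minute tick
docker exec nowcasting_api env \
  PYTHONPATH=/app \
  DB_URL=postgresql://postgres:postgres@postgres:5432/postgres \
  python -m script.fake_data
```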
44 changes: 29 additions & 15 deletions docker-compose.yml
@@ -1,7 +1,6 @@
version: "3"

services:
postgres:
platform: linux/amd64
image: postgres:14.5
restart: always
environment:
@@ -10,31 +9,46 @@ services:
ports:
- "5432:5432"

datamodel:
image: openclimatefix/nowcasting_datamodel
environment:
- DB_URL=postgresql://postgres:postgres@postgres:5432/postgres
depends_on:
- "postgres"
# datamodel:
# image: openclimatefix/nowcasting_datamodel
# environment:
# - DB_URL=postgresql://postgres:postgres@postgres:5432/postgres
# depends_on:
# - "postgres"

api:
build:
context: .
dockerfile: Dockerfile
# image: openclimatefix/nowcasting_api:0.1.7
container_name: nowcasting_api
command: bash -c "sleep 5
command: bash -c "sleep 2
&& apt-get update
&& apt-get install -y cron
&& cron
&& sleep 2
&& python script/fake_data.py
&& uvicorn src.main:app --host 0.0.0.0 --port 8000"
&& uvicorn nowcasting_api.main:app --reload --host 0.0.0.0 --port 8000"
ports:
- 8000:8000
- "8000:8000"
environment:
- DB_URL=postgresql://postgres:postgres@postgres:5432/postgres
- AUTH0_DOMAIN=nowcasting-dev.eu.auth0.com
- AUTH0_API_AUDIENCE=https://nowcasting-api-eu-auth0.com/
# - AUTH0_DOMAIN=nowcasting-dev.eu.auth0.com
# - AUTH0_API_AUDIENCE=https://nowcasting-api-eu-auth0.com/
volumes:
- ./src/:/app/src
- ./nowcasting_api/:/app/nowcasting_api
- ./script/:/app/script
# - ../nowcasting_datamodel/:/app/nowcasting_datamodel
working_dir: /app
configs:
- source: crontab
target: /etc/cron.d/crontab
mode: 0644
depends_on:
- "postgres"
- "datamodel"
# - "datamodel"

configs:
crontab:
content: |
*/15 * * * * root PYTHONPATH=/app DB_URL=postgresql://postgres:postgres@postgres:5432/postgres /usr/local/bin/python -m script.fake_data > /proc/1/fd/1 2>/proc/1/fd/2
Empty file added nowcasting_api/__init__.py
Empty file.
File renamed without changes.
File renamed without changes.
3 changes: 1 addition & 2 deletions src/database.py → nowcasting_api/database.py
@@ -33,8 +33,6 @@
from nowcasting_datamodel.read.read_gsp import get_gsp_yield, get_gsp_yield_by_location
from nowcasting_datamodel.read.read_user import get_user as get_user_from_db
from nowcasting_datamodel.save.update import N_GSP
from sqlalchemy.orm.session import Session

from pydantic_models import (
GSPYield,
GSPYieldGroupByDatetime,
@@ -43,6 +41,7 @@
convert_forecasts_to_many_datetime_many_generation,
convert_location_sql_to_many_datetime_many_generation,
)
from sqlalchemy.orm.session import Session
from utils import filter_forecast_values, floor_30_minutes_dt, get_start_datetime


File renamed without changes.
69 changes: 48 additions & 21 deletions src/gsp.py → nowcasting_api/gsp.py
@@ -1,6 +1,7 @@
"""Get GSP boundary data from eso """

import os
from datetime import UTC, datetime
from typing import List, Optional, Union

import structlog
@@ -27,7 +28,13 @@
LocationWithGSPYields,
OneDatetimeManyForecastValues,
)
from utils import N_CALLS_PER_HOUR, N_SLOW_CALLS_PER_HOUR, format_datetime, limiter
from utils import (
N_CALLS_PER_HOUR,
N_SLOW_CALLS_PER_HOUR,
floor_30_minutes_dt,
format_datetime,
limiter,
)

GSP_TOTAL = 317

@@ -54,8 +61,8 @@ def is_fake():
response_model=Union[ManyForecasts, List[OneDatetimeManyForecastValues]],
dependencies=[Depends(get_auth_implicit_scheme())],
)
@cache_response
@limiter.limit(f"{N_SLOW_CALLS_PER_HOUR}/hour")
# @cache_response
# @limiter.limit(f"{N_SLOW_CALLS_PER_HOUR}/hour")
def get_all_available_forecasts(
request: Request,
historic: Optional[bool] = True,
@@ -93,11 +100,15 @@ def get_all_available_forecasts(
if gsp_ids == "":
gsp_ids = None

if is_fake():
if gsp_ids is None:
gsp_ids = [int(gsp_id) for gsp_id in range(1, GSP_TOTAL)]

make_fake_forecasts(gsp_ids=gsp_ids, session=session)
# if is_fake():
# if gsp_ids is None:
# gsp_ids = [int(gsp_id) for gsp_id in range(1, 10)]
#
# fake_forecasts = make_fake_forecasts(
# gsp_ids=gsp_ids,
# session=session,
# t0_datetime_utc=floor_30_minutes_dt(datetime.now(tz=UTC)),
# )

logger.info(f"Get forecasts for all gsps. The option is {historic=} for user {user}")

Expand All @@ -115,8 +126,12 @@ def get_all_available_forecasts(
creation_utc_limit=creation_limit_utc,
)

logger.info(f"Got forecasts for all gsps. The option is {historic=} for user {user}")

if not compact:
logger.info("Normalizing forecasts")
forecasts.normalize()
logger.info("Normalized forecasts")

logger.info(
f"Got {len(forecasts.forecasts)} forecasts for all gsps. "
@@ -154,8 +169,8 @@ def get_forecasts_for_a_specific_gsp_old_route(
) -> Union[Forecast, List[ForecastValue]]:
"""Redirects old API route to new route /v0/solar/GB/gsp/{gsp_id}/forecast"""

if is_fake():
make_fake_forecast(gsp_id=gsp_id, session=session)
# if is_fake():
# make_fake_forecast(gsp_id=gsp_id, session=session)

return get_forecasts_for_a_specific_gsp(
request=request,
@@ -205,8 +220,12 @@ def get_forecasts_for_a_specific_gsp(
- **creation_utc_limit**: optional, only return forecasts made before this datetime.
returns the latest forecast made 60 minutes before the target time)
"""
if is_fake():
make_fake_forecast(gsp_id=gsp_id, session=session)
# if is_fake():
# make_fake_forecast(
# gsp_id=gsp_id,
# session=session,
# t0_datetime_utc=floor_30_minutes_dt(datetime.now(tz=UTC)),
# )

logger.info(f"Get forecasts for gsp id {gsp_id} forecast of forecast with only values.")
logger.info(f"This is for user {user}")
@@ -277,11 +296,15 @@ def get_truths_for_all_gsps(
if isinstance(gsp_ids, str):
gsp_ids = [int(gsp_id) for gsp_id in gsp_ids.split(",")]

if is_fake():
if gsp_ids is None:
gsp_ids = [int(gsp_id) for gsp_id in range(1, GSP_TOTAL)]

make_fake_gsp_yields(gsp_ids=gsp_ids, session=session)
# if is_fake():
# if gsp_ids is None:
# gsp_ids = [int(gsp_id) for gsp_id in range(1, GSP_TOTAL)]
#
# make_fake_gsp_yields(
# gsp_ids=gsp_ids,
# session=session,
# t0_datetime_utc=floor_30_minutes_dt(datetime.now(tz=UTC)),
# )

logger.info(f"Get PV Live estimates values for all gsp id and regime {regime} for user {user}")

@@ -316,8 +339,8 @@ def get_truths_for_a_specific_gsp_old_route(
) -> List[GSPYield]:
"""Redirects old API route to new route /v0/solar/GB/gsp/{gsp_id}/pvlive"""

if is_fake():
make_fake_gsp_yields(gsp_ids=[gsp_id], session=session)
# if is_fake():
# make_fake_gsp_yields(gsp_ids=[gsp_id], session=session)

return get_truths_for_a_specific_gsp(
request=request,
@@ -364,8 +387,12 @@ def get_truths_for_a_specific_gsp(
If not set, defaults to N_HISTORY_DAYS env var, which if not set defaults to yesterday.
"""

if is_fake():
make_fake_forecast(gsp_id=gsp_id, session=session)
# if is_fake():
# make_fake_forecast(
# gsp_id=gsp_id,
# session=session,
# t0_datetime_utc=floor_30_minutes_dt(datetime.now(tz=UTC)),
# )

logger.info(
f"Get PV Live estimates values for gsp id {gsp_id} " f"and regime {regime} for user {user}"
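
With the per-route fake generation commented out, these endpoints rely on data seeded ahead of time by `script/fake_data.py`. A quick smoke test against the compose setup, assuming the stack is up and seeded on port 8000 and that auth is disabled locally (the Auth0 variables are commented out):

```bash
# Fetch the forecast for GSP 1 via the route named in the docstrings above
curl "http://localhost:8000/v0/solar/GB/gsp/1/forecast"
```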
2 changes: 1 addition & 1 deletion src/main.py → nowcasting_api/main.py
@@ -25,7 +25,7 @@

structlog.configure(
wrapper_class=structlog.make_filtering_bound_logger(
getattr(logging, os.getenv("LOGLEVEL", "INFO"))
getattr(logging, os.getenv("LOGLEVEL", "DEBUG"))
),
processors=[
structlog.processors.EventRenamer("message", replace_by="_event"),
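
Since `LOGLEVEL` now falls back to `DEBUG` when unset, local runs are more verbose by default. It can still be dialled back per run; a sketch, assuming the local uvicorn invocation from the README:

```bash
# Override the new DEBUG default for a quieter run
LOGLEVEL=INFO uvicorn main:app --reload
```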