
Commit

fixPygeoapi Conn Issue (#39)
* fixPygeoapiConnIssue

* Revert enable by default; fix env in ci/cd

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: Colton <cloftus@inst-builder-debian-12-build-build-psbhv.us-central1-a.c.gce-image-builder.internal>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
3 people authored Jan 18, 2025
1 parent 57b883e commit 0b93b44
Showing 9 changed files with 18 additions and 17 deletions.
3 changes: 2 additions & 1 deletion .env.example
@@ -1,5 +1,5 @@
 ### This file is an example, you should copy it to .env
-### It is not used or read anywhere in the application
+### It is not used or read anywhere in the application besides for ci/cd
 
 ## Uncomment to switch the value of the following variables
 OWDP_URL=http://localhost:8999
@@ -8,3 +8,4 @@ OWDP_URL=http://localhost:8999
 ## Integrations
 ## Used to send errors to Slack
 SLACK_BOT_TOKEN=""
+API_BACKEND_URL="http://localhost:8999/FROST-Server/v1.1"
1 change: 1 addition & 0 deletions docker-compose.yaml
@@ -69,6 +69,7 @@ services:
     restart: always
     environment:
       - SLACK_BOT_TOKEN
+      - API_BACKEND_URL=http://owdp-frost:8080/FROST-Server/v1.1
     deploy:
       resources:
         limits:
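Note: inside the compose network the ETL containers reach FROST through Docker's service DNS, so API_BACKEND_URL points at owdp-frost:8080 here, while .env.example keeps the localhost value for use outside Docker. A minimal connectivity sketch under that assumption (httpx is already a dependency per userCode/odwr/dag.py; this check is illustrative and not part of the commit):

    import httpx

    # Assumed in-network URL from docker-compose.yaml; from the host machine the
    # localhost value documented in .env.example applies instead.
    url = "http://owdp-frost:8080/FROST-Server/v1.1"
    resp = httpx.get(url, timeout=10)
    resp.raise_for_status()
    print(resp.status_code)  # 200 once the FROST container is up and reachable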
3 changes: 2 additions & 1 deletion docker/container_monitor/eventHandler.go
@@ -33,7 +33,8 @@ func watchAndHandleEvents(docker *client.Client, slackAPI SlackClient, tailLengt
     // Retrieve and send container logs
     logs, err := getContainerLogs(docker, containerID)
     if err != nil {
-        logger.Fatalf("Failed to retrieve logs for container %s: %v\n", containerName, err)
+        logger.Print("Failed to retrieve logs for container %s: %v\n", containerName, err)
+        return
     }
 
     logMsg := fmt.Sprintf("Last %d lines of logs for container `%s`:\n```%s```", tailLength, containerName, logs)
2 changes: 1 addition & 1 deletion docker/frost/Dockerfile
@@ -11,4 +11,4 @@ ENV plugins_coreModel_idType_location=STRING
 ENV plugins_coreModel_idType_datastream=STRING
 ENV mqtt_Enabled=false
 # log after 1000ms or 1 second queries
-ENV persistence.slowQueryThreshold=1000
+ENV persistence.slowQueryThreshold=1000
5 changes: 4 additions & 1 deletion makefile
@@ -3,9 +3,12 @@ caddy:
     cp ./Caddyfile /etc/caddy/Caddyfile
     sudo systemctl restart caddy
 
-up:
+composeUp:
     docker compose --profile production up -d
 
+composeBuild:
+    docker compose --profile production build
+
 # get rid of the sensorthings db, mainly for testing purposes
 wipedb:
     docker volume rm oregonwaterdataportal-etl_postgis_volume
7 changes: 4 additions & 3 deletions userCode/env.py
@@ -10,7 +10,8 @@
 # =================================================================
 
 from userCode.util import get_env
+import os
 
-API_BACKEND_URL = get_env(
-    "API_BACKEND_URL", fallback="http://localhost:8999/FROST-Server/v1.1"
-)
+API_BACKEND_URL = get_env("API_BACKEND_URL")
+
+RUNNING_AS_A_TEST_NOT_IN_PROD = "PYTEST_CURRENT_TEST" in os.environ
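Note: with the fallback removed, userCode/env.py fails fast at import time whenever API_BACKEND_URL is unset instead of silently defaulting to localhost, which is why the variable is now set in .env.example (for ci/cd) and docker-compose.yaml. A rough sketch of that behavior, assuming the repo root is on PYTHONPATH:

    import os

    os.environ.pop("API_BACKEND_URL", None)
    try:
        import userCode.env  # raises: get_env("API_BACKEND_URL") no longer has a fallback
    except Exception as exc:
        print(exc)  # Missing ENV var: API_BACKEND_URL

    # Setting the variable first (as docker-compose.yaml and ci/cd now do) avoids the error.
    os.environ["API_BACKEND_URL"] = "http://localhost:8999/FROST-Server/v1.1"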
4 changes: 1 addition & 3 deletions userCode/odwr/dag.py
@@ -24,11 +24,10 @@
     schedule,
 )
 import httpx
-import os
 import requests
 from typing import List, Optional, Tuple
 
-from userCode.env import API_BACKEND_URL
+from userCode.env import API_BACKEND_URL, RUNNING_AS_A_TEST_NOT_IN_PROD
 from userCode.odwr.helper_classes import (
     BatchHelper,
     get_datastream_time_range,
@@ -199,7 +198,6 @@ async def fetch_obs(datastream: Datastream) -> List[Observation]:
     # If we are running this as a test, we want to keep track of which observations we have seen so we can detect duplicates
     # We don't want to cache every single observation unless we are running as a test since the db will catch duplicates as well
     # This is a further check to be thorough
-    RUNNING_AS_A_TEST_NOT_IN_PROD = "PYTEST_CURRENT_TEST" in os.environ
     if RUNNING_AS_A_TEST_NOT_IN_PROD:
         key = (datastream.iotid, date)
         assert (
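Note: the duplicate check above now reuses the module-level RUNNING_AS_A_TEST_NOT_IN_PROD flag from userCode.env instead of recomputing it inside fetch_obs. A loose sketch of the pattern; the seen-set name and assertion message here are hypothetical, since the real assert body is cut off by the diff context:

    from userCode.env import RUNNING_AS_A_TEST_NOT_IN_PROD

    # Hypothetical names for illustration only; not the code in dag.py.
    seen_observations: set[tuple[str, str]] = set()

    def check_for_duplicates(iotid: str, date: str) -> None:
        # Track observations only under pytest; production relies on the db catching duplicates.
        if RUNNING_AS_A_TEST_NOT_IN_PROD:
            key = (iotid, date)
            assert key not in seen_observations, f"Duplicate observation: {key}"
            seen_observations.add(key)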
5 changes: 1 addition & 4 deletions userCode/odwr/lib.py
@@ -9,7 +9,6 @@
 # =================================================================
 
 import csv
-import os
 from dagster import get_dagster_logger
 import datetime
 import io
@@ -20,7 +19,7 @@
 
 
 from userCode.cache import ShelveCache
-from userCode.env import API_BACKEND_URL
+from userCode.env import API_BACKEND_URL, RUNNING_AS_A_TEST_NOT_IN_PROD
 from userCode.odwr.types import (
     BASE_OREGON_URL,
     POTENTIAL_DATASTREAMS,
@@ -159,8 +158,6 @@ def download_oregon_tsv(
     """Get the tsv data for a specific dataset for a specific station in a given date range"""
     tsv_url = generate_oregon_tsv_url(dataset, station_nbr, start_date, end_date)
 
-    RUNNING_AS_A_TEST_NOT_IN_PROD = "PYTEST_CURRENT_TEST" in os.environ
-
     if RUNNING_AS_A_TEST_NOT_IN_PROD:
         # If we are in a test, we want to use the cache to avoid making too many requests while testing
         # But in production, we always want to fetch and not cache anything to avoid extra data
5 changes: 2 additions & 3 deletions userCode/util.py
@@ -13,12 +13,11 @@
 from dagster import RunFailureSensorContext, get_dagster_logger
 import hashlib
 import os
-from typing import Any
 
 
-def get_env(key: str, fallback: Any = None) -> str:
+def get_env(key: str) -> str:
     """Fetch environment variable"""
-    val = os.environ.get(key, fallback)
+    val = os.environ.get(key)
     if val is None:
         raise Exception(f"Missing ENV var: {key}")
 
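Note: with the fallback parameter gone, get_env treats every variable it is asked for as required; optional values now have to be handled by the caller or provided in the environment itself, as done above for API_BACKEND_URL. A hedged sketch of the new contract, mirroring the hunk above:

    import os

    def get_env(key: str) -> str:
        """Fetch environment variable, raising when it is missing."""
        val = os.environ.get(key)
        if val is None:
            raise Exception(f"Missing ENV var: {key}")
        return val  # assumed: the return sits just below the visible diff context

    # Optional variables can simply bypass get_env with a caller-side default:
    slack_token = os.environ.get("SLACK_BOT_TOKEN", "")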
