move PV over to ecs task #667

Merged: 7 commits, Oct 30, 2024
124 changes: 124 additions & 0 deletions terraform/modules/services/airflow/docker-compose-0.0.3.yml
@@ -0,0 +1,124 @@
version: "3"


volumes:
data:

services:

source-init:
image: python:3.10-slim
container_name: source-init
entrypoint: >
bash -c "echo 'Making folders'
&& mkdir -p /airflow
&& mkdir -p /airflow/logs
&& mkdir -p /airflow/dags
&& mkdir -p /airflow/plugins
&& echo 'Making read and write for all'
&& chmod -vR 777 /airflow/logs
&& chmod -vR 777 /airflow/dags"
volumes:
- data:/airflow

scheduler:
# depends_on:
# - "airflowinit"
image: apache/airflow:2.6.2
container_name: airflow-scheduler
command: scheduler
restart: on-failure
ports:
- "8793:8793"
environment:
AIRFLOW__CORE__FERNET_KEY: ${FERNET_KEY}
SECRET_KEY: ${SECRET_KEY}
AIRFLOW__CORE__EXECUTOR: "LocalExecutor"
AIRFLOW__CORE__LOAD_EXAMPLES: "False"
AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: ${DB_URL_AIRFLOW}
DB_URL: ${DB_URL}
AIRFLOW_UID: "50000"
AIRFLOW__CORE__DAGS_FOLDER: "/airflow/dags"
AIRFLOW__LOGGING__BASE_LOG_FOLDER: "/airflow/logs"
AIRFLOW__LOGGING__LOGGING_LEVEL: $LOGLEVEL
AWS_DEFAULT_REGION: $AWS_DEFAULT_REGION
ENVIRONMENT: $ENVIRONMENT
ECS_SUBNET: $ECS_SUBNET
ECS_SECURITY_GROUP: $ECS_SECURITY_GROUP
user: "${AIRFLOW_UID:-50000}:0"
volumes:
- data:/airflow


webserver:
image: apache/airflow:2.6.2
container_name: airflow-webserver
command: webserver -w 4
# depends_on:
# - "airflowinit"
ports:
- 80:8080
restart: always
environment:
AIRFLOW__CORE__FERNET_KEY: ${FERNET_KEY}
SECRET_KEY: ${SECRET_KEY}
AIRFLOW__WEBSERVER__SECRET_KEY: ${SECRET_KEY}
AIRFLOW__CORE__EXECUTOR: "LocalExecutor"
AIRFLOW__CORE__LOAD_EXAMPLES: "False"
AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: ${DB_URL_AIRFLOW}
FORWARDED_ALLOW_IPS: "*"
AIRFLOW__WEBSERVER__WORKER_CLASS: "gevent"
_AIRFLOW_PATCH_GEVENT: "1"
AIRFLOW_UID: "50000"
AIRFLOW__CORE__DAGS_FOLDER: "/airflow/dags"
AIRFLOW__LOGGING__BASE_LOG_FOLDER: "/airflow/logs"
AWS_DEFAULT_REGION: $AWS_DEFAULT_REGION
ENVIRONMENT: $ENVIRONMENT
ECS_SUBNET: $ECS_SUBNET
ECS_SECURITY_GROUP: $ECS_SECURITY_GROUP
user: "${AIRFLOW_UID:-50000}:0"
volumes:
- data:/airflow


# only need to run this once
# airflowinit:
# image: apache/airflow:2.6.2
# container_name: airflow-init
# environment:
# AIRFLOW__CORE__FERNET_KEY: ${FERNET_KEY}
# SECRET_KEY: ${SECRET_KEY}
# AIRFLOW__WEBSERVER__SECRET_KEY: ${SECRET_KEY}
# AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: ${DB_URL}
# _AIRFLOW_DB_UPGRADE: 'True'
# _AIRFLOW_WWW_USER_CREATE: 'True'
# _AIRFLOW_WWW_USER_USERNAME: 'airflow'
# _AIRFLOW_WWW_USER_PASSWORD: ${PASSWORD}
# AIRFLOW__CORE__DAGS_FOLDER: "/airflow/dags"
# AIRFLOW__LOGGING__BASE_LOG_FOLDER: "/airflow/logs"
# AIRFLOW_UID: "50000"
# user: "${AIRFLOW_UID:-50000}:0"
# command: >
# bash -c "pip install apache-airflow[amazon]
# && mkdir -p /airflow/logs /airflow/dags /airflow/plugins
# && chmod -v 777 /airflow/{logs,dags}
# && airflow db init"
# volumes:
# - data:/airflow


sync-s3:
image: amazon/aws-cli
container_name: sync-s3
entrypoint: >
bash -c "while true; aws s3 sync --exact-timestamps --delete 's3://ocf-airflow-${ENVIRONMENT}-bucket/dags' '/airflow/dags';
mkdir -p /airflow/{logs,dags};
chmod -R 777 /airflow/{logs,dags}; do sleep 2; done;"
volumes:
- data:/airflow
environment:
AWS_DEFAULT_REGION: $AWS_DEFAULT_REGION
ENVIRONMENT: $ENVIRONMENT
restart: always
# depends_on:
# - "airflowinit"
8 changes: 0 additions & 8 deletions terraform/modules/services/pv/README.md

This file was deleted.

12 changes: 0 additions & 12 deletions terraform/modules/services/pv/cloudwatch.tf

This file was deleted.

70 changes: 0 additions & 70 deletions terraform/modules/services/pv/ecs.tf

This file was deleted.

18 changes: 0 additions & 18 deletions terraform/modules/services/pv/iam.tf

This file was deleted.

19 changes: 0 additions & 19 deletions terraform/modules/services/pv/secrets.tf

This file was deleted.

43 changes: 0 additions & 43 deletions terraform/modules/services/pv/variables.tf

This file was deleted.

41 changes: 31 additions & 10 deletions terraform/nowcasting/development/main.tf
@@ -152,8 +152,6 @@ resource "aws_secretsmanager_secret" "pv_consumer_secret" {
}




# 3.2
module "nwp-national" {
source = "../../modules/services/ecs_task"
@@ -329,17 +327,40 @@ module "sat_clean_up" {

# 3.6
module "pv" {
source = "../../modules/services/pv"

region = var.region
environment = local.environment
public_subnet_ids = module.networking.public_subnet_ids
database_secret_forecast = module.database.forecast-database-secret
docker_version_ss = var.pv_ss_version
iam-policy-rds-read-secret_forecast = module.database.iam-policy-forecast-db-read
source = "../../modules/services/ecs_task"

ecs-task_name = "pv"
ecs-task_type = "consumer"
ecs-task_execution_role_arn = module.ecs.ecs_task_execution_role_arn
ecs-task_size = {
cpu = 256
memory = 512
}

aws-region = var.region
aws-environment = local.environment

s3-buckets = []

container-env_vars = [
{ "name" : "SENTRY_DSN", "value" : var.sentry_dsn },
{ "name" : "ENVIRONMENT", "value" : local.environment },
{ "name" : "LOGLEVEL", "value" : "INFO"},
{ "name" : "PROVIDER", "value" : "solar_sheffield_passiv"},
]
container-secret_vars = [
{secret_policy_arn: module.pvsite_database.secret.arn,
values: ["DB_URL"]},
{secret_policy_arn: aws_secretsmanager_secret.pv_consumer_secret.arn,
values: ["SS_USER_ID", "SS_KEY", "SS_URL"]}
]
container-tag = var.pv_ss_version
container-name = "openclimatefix/pvconsumer"
container-registry = "docker.io"
container-command = []
}


# 3.7
module "gsp-consumer" {
source = "../../modules/services/ecs_task"
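
With the dedicated `pv` module removed, the PV consumer runs as a generic `ecs_task`, and the Airflow containers receive `ECS_SUBNET` and `ECS_SECURITY_GROUP` so a DAG can launch it. A minimal sketch of such a DAG task, assuming the Amazon provider is installed (as in the commented-out `airflowinit` service) and a Fargate task definition named `pv` on a cluster named `airflow-cluster`; these names and the schedule are illustrative assumptions, not values from this PR:

```python
import os
from datetime import datetime

from airflow import DAG
from airflow.providers.amazon.aws.operators.ecs import EcsRunTaskOperator

with DAG(
    dag_id="pv_consumer",
    start_date=datetime(2024, 1, 1),
    schedule="*/15 * * * *",
    catchup=False,
) as dag:
    # Launch the PV consumer as a one-off Fargate task; networking comes from
    # the ECS_SUBNET / ECS_SECURITY_GROUP variables passed to the scheduler.
    run_pv_consumer = EcsRunTaskOperator(
        task_id="run_pv_consumer",
        task_definition="pv",        # assumed task definition name
        cluster="airflow-cluster",   # assumed ECS cluster name
        launch_type="FARGATE",
        overrides={"containerOverrides": []},
        network_configuration={
            "awsvpcConfiguration": {
                "subnets": [os.environ["ECS_SUBNET"]],
                "securityGroups": [os.environ["ECS_SECURITY_GROUP"]],
                "assignPublicIp": "ENABLED",
            }
        },
    )
```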