Skip to content

Commit

Permalink
install logging as part of deploy (#566)
Browse files Browse the repository at this point in the history
  • Loading branch information
m3dwards authored Sep 11, 2024
1 parent 866fe39 commit a1dd945
Show file tree
Hide file tree
Showing 6 changed files with 44 additions and 32 deletions.
2 changes: 1 addition & 1 deletion resources/networks/6_node_bitcoin/node-defaults.yaml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
chain: regtest

collectLogs: true
metricsExport: false
metricsExport: true

resources: {}
# We usually recommend not to specify default resources and to leave this as a conscious
Expand Down
14 changes: 0 additions & 14 deletions resources/scripts/install_logging.sh

This file was deleted.

39 changes: 39 additions & 0 deletions src/warnet/deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
DEFAULTS_NAMESPACE_FILE,
FORK_OBSERVER_CHART,
HELM_COMMAND,
LOGGING_HELM_COMMANDS,
NAMESPACES_CHART_LOCATION,
NAMESPACES_FILE,
NETWORK_FILE,
Expand Down Expand Up @@ -41,6 +42,7 @@ def deploy(directory, debug):
directory = Path(directory)

if (directory / NETWORK_FILE).exists():
deploy_logging_stack(directory, debug)
deploy_network(directory, debug)
deploy_fork_observer(directory, debug)
elif (directory / NAMESPACES_FILE).exists():
Expand All @@ -51,6 +53,43 @@ def deploy(directory, debug):
)


def check_logging_required(directory: Path) -> bool:
    """Return True if the network definition requires the logging stack.

    Logging is required when either the node-defaults file or any
    individual node in the network file enables ``collectLogs`` or
    ``metricsExport``.

    Args:
        directory: Network directory containing DEFAULTS_FILE and NETWORK_FILE.

    Returns:
        True if the logging stack should be deployed, False otherwise.

    Raises:
        OSError: if either YAML file cannot be opened.
    """
    # check if node-defaults has logging or metrics enabled
    default_file_path = directory / DEFAULTS_FILE
    with default_file_path.open() as f:
        # safe_load returns None for an empty document; fall back to {}
        default_file = yaml.safe_load(f) or {}
    if default_file.get("collectLogs", False) or default_file.get("metricsExport", False):
        return True

    # check to see if individual nodes have logging enabled
    network_file_path = directory / NETWORK_FILE
    with network_file_path.open() as f:
        network_file = yaml.safe_load(f) or {}
    return any(
        node.get("collectLogs", False) or node.get("metricsExport", False)
        for node in network_file.get("nodes", [])
    )


def deploy_logging_stack(directory: Path, debug: bool):
    """Deploy the logging stack via Helm when the network requires it.

    Runs each command in LOGGING_HELM_COMMANDS in order, stopping at the
    first failure. Does nothing when neither node-defaults nor any node
    enables collectLogs/metricsExport.

    Args:
        directory: Network directory passed through to check_logging_required.
        debug: Unused here; kept for signature consistency with the other
            deploy_* helpers in this module.

    Returns:
        False if a Helm command fails, True if all succeed, None when no
        logging is required.
    """
    if not check_logging_required(directory):
        return

    click.echo("Found collectLogs or metricsExport in network definition, Deploying logging stack")

    for command in LOGGING_HELM_COMMANDS:
        if not stream_command(command):
            # click.echo for consistency with the message above (was a bare print)
            click.echo(f"Failed to run Helm command: {command}")
            return False
    return True


def deploy_fork_observer(directory: Path, debug: bool):
network_file_path = directory / NETWORK_FILE
with network_file_path.open() as f:
Expand Down
10 changes: 0 additions & 10 deletions src/warnet/network.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,20 +6,10 @@

from .bitcoin import _rpc
from .constants import (
LOGGING_HELM_COMMANDS,
NETWORK_DIR,
SCENARIOS_DIR,
)
from .k8s import get_mission
from .process import stream_command


def setup_logging_helm() -> bool:
    """Run the logging-stack Helm commands in sequence.

    Stops at the first command that fails and reports it.

    Returns:
        True when every command succeeds, False on the first failure.
    """
    succeeded = True
    for command in LOGGING_HELM_COMMANDS:
        if stream_command(command):
            continue
        print(f"Failed to run Helm command: {command}")
        succeeded = False
        break
    return succeeded


def copy_defaults(directory: Path, target_subdir: str, source_path: Path, exclude_list: list[str]):
Expand Down
1 change: 1 addition & 0 deletions test/data/logging/node-defaults.yaml
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
collectLogs: true
image:
repository: bitcoindevproject/bitcoin
pullPolicy: IfNotPresent
Expand Down
10 changes: 3 additions & 7 deletions test/logging_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import threading
from datetime import datetime
from pathlib import Path
from subprocess import PIPE, Popen, run
from subprocess import PIPE, Popen

import requests
from test_base import TestBase
Expand All @@ -22,8 +22,8 @@ def __init__(self):

def run_test(self):
try:
self.start_logging()
self.setup_network()
self.start_logging()
self.test_prometheus_and_grafana()
finally:
if self.connect_logging_process is not None:
Expand All @@ -32,10 +32,6 @@ def run_test(self):
self.cleanup()

def start_logging(self):
self.log.info("Running install_logging.sh")
# Block until complete
run([f"{self.scripts_dir / 'install_logging.sh'}"])
self.log.info("Running connect_logging.sh")
# Stays alive in background
self.connect_logging_process = Popen(
[f"{self.scripts_dir / 'connect_logging.sh'}"],
Expand All @@ -51,13 +47,13 @@ def start_logging(self):
)
self.connect_logging_thread.daemon = True
self.connect_logging_thread.start()
self.wait_for_endpoint_ready()

def setup_network(self):
    """Deploy the test network and block until it is fully up."""
    self.log.info("Setting up network")
    # Run `warnet deploy <network_dir>` and log whatever it returns
    self.log.info(self.warnet(f"deploy {self.network_dir}"))
    # Wait up to 10 minutes for every tank to report "running"
    self.wait_for_all_tanks_status(target="running", timeout=10 * 60)
    self.wait_for_all_edges()
    # Logging is now deployed as part of `deploy`, so Grafana readiness
    # is checked here rather than in start_logging()
    self.wait_for_endpoint_ready()

def wait_for_endpoint_ready(self):
self.log.info("Waiting for Grafana to be ready to receive API calls...")
Expand Down

0 comments on commit a1dd945

Please sign in to comment.