Commit a1dd945

install logging as part of deploy (#566)
1 parent 866fe39 commit a1dd945

6 files changed: +44 -32 lines


resources/networks/6_node_bitcoin/node-defaults.yaml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 chain: regtest
 
 collectLogs: true
-metricsExport: false
+metricsExport: true
 
 resources: {}
 # We usually recommend not to specify default resources and to leave this as a conscious
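
With metricsExport switched on by default for this example network, deploying it now also brings up the logging stack. A minimal sketch of the check that drives this decision, mirroring the check_logging_required helper added in deploy.py below (the file path here is illustrative):

from pathlib import Path

import yaml

# Illustrative: load the defaults file changed above and evaluate the two flags
# the same way the deploy flow does.
defaults_path = Path("resources/networks/6_node_bitcoin/node-defaults.yaml")
with defaults_path.open() as f:
    defaults = yaml.safe_load(f)

# Either flag being truthy is enough to trigger the logging stack on deploy.
needs_logging = defaults.get("collectLogs", False) or defaults.get("metricsExport", False)
print(f"logging stack required: {needs_logging}")  # True after this change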

resources/scripts/install_logging.sh

Lines changed: 0 additions & 14 deletions
This file was deleted.

src/warnet/deploy.py

Lines changed: 39 additions & 0 deletions
@@ -10,6 +10,7 @@
     DEFAULTS_NAMESPACE_FILE,
     FORK_OBSERVER_CHART,
     HELM_COMMAND,
+    LOGGING_HELM_COMMANDS,
     NAMESPACES_CHART_LOCATION,
     NAMESPACES_FILE,
     NETWORK_FILE,
@@ -41,6 +42,7 @@ def deploy(directory, debug):
     directory = Path(directory)
 
     if (directory / NETWORK_FILE).exists():
+        deploy_logging_stack(directory, debug)
         deploy_network(directory, debug)
         deploy_fork_observer(directory, debug)
     elif (directory / NAMESPACES_FILE).exists():
@@ -51,6 +53,43 @@ def deploy(directory, debug):
         )
 
 
+def check_logging_required(directory: Path):
+    # check if node-defaults has logging or metrics enabled
+    default_file_path = directory / DEFAULTS_FILE
+    with default_file_path.open() as f:
+        default_file = yaml.safe_load(f)
+    if default_file.get("collectLogs", False):
+        return True
+    if default_file.get("metricsExport", False):
+        return True
+
+    # check to see if individual nodes have logging enabled
+    network_file_path = directory / NETWORK_FILE
+    with network_file_path.open() as f:
+        network_file = yaml.safe_load(f)
+    nodes = network_file.get("nodes", [])
+    for node in nodes:
+        if node.get("collectLogs", False):
+            return True
+        if node.get("metricsExport", False):
+            return True
+
+    return False
+
+
+def deploy_logging_stack(directory: Path, debug: bool):
+    if not check_logging_required(directory):
+        return
+
+    click.echo("Found collectLogs or metricsExport in network definition, Deploying logging stack")
+
+    for command in LOGGING_HELM_COMMANDS:
+        if not stream_command(command):
+            print(f"Failed to run Helm command: {command}")
+            return False
+    return True
+
+
 def deploy_fork_observer(directory: Path, debug: bool):
     network_file_path = directory / NETWORK_FILE
     with network_file_path.open() as f:
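
The LOGGING_HELM_COMMANDS constant is imported from src/warnet/constants.py and its contents are not part of this diff. Purely as a hypothetical illustration of its shape (repository names, chart names, and the namespace below are assumptions, not taken from the repo), it is a list of shell command strings that deploy_logging_stack hands to stream_command one at a time:

# Hypothetical sketch only; the real values live in src/warnet/constants.py.
LOGGING_HELM_COMMANDS = [
    "helm repo add grafana https://grafana.github.io/helm-charts",
    "helm repo add prometheus-community https://prometheus-community.github.io/helm-charts",
    "helm repo update",
    "helm upgrade --install --namespace warnet-logging --create-namespace loki grafana/loki",
    "helm upgrade --install --namespace warnet-logging prometheus prometheus-community/kube-prometheus-stack",
    "helm upgrade --install --namespace warnet-logging grafana grafana/grafana",
]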

src/warnet/network.py

Lines changed: 0 additions & 10 deletions
@@ -6,20 +6,10 @@
 
 from .bitcoin import _rpc
 from .constants import (
-    LOGGING_HELM_COMMANDS,
     NETWORK_DIR,
     SCENARIOS_DIR,
 )
 from .k8s import get_mission
-from .process import stream_command
-
-
-def setup_logging_helm() -> bool:
-    for command in LOGGING_HELM_COMMANDS:
-        if not stream_command(command):
-            print(f"Failed to run Helm command: {command}")
-            return False
-    return True
 
 
 def copy_defaults(directory: Path, target_subdir: str, source_path: Path, exclude_list: list[str]):
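
The stream_command helper from src/warnet/process.py is untouched by this commit; deploy_logging_stack only relies on it taking a command string and returning a truthy value on success. A minimal stand-in consistent with that usage, assuming shell execution with output streamed as it arrives, could look like:

import subprocess

def stream_command(command: str) -> bool:
    # Run the command in a shell and echo its output line by line.
    process = subprocess.Popen(
        command,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
    )
    assert process.stdout is not None
    for line in process.stdout:
        print(line, end="")
    return process.wait() == 0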

test/data/logging/node-defaults.yaml

Lines changed: 1 addition & 0 deletions
@@ -1,3 +1,4 @@
+collectLogs: true
 image:
   repository: bitcoindevproject/bitcoin
   pullPolicy: IfNotPresent

test/logging_test.py

Lines changed: 3 additions & 7 deletions
@@ -5,7 +5,7 @@
 import threading
 from datetime import datetime
 from pathlib import Path
-from subprocess import PIPE, Popen, run
+from subprocess import PIPE, Popen
 
 import requests
 from test_base import TestBase
@@ -22,8 +22,8 @@ def __init__(self):
 
     def run_test(self):
         try:
-            self.start_logging()
             self.setup_network()
+            self.start_logging()
             self.test_prometheus_and_grafana()
         finally:
             if self.connect_logging_process is not None:
@@ -32,10 +32,6 @@ def run_test(self):
             self.cleanup()
 
     def start_logging(self):
-        self.log.info("Running install_logging.sh")
-        # Block until complete
-        run([f"{self.scripts_dir / 'install_logging.sh'}"])
-        self.log.info("Running connect_logging.sh")
         # Stays alive in background
         self.connect_logging_process = Popen(
             [f"{self.scripts_dir / 'connect_logging.sh'}"],
@@ -51,13 +47,13 @@ def start_logging(self):
         )
         self.connect_logging_thread.daemon = True
         self.connect_logging_thread.start()
+        self.wait_for_endpoint_ready()
 
     def setup_network(self):
         self.log.info("Setting up network")
         self.log.info(self.warnet(f"deploy {self.network_dir}"))
         self.wait_for_all_tanks_status(target="running", timeout=10 * 60)
         self.wait_for_all_edges()
-        self.wait_for_endpoint_ready()
 
     def wait_for_endpoint_ready(self):
         self.log.info("Waiting for Grafana to be ready to receive API calls...")
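
The wait_for_endpoint_ready call now happens at the end of start_logging, once the connect_logging.sh port-forward is running in the background, rather than at the end of setup_network. Its body is unchanged and not shown in this diff; a sketch of the kind of readiness poll it implies, assuming Grafana is reachable on localhost:3000 and that its /api/health endpoint is used (both assumptions, not taken from the diff), could be:

import time

import requests

def wait_for_endpoint_ready(url: str = "http://localhost:3000/api/health", timeout: int = 120) -> bool:
    # Poll the health endpoint until it answers 200 OK or the timeout expires.
    start = time.time()
    while time.time() - start < timeout:
        try:
            if requests.get(url, timeout=5).status_code == 200:
                return True
        except requests.RequestException:
            pass
        time.sleep(2)
    raise TimeoutError(f"{url} not ready after {timeout} seconds")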
