use k8s client in commander to get pods instead of warnet.json
pinheadmz committed Nov 15, 2024
1 parent 71e217c commit 131689f
Showing 5 changed files with 67 additions and 28 deletions.
resources/charts/commander/templates/pod.yaml (3 changes: 2 additions & 1 deletion)
@@ -23,7 +23,7 @@ spec:
mountPath: /shared
containers:
- name: {{ .Chart.Name }}
-image: python:3.12-slim
+image: bitcoindevproject/commander
imagePullPolicy: IfNotPresent
command: ["/bin/sh", "-c"]
args:
@@ -35,3 +35,4 @@ spec:
volumes:
- name: shared-volume
emptyDir: {}
+serviceAccountName: {{ include "commander.fullname" . }}
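
Note: the image swap and the new serviceAccountName work together. The commander container now needs the kubernetes Python package baked into its image, and the service account gives the pod an RBAC identity (defined in rbac.yaml below) that the API server can authorize. A minimal sketch of the standard in-cluster credential mount that config.load_incluster_config() relies on; these paths are well-known Kubernetes defaults, not something defined by this chart:

    # Standard service account mount inside any pod
    SA_DIR = "/var/run/secrets/kubernetes.io/serviceaccount"

    def read_sa_file(name: str) -> str:
        # token, ca.crt and namespace are projected here by the kubelet
        with open(f"{SA_DIR}/{name}") as f:
            return f.read().strip()

    namespace = read_sa_file("namespace")  # commander.py reads this file directly (see diff below)
    token = read_sa_file("token")          # used implicitly by config.load_incluster_config()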
resources/charts/commander/templates/rbac.yaml (35 changes: 35 additions & 0 deletions)
@@ -0,0 +1,35 @@
apiVersion: v1
kind: ServiceAccount
metadata:
  name: {{ include "commander.fullname" . }}
  namespace: {{ .Release.Namespace }}
  labels:
    app.kubernetes.io/name: {{ .Chart.Name }}
---
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
  name: {{ include "commander.fullname" . }}
  namespace: {{ .Release.Namespace }}
  labels:
    app.kubernetes.io/name: {{ .Chart.Name }}
rules:
  - apiGroups: [""]
    resources: ["pods"]
    verbs: ["get", "list", "watch"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: {{ include "commander.fullname" . }}
  namespace: {{ .Release.Namespace }}
  labels:
    app.kubernetes.io/name: {{ .Chart.Name }}
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: Role
  name: {{ include "commander.fullname" . }}
subjects:
  - kind: ServiceAccount
    name: {{ include "commander.fullname" . }}
    namespace: {{ .Release.Namespace }}
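
The Role grants only read access ("get", "list", "watch") to pods in the release namespace, which is exactly what the new commander.py bootstrap needs. A hedged way to verify the binding from inside the running commander pod, using the standard kubernetes client's SelfSubjectAccessReview (this check is not part of the commit, just a debugging aid):

    from kubernetes import client, config

    config.load_incluster_config()

    with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace") as f:
        namespace = f.read().strip()

    # Ask the API server whether the pod's service account may list pods here
    review = client.V1SelfSubjectAccessReview(
        spec=client.V1SelfSubjectAccessReviewSpec(
            resource_attributes=client.V1ResourceAttributes(
                namespace=namespace, verb="list", resource="pods"
            )
        )
    )
    result = client.AuthorizationV1Api().create_self_subject_access_review(review)
    print("can list pods:", result.status.allowed)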
resources/images/commander/Dockerfile (5 changes: 5 additions & 0 deletions)
@@ -0,0 +1,5 @@
# Use an official Python runtime as the base image
FROM python:3.12-slim

# Python dependencies
RUN pip install --no-cache-dir kubernetes
resources/scenarios/commander.py (33 changes: 25 additions & 8 deletions)
@@ -13,6 +13,7 @@
import tempfile
from typing import Dict

+from kubernetes import client, config
from test_framework.authproxy import AuthServiceProxy
from test_framework.p2p import NetworkThread
from test_framework.test_framework import (
@@ -23,20 +23,37 @@
from test_framework.test_node import TestNode
from test_framework.util import PortSeed, get_rpc_proxy

-WARNET_FILE = "/shared/warnet.json"
-
# hard-coded deterministic lnd credentials
ADMIN_MACAROON_HEX = "0201036c6e6402f801030a1062beabbf2a614b112128afa0c0b4fdd61201301a160a0761646472657373120472656164120577726974651a130a04696e666f120472656164120577726974651a170a08696e766f69636573120472656164120577726974651a210a086d616361726f6f6e120867656e6572617465120472656164120577726974651a160a076d657373616765120472656164120577726974651a170a086f6666636861696e120472656164120577726974651a160a076f6e636861696e120472656164120577726974651a140a057065657273120472656164120577726974651a180a067369676e6572120867656e657261746512047265616400000620b17be53e367290871681055d0de15587f6d1cd47d1248fe2662ae27f62cfbdc6"
# Don't worry about lnd's self-signed certificates
INSECURE_CONTEXT = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
INSECURE_CONTEXT.check_hostname = False
INSECURE_CONTEXT.verify_mode = ssl.CERT_NONE

-try:
-    with open(WARNET_FILE) as file:
-        WARNET = json.load(file)
-except Exception:
-    WARNET = []
+# Figure out what namespace we are in
+with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace") as f:
+    NAMESPACE = f.read().strip()
+
+# Use the in-cluster k8s client to determine what pods we have access to
+config.load_incluster_config()
+sclient = client.CoreV1Api()
+pods = sclient.list_namespaced_pod(namespace=NAMESPACE)
+
+WARNET = []
+for pod in pods.items:
+    if "mission" not in pod.metadata.labels or pod.metadata.labels["mission"] != "tank":
+        continue
+
+    WARNET.append(
+        {
+            "tank": pod.metadata.name,
+            "chain": pod.metadata.labels["chain"],
+            "rpc_host": pod.status.pod_ip,
+            "rpc_port": int(pod.metadata.labels["RPCPort"]),
+            "rpc_user": "user",
+            "rpc_password": pod.metadata.labels["rpcpassword"],
+        }
+    )


# Ensure that all RPC calls are made with brand new http connections
@@ -160,7 +178,6 @@ def setup(self):
coveragedir=self.options.coveragedir,
)
node.rpc_connected = True
-node.init_peers = tank["init_peers"]

# Tank might not even have an ln node, that's
# not our problem, it'll just 404 if scenario tries
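
The bootstrap above filters pods client-side by checking the mission label inside the loop. An equivalent approach, assuming tank pods carry the mission=tank, chain, RPCPort and rpcpassword labels shown in the diff, is to let the API server filter with a label selector, which the same list_namespaced_pod call supports; a sketch, not part of the commit:

    from kubernetes import client, config

    config.load_incluster_config()

    with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace") as f:
        namespace = f.read().strip()

    # Server-side filtering: only pods labeled mission=tank are returned
    pods = client.CoreV1Api().list_namespaced_pod(
        namespace=namespace, label_selector="mission=tank"
    )

    warnet = [
        {
            "tank": pod.metadata.name,
            "chain": pod.metadata.labels["chain"],
            "rpc_host": pod.status.pod_ip,
            "rpc_port": int(pod.metadata.labels["RPCPort"]),
            "rpc_user": "user",
            "rpc_password": pod.metadata.labels["rpcpassword"],
        }
        for pod in pods.items
    ]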
src/warnet/control.py (19 changes: 0 additions & 19 deletions)
@@ -252,24 +252,7 @@ def run(
if additional_args and ("--help" in additional_args or "-h" in additional_args):
return subprocess.run([sys.executable, scenario_path, "--help"])

-# Collect tank data for warnet.json
name = f"commander-{scenario_name.replace('_', '')}-{int(time.time())}"
-tankpods = get_mission("tank")
-tanks = [
-    {
-        "tank": tank.metadata.name,
-        "chain": tank.metadata.labels["chain"],
-        "rpc_host": tank.status.pod_ip,
-        "rpc_port": int(tank.metadata.labels["RPCPort"]),
-        "rpc_user": "user",
-        "rpc_password": tank.metadata.labels["rpcpassword"],
-        "init_peers": [],
-    }
-    for tank in tankpods
-]
-
-# Encode tank data for warnet.json
-warnet_data = json.dumps(tanks).encode()

# Create in-memory buffer to store python archive instead of writing to disk
archive_buffer = io.BytesIO()
@@ -343,8 +326,6 @@ def filter(path):
# upload scenario files and network data to the init container
wait_for_init(name, namespace=namespace)
if write_file_to_container(
name, "init", "/shared/warnet.json", warnet_data, namespace=namespace
) and write_file_to_container(
name, "init", "/shared/archive.pyz", archive_data, namespace=namespace
):
print(f"Successfully uploaded scenario data to commander: {scenario_name}")
Expand Down
