Commit 689d4c7

use k8s client in commander to get pods instead of warnet.json

1 parent 71e217c · commit 689d4c7

5 files changed (+65, -29 lines)

resources/charts/commander/templates/pod.yaml

Lines changed: 2 additions & 1 deletion

@@ -23,7 +23,7 @@ spec:
          mountPath: /shared
  containers:
    - name: {{ .Chart.Name }}
-     image: python:3.12-slim
+     image: bitcoindevproject/commander
      imagePullPolicy: IfNotPresent
      command: ["/bin/sh", "-c"]
      args:
@@ -35,3 +35,4 @@ spec:
  volumes:
    - name: shared-volume
      emptyDir: {}
+ serviceAccountName: {{ include "commander.fullname" . }}
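
Setting serviceAccountName on the commander pod is what makes in-cluster API calls possible: Kubernetes mounts that account's token, CA bundle, and namespace into the container, which is exactly what config.load_incluster_config() and the namespace lookup in commander.py rely on. A minimal illustrative sketch of those standard mount paths (an illustration of the mechanism, not code from this commit):

from pathlib import Path

# Standard service account mount created by Kubernetes for pods that run
# under a ServiceAccount (the commander pod after this change).
SA_DIR = Path("/var/run/secrets/kubernetes.io/serviceaccount")

token = (SA_DIR / "token").read_text()                   # bearer token for API server requests
ca_bundle = SA_DIR / "ca.crt"                            # CA cert used to verify the API server
namespace = (SA_DIR / "namespace").read_text().strip()   # namespace the pod is running in

print(f"running in namespace {namespace}")
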
Lines changed: 35 additions & 0 deletions

@@ -0,0 +1,35 @@
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: {{ include "commander.fullname" . }}
+  namespace: {{ .Release.Namespace }}
+  labels:
+    app.kubernetes.io/name: {{ .Chart.Name }}
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: Role
+metadata:
+  name: {{ include "commander.fullname" . }}
+  namespace: {{ .Release.Namespace }}
+  labels:
+    app.kubernetes.io/name: {{ .Chart.Name }}
+rules:
+  - apiGroups: [""]
+    resources: ["pods"]
+    verbs: ["get", "list", "watch"]
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: RoleBinding
+metadata:
+  name: {{ include "commander.fullname" . }}
+  namespace: {{ .Release.Namespace }}
+  labels:
+    app.kubernetes.io/name: {{ .Chart.Name }}
+roleRef:
+  apiGroup: rbac.authorization.k8s.io
+  kind: Role
+  name: {{ include "commander.fullname" . }}
+subjects:
+  - kind: ServiceAccount
+    name: {{ include "commander.fullname" . }}
+    namespace: {{ .Release.Namespace }}
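
The Role is scoped to read-only pod access (get, list, watch) in the release namespace, which is all the scenario needs to discover tanks. One hedged way to verify the binding from inside the commander pod, using the same kubernetes client the image ships, is a SelfSubjectAccessReview; this check is an illustration, not part of the commit:

from kubernetes import client, config

# Authenticate as whatever ServiceAccount the pod runs under.
config.load_incluster_config()

with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace") as f:
    namespace = f.read().strip()

# Ask the API server whether this identity may list pods in its own namespace.
review = client.V1SelfSubjectAccessReview(
    spec=client.V1SelfSubjectAccessReviewSpec(
        resource_attributes=client.V1ResourceAttributes(
            namespace=namespace, verb="list", resource="pods"
        )
    )
)
result = client.AuthorizationV1Api().create_self_subject_access_review(review)
print("can list pods:", result.status.allowed)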

resources/images/commander/Dockerfile

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+# Use an official Python runtime as the base image
+FROM python:3.12-slim
+
+# Python dependencies
+RUN pip install --no-cache-dir kubernetes

resources/scenarios/commander.py

Lines changed: 23 additions & 9 deletions

@@ -11,6 +11,7 @@
import ssl
import sys
import tempfile
+from kubernetes import client, config
from typing import Dict

from test_framework.authproxy import AuthServiceProxy
@@ -23,21 +24,35 @@
from test_framework.test_node import TestNode
from test_framework.util import PortSeed, get_rpc_proxy

-WARNET_FILE = "/shared/warnet.json"
-
# hard-coded deterministic lnd credentials
ADMIN_MACAROON_HEX = "0201036c6e6402f801030a1062beabbf2a614b112128afa0c0b4fdd61201301a160a0761646472657373120472656164120577726974651a130a04696e666f120472656164120577726974651a170a08696e766f69636573120472656164120577726974651a210a086d616361726f6f6e120867656e6572617465120472656164120577726974651a160a076d657373616765120472656164120577726974651a170a086f6666636861696e120472656164120577726974651a160a076f6e636861696e120472656164120577726974651a140a057065657273120472656164120577726974651a180a067369676e6572120867656e657261746512047265616400000620b17be53e367290871681055d0de15587f6d1cd47d1248fe2662ae27f62cfbdc6"
# Don't worry about lnd's self-signed certificates
INSECURE_CONTEXT = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
INSECURE_CONTEXT.check_hostname = False
INSECURE_CONTEXT.verify_mode = ssl.CERT_NONE

-try:
-    with open(WARNET_FILE) as file:
-        WARNET = json.load(file)
-except Exception:
-    WARNET = []
-
+# Figure out what namespace we are in
+with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r") as f:
+    NAMESPACE = f.read().strip()
+
+# Use the in-cluster k8s client to determine what pods we have access to
+config.load_incluster_config()
+sclient = client.CoreV1Api()
+pods = sclient.list_namespaced_pod(namespace=NAMESPACE)
+
+WARNET = []
+for pod in pods.items:
+    if "mission" not in pod.metadata.labels or pod.metadata.labels["mission"] != "tank":
+        continue
+
+    WARNET.append({
+        "tank": pod.metadata.name,
+        "chain": pod.metadata.labels["chain"],
+        "rpc_host": pod.status.pod_ip,
+        "rpc_port": int(pod.metadata.labels["RPCPort"]),
+        "rpc_user": "user",
+        "rpc_password": pod.metadata.labels["rpcpassword"]
+    })

# Ensure that all RPC calls are made with brand new http connections
def auth_proxy_request(self, method, path, postdata):
@@ -160,7 +175,6 @@ def setup(self):
                coveragedir=self.options.coveragedir,
            )
            node.rpc_connected = True
-            node.init_peers = tank["init_peers"]

            # Tank might not even have an ln node, that's
            # not our problem, it'll just 404 if scenario tries
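
The new startup code lists every pod in the namespace and then filters for the mission=tank label on the client side. An equivalent variant (a sketch assuming the same labels as the commit, not what the commit ships) pushes that filter to the API server with a label selector, so only tank pods ever come back:

from kubernetes import client, config

config.load_incluster_config()
v1 = client.CoreV1Api()

with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace") as f:
    namespace = f.read().strip()

# Server-side filter: the API returns only pods labeled mission=tank.
tank_pods = v1.list_namespaced_pod(namespace=namespace, label_selector="mission=tank")

WARNET = [
    {
        "tank": pod.metadata.name,
        "chain": pod.metadata.labels["chain"],
        "rpc_host": pod.status.pod_ip,
        "rpc_port": int(pod.metadata.labels["RPCPort"]),
        "rpc_user": "user",
        "rpc_password": pod.metadata.labels["rpcpassword"],
    }
    for pod in tank_pods.items
]

Either way, the Role's list permission on pods is sufficient; label selection happens within that grant.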

src/warnet/control.py

Lines changed: 0 additions & 19 deletions

@@ -252,24 +252,7 @@ def run(
    if additional_args and ("--help" in additional_args or "-h" in additional_args):
        return subprocess.run([sys.executable, scenario_path, "--help"])

-    # Collect tank data for warnet.json
    name = f"commander-{scenario_name.replace('_', '')}-{int(time.time())}"
-    tankpods = get_mission("tank")
-    tanks = [
-        {
-            "tank": tank.metadata.name,
-            "chain": tank.metadata.labels["chain"],
-            "rpc_host": tank.status.pod_ip,
-            "rpc_port": int(tank.metadata.labels["RPCPort"]),
-            "rpc_user": "user",
-            "rpc_password": tank.metadata.labels["rpcpassword"],
-            "init_peers": [],
-        }
-        for tank in tankpods
-    ]
-
-    # Encode tank data for warnet.json
-    warnet_data = json.dumps(tanks).encode()

    # Create in-memory buffer to store python archive instead of writing to disk
    archive_buffer = io.BytesIO()
@@ -343,8 +326,6 @@ def filter(path):
    # upload scenario files and network data to the init container
    wait_for_init(name, namespace=namespace)
    if write_file_to_container(
-        name, "init", "/shared/warnet.json", warnet_data, namespace=namespace
-    ) and write_file_to_container(
        name, "init", "/shared/archive.pyz", archive_data, namespace=namespace
    ):
        print(f"Successfully uploaded scenario data to commander: {scenario_name}")
