Experiment views #15

Merged
merged 4 commits on Apr 18, 2024
4 changes: 3 additions & 1 deletion .github/workflows/pull_request_closed.yml
@@ -54,7 +54,9 @@ jobs:
-e GH_TOKEN=${GH_TOKEN} \
-w /workspace \
${ADMIN_IMAGE} \
src/uno/scripts/ci-admin pr-closed \
src/uno/scripts/ci-admin \
-c ${{ github.run_id }} \
pr-closed \
-r ${{ github.repository }} \
-N ${{ github.event_name == 'pull_request' && github.event.pull_request.number || inputs.pr-number }} \
${{ (github.event_name == 'pull_request' && github.event.pull_request.merged || inputs.pr-merged) && '-m' || '' }}
4 changes: 3 additions & 1 deletion .github/workflows/release_cleanup.yml
@@ -42,7 +42,9 @@ jobs:
-e GH_TOKEN=${GH_TOKEN} \
-w /workspace \
${ADMIN_IMAGE} \
src/uno/scripts/ci-admin nightly-cleanup \
src/uno/scripts/ci-admin \
-c ${{ github.run_id }} \
nightly-cleanup \
-r ${{ github.repository }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
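
Both workflow invocations above were split so that the new global `-c ${{ github.run_id }}` option is passed before the subcommand (`pr-closed`, `nightly-cleanup`): with argparse, options defined on the top-level parser must precede the subparser name. A minimal sketch of that ordering, using only the option and subcommand names visible in this pull request (the run id value is invented):

    import argparse

    # Sketch of the top-level parser layout assumed by the workflow changes;
    # the real parser is built by define_parser() in scripts/ci-admin.
    parser = argparse.ArgumentParser(prog="ci-admin")
    parser.add_argument("-c", "--current", help="Calling workflow id.", default=None)
    subparsers = parser.add_subparsers(dest="action")
    subparsers.add_parser("pr-closed")
    subparsers.add_parser("nightly-cleanup")

    # The global option must come before the subcommand, as in the workflows:
    args = parser.parse_args(["-c", "1234567890", "nightly-cleanup"])
    assert args.current == "1234567890"
    assert args.action == "nightly-cleanup"
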
71 changes: 53 additions & 18 deletions scripts/ci-admin
@@ -232,19 +232,28 @@ def fzf_filter(
###############################################################################
# Make a GH API call, filter the result with jq, and parse the resulting JSON
###############################################################################
def gh_api(url: str, jq_filter: str, default: object = None) -> dict | list | None:
def gh_api(
url: str,
jq_filter: str | None = None,
default: object = None,
noop: bool = False,
query: bool = True,
) -> dict | list | None:
cmd = [
"gh api "
f"-H 'Accept: {GitHubApiAccept}' "
f"-H 'X-GitHub-Api-Version: {GitHubApiVersion}' "
f"{url} | "
f"jq '{jq_filter}'"
f"{url}"
]
if jq_filter:
cmd[0] += f" | jq '{jq_filter}'"
log.command(cmd, shell=True, check=True)
result = subprocess.run(cmd, shell=True, check=True, stdout=subprocess.PIPE)
if not result.stdout:
return default
return json.loads(result.stdout.decode())
if not noop or query:
result = subprocess.run(cmd, shell=True, check=True, stdout=subprocess.PIPE)
if not result.stdout:
return default
return json.loads(result.stdout.decode())
return default


###############################################################################
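
The reworked `gh_api()` makes `jq_filter` optional (the `| jq '...'` pipe is appended only when a filter is given) and adds `noop`/`query` flags: the command is executed only when `not noop or query` holds, so read-only queries always run while mutating calls marked `query=False` are skipped in no-op mode and return `default`. A small illustration of that guard; the helper name below is not part of the diff:

    def should_execute(noop: bool, query: bool) -> bool:
        # Mirrors the `if not noop or query` guard in gh_api().
        return not noop or query

    assert should_execute(noop=False, query=True)        # normal read: runs
    assert should_execute(noop=False, query=False)       # normal mutation: runs
    assert should_execute(noop=True, query=True)         # read in no-op mode: still runs
    assert not should_execute(noop=True, query=False)    # mutation in no-op mode: skipped
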
@@ -277,14 +286,15 @@ class DataObject:
return obj_cls(*build_args)

@classmethod
def parse(cls, obj_cls: type[NamedTuple], package_line: str) -> object | None:
def parse(cls, obj_cls: type[NamedTuple], obj_line: str) -> object | None:
cls.data_object(obj_cls)
try:
parse_re = cls.Parsers[obj_cls]
fields = parse_re.findall(package_line)[0]
fields = parse_re.findall(obj_line)
fields = fields[0]
return cls.build(obj_cls, *fields)
except Exception:
log.error("failed to parse {}: '{}'", obj_cls.__qualname__, package_line)
log.error("failed to parse {}: '{}'", obj_cls.__qualname__, obj_line)
traceback.print_exc()
return None

Expand Down Expand Up @@ -327,6 +337,8 @@ def build(obj_cls: type[NamedTuple], *args) -> object | None:
# GitHub Workflow Run data object (parsed from query result)
###############################################################################
class WorkflowRun(NamedTuple):
repo: str
head_repo: str
id: int
created_at: datetime
updated_at: datetime
@@ -335,7 +347,8 @@ class WorkflowRun(NamedTuple):
outcome: str
name: str

DatetimeFields = [1, 2]
Current = ""
DatetimeFields = [3, 4]
SelectQuery = """\
def symbol:
sub(""; "")? // "NULL" |
@@ -347,6 +360,8 @@ def symbol:

[ .workflow_runs[]
| [
.repository.full_name,
.head_repository.full_name,
.id,
.created_at,
.updated_at,
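
Prepending `.repository.full_name` and `.head_repository.full_name` to each row shifts the remaining columns right by two, which is why `DatetimeFields` changed from `[1, 2]` to `[3, 4]` above and why the JSON path in `select()` later compares `entry[2]` (the run id) against `WorkflowRun.Current`. An illustrative row under the new layout (all values invented):

    row = [
        "example-org/uno",        # 0: repo
        "fork-owner/uno",         # 1: head_repo
        1234567890,               # 2: id         (checked against WorkflowRun.Current)
        "2024-04-18T10:00:00Z",   # 3: created_at (DatetimeFields)
        "2024-04-18T10:05:00Z",   # 4: updated_at (DatetimeFields)
        # ...status, outcome, name follow as before
    ]
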
@@ -361,6 +376,17 @@ def symbol:
def __str__(self) -> str:
return DataObject.str(self)

@property
def incomplete(self) -> bool:
return self.status in ["in_progress", "queued", "requested", "waiting", "pending"]

###############################################################################
#
###############################################################################
def cancel(self, noop: bool = False) -> None:
url = f"/repos/{self.repo}/actions/runs/{self.id}/cancel"
gh_api(url, query=False, noop=noop)

###############################################################################
# Query the list of workflow runs from a repository.
# If no filter is specified, present the user with `fzf` to select targets.
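
`cancel()` goes through `gh_api(url, query=False, noop=noop)`, so in no-op mode the request is suppressed. One detail worth double-checking: `gh_api()` builds a plain `gh api <url>` command, and `gh api` sends a GET unless a method is set with `-X/--method` (or request fields are added), while the `.../cancel` endpoint expects a POST. A hedged sketch of how the call could be made explicit, assuming a hypothetical `method` parameter that this diff does not add:

    # Hypothetical variant, assuming gh_api() grew a `method` argument that is
    # forwarded as `gh api -X <method> ...`; not part of this pull request.
    def cancel(self, noop: bool = False) -> None:
        url = f"/repos/{self.repo}/actions/runs/{self.id}/cancel"
        gh_api(url, method="POST", query=False, noop=noop)
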
@@ -375,7 +401,7 @@ def symbol:
repo: str,
filter: str | None = None,
input: str | None = None,
runs: list[str] | None = None,
runs: list["WorkflowRun"] | None = None,
prompt: str | None = None,
noninteractive: bool = False,
) -> list["WorkflowRun"]:
@@ -386,7 +412,7 @@ def symbol:
for sline in [line.decode().strip()]
if sline
for run in [parse(cls, sline)]
if run
if run and run.id != cls.Current
]

if runs:
@@ -408,7 +434,9 @@ def symbol:
if result.stdout:
result = result.stdout.decode()
run_entries = json.loads(result)
target_runs.extend(DataObject.build(cls, *entry) for entry in run_entries)
target_runs.extend(
DataObject.build(cls, *entry) for entry in run_entries if entry[2] != cls.Current
)
if prompt is None:
prompt = "available runs"
sorted_runs = partial(sorted, key=lambda r: r.created_at)
@@ -427,20 +455,22 @@ def symbol:
filter: str | None = None,
noop: bool = False,
input: str | None = None,
runs: list[str] | None = None,
runs: list["WorkflowRun"] | None = None,
prompt: str | None = None,
) -> list["WorkflowRun"]:
def _delete_run(run: WorkflowRun):
if run.outcome == "NULL":
run.cancel(noop=noop)
delete_cmd = ["gh", "api", "-X", "DELETE", f"/repos/{repo}/actions/runs/{run.id}"]
log.command(delete_cmd, check=True)
subprocess.run(delete_cmd, check=True)
if not noop:
subprocess.run(delete_cmd, check=True)

deleted = []
if prompt is None:
prompt = "runs to delete"
for run in cls.select(repo, filter, input, runs, prompt=prompt):
if not noop:
_delete_run(run)
_delete_run(run)
deleted.append(run)
if noop:
log.warning("[{}] {} runs selected but not actually deleted", repo, len(deleted))
@@ -765,6 +795,9 @@ def nightly_cleanup(repo: str, noop: bool = False) -> None:
for run in runs:
if run in preserved:
continue
if run.incomplete:
log.warning("[{}] not removing incomplete run: {}", repo, run)
continue
removed.append(run)
return runs

@@ -822,6 +855,8 @@ def define_parser() -> argparse.ArgumentParser:
"-i", "--interactive", help="Run in interactive mode.", default=False, action="store_true"
)

parser.add_argument("-c", "--current", help="Calling workflow id.", default=None)

subparsers = parser.add_subparsers(dest="action")

parser_pr_closed = subparsers.add_parser(
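
The new `-c/--current` option and the `WorkflowRun.Current` sentinel (default `""`) combine so that cleanup never targets the workflow run that is performing it: both code paths in `select()` drop the calling run (`run.id != cls.Current` and `entry[2] != cls.Current`). The glue between the parsed argument and the class attribute is not part of this diff; a minimal sketch of the presumed wiring in the script's entry point:

    # Hypothetical wiring (not shown in this diff): propagate the calling run id
    # so select()/delete() skip the run that invoked the cleanup. Note that the
    # comparisons only work if Current uses the same representation (str vs int)
    # as the parsed run ids.
    args = define_parser().parse_args()
    if args.current:
        WorkflowRun.Current = args.current
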
84 changes: 13 additions & 71 deletions test/integration/conftest.py
@@ -17,8 +17,7 @@
import subprocess
from typing import Generator, Callable

from uno.test.integration import Experiment, Host, HostRole, Network
from uno.core.time import Timer
from uno.test.integration import Experiment, Host, Network


@pytest.fixture
@@ -30,92 +29,52 @@ def experiment(experiment_loader: Callable[[], None]) -> Generator[Experiment, N
def the_hosts(experiment: Experiment) -> list[Host]:
if experiment is None:
return []
return sorted(
(h for h in experiment.hosts if h.role == HostRole.HOST), key=lambda h: h.container_name
)
return experiment.host_hosts


@pytest.fixture
def the_routers(experiment: Experiment) -> list[Host]:
if experiment is None:
return []
return sorted(
(h for h in experiment.hosts if h.role == HostRole.ROUTER), key=lambda h: h.container_name
)
return experiment.router_hosts


@pytest.fixture
def the_cells(experiment: Experiment) -> list[Host]:
if experiment is None:
return []
return sorted(
(h for h in experiment.hosts if h.role == HostRole.CELL), key=lambda h: h.container_name
)
return experiment.cell_hosts


@pytest.fixture
def the_registry(experiment: Experiment) -> list[Host]:
def the_registry(experiment: Experiment) -> Host:
if experiment is None:
return []
return next(h for h in experiment.hosts if h.role == HostRole.REGISTRY)
return experiment.registry_host


@pytest.fixture
def the_particles(experiment: Experiment) -> list[Host]:
if experiment is None:
return []
return sorted(
(h for h in experiment.hosts if h.role == HostRole.PARTICLE), key=lambda h: h.container_name
)
return experiment.particle_hosts


@pytest.fixture
def the_fully_routed_cell_networks(
experiment: Experiment, the_cells: list[Host]
) -> Generator[set[Network], None, None]:
def the_fully_routed_cell_networks(experiment: Experiment) -> Generator[set[Network], None, None]:
if experiment is None:
yield set()
return

def _check_all_ready() -> bool:
if experiment is None:
return

for cell in the_cells:
if not cell.local_router_ready:
return False
return True

timer = Timer(
experiment.config["uvn_fully_routed_timeout"],
0.5,
_check_all_ready,
experiment.log,
"waiting for UVN to become consistent",
"UVN not consistent yet",
"UVN fully routed",
"UVN failed to reach consistency",
)
timer.wait()
yield experiment.uvn_networks
else:
experiment.wait_for_fully_routed_networks()
yield experiment.uvn_networks


@pytest.fixture
def the_agents(experiment: Experiment) -> Generator[dict[Host, subprocess.Popen], None, None]:
if experiment is None:
yield {}
return

import contextlib

with contextlib.ExitStack() as stack:
agents = {}
for host in experiment.hosts:
if host.role != HostRole.CELL:
continue
# agents.append(host.uno_agent())
agents[host] = stack.enter_context(host.uno_agent())
yield agents
yield from experiment.agent_processes


@pytest.fixture
@@ -125,22 +84,5 @@ def the_fully_routed_agents(
if experiment is None:
yield {}
return

def _check_all_consistent() -> bool:
for agent in the_agents:
if not agent.cell_fully_routed:
return False
return True

timer = Timer(
experiment.config["uvn_fully_routed_timeout"],
0.5,
_check_all_consistent,
experiment.log,
"waiting for UVN to become consistent",
"UVN not consistent yet",
"UVN fully routed",
"UVN failed to reach consistency",
)
timer.wait()
experiment.wait_for_fully_routed_agents(the_agents)
yield the_agents
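
The fixtures above now delegate to helpers on `Experiment` (`host_hosts`, `router_hosts`, `cell_hosts`, `registry_host`, `particle_hosts`, `agent_processes`, `uvn_networks`, `wait_for_fully_routed_networks()`, `wait_for_fully_routed_agents()`), whose definitions are not among the files shown here. Judging from the code the fixtures used to inline, a property like `host_hosts` presumably amounts to the following sketch (the class body is illustrative; the real implementation may differ):

    from uno.test.integration import Host, HostRole

    class Experiment:  # sketch only; the real class lives in uno.test.integration
        hosts: list[Host]

        @property
        def host_hosts(self) -> list[Host]:
            # Same filter-and-sort the old fixture performed inline.
            return sorted(
                (h for h in self.hosts if h.role == HostRole.HOST),
                key=lambda h: h.container_name,
            )

Similarly, `wait_for_fully_routed_networks()` and `wait_for_fully_routed_agents()` presumably wrap the `Timer`-based polling that the old fixtures set up by hand.
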
3 changes: 3 additions & 0 deletions uno/core/log.py
@@ -406,6 +406,9 @@ def exception(self, e):
exc_info=sys.exc_info(), # if self.level >= self.Level.info else None
)

def cmdexec(self, cmd_args) -> None:
self.debug("+ " + " ".join(["{}"] * len(cmd_args)), cmd_args)

def command(self, cmd_args, rc, stdout=None, stderr=None, display=False):
if rc != 0:
self.error("command failed: {}", " ".join(map(str, cmd_args)))
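
`cmdexec()` logs a command at debug level in shell-trace style (a leading `+` followed by the arguments). A standalone illustration of the string it is meant to produce; note that the method passes `cmd_args` as a single positional argument while the format string has one `{}` per element, so it relies on the logger's `debug()` spreading the sequence across the placeholders (that implementation is not shown in this diff):

    cmd_args = ["gh", "api", "-X", "DELETE", "/repos/example-org/uno/actions/runs/1"]
    fmt = "+ " + " ".join(["{}"] * len(cmd_args))
    print(fmt.format(*cmd_args))
    # -> + gh api -X DELETE /repos/example-org/uno/actions/runs/1
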
2 changes: 2 additions & 0 deletions uno/test/integration/__init__.py
@@ -2,10 +2,12 @@
from .host import Host
from .network import Network
from .experiment import Experiment
from .experiment_view import ExperimentView

__all__ = [
HostRole,
Host,
Network,
Experiment,
ExperimentView,
]