Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ jobs:
# Docker tags replace '/' with '-', so feature/fabll_part2 -> feature-fabll_part2
- uses: atopile/setup-atopile@v1.8
with:
docker-tag: feature-fabll_part2
docker-tag: main

- name: Print atopile + KiCad versions
run: |
Expand Down
146 changes: 127 additions & 19 deletions scripts/check_status.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#! uv run
#!/usr/bin/env python3
# /// script
# dependencies = [
# "typer>=0.12",
Expand All @@ -7,12 +7,26 @@
# "pandas>=2.0.0",
# ]
# ///
"""
Check build status of all packages using the atopile backend.

Usage:
# Using system ato:
uv run scripts/check_status.py

# Using atopile from a specific directory (e.g., atopile_reorg):
uv run scripts/check_status.py --atopile-dir ~/github/atopile_reorg

# Filter packages by regex:
uv run scripts/check_status.py --package-regex "adi-.*"
"""

import re
import time
import subprocess
from concurrent.futures import ProcessPoolExecutor, as_completed
from pathlib import Path
from typing import Optional
import typer
from rich.console import Console, Group
from rich.table import Table
Expand All @@ -22,9 +36,28 @@

console = Console()

# Global to hold the ato command path (set in main, used by workers)
_ato_cmd: list[str] = ["ato"]


def get_ato_command(atopile_dir: Optional[Path] = None) -> list[str]:
    """
    Resolve which ``ato`` executable to invoke.

    Prefers the ``ato`` binary inside *atopile_dir*'s virtualenv when the
    directory is given and the binary exists; otherwise falls back to the
    system-wide ``ato`` found on PATH (printing a warning if the venv
    binary was requested but missing).
    """
    if atopile_dir is None:
        return ["ato"]

    candidate = atopile_dir / ".venv" / "bin" / "ato"
    if candidate.exists():
        return [str(candidate)]

    console.print(
        f"[yellow]Warning: {candidate} not found, falling back to system ato[/yellow]"
    )
    return ["ato"]


def build_and_verify(
package_dir: Path, args: tuple
package_dir: Path, args: tuple, ato_cmd: list[str]
) -> tuple[
str,
bool,
Expand All @@ -38,15 +71,16 @@ def build_and_verify(
str | None,
]:
"""
Runs 'ato build --keep-picked-parts' then 'ato package verify' for a package.
Returns: (package_name, build_success, verify_success, build_seconds)
Runs 'ato build --keep-picked-parts' then 'ato package verify -s' for a package.
Returns: (package_name, build_success, verify_success, build_seconds,
build_rc, build_stdout, build_stderr, verify_rc, verify_stdout, verify_stderr)
"""
package_name = package_dir.name

# Build
build_start = time.perf_counter()
build_proc = subprocess.run(
["ato", "build", "--keep-picked-parts"] + list(args),
ato_cmd + ["build", "--keep-picked-parts"] + list(args),
cwd=package_dir,
capture_output=True,
text=True,
Expand All @@ -62,7 +96,7 @@ def build_and_verify(
verify_stderr: str | None = None
if build_success:
verify_proc = subprocess.run(
["ato", "package", "verify"],
ato_cmd + ["package", "verify", "-s"],
cwd=package_dir,
capture_output=True,
text=True,
Expand All @@ -87,22 +121,72 @@ def build_and_verify(
)


def _worker_init(ato_cmd: list[str]):
"""Initialize worker process with ato command."""
global _ato_cmd
_ato_cmd = ato_cmd


def _worker_build(
    args: tuple[Path, tuple]
) -> tuple[str, bool, bool, float, int, str, str, int | None, str | None, str | None]:
    """
    Unpack a ``(package_dir, build_args)`` pair and run the build.

    Executed inside a pool worker; relies on the module-global ``_ato_cmd``
    that ``_worker_init`` set when the worker process started.
    """
    pkg_dir, extra_args = args
    return build_and_verify(pkg_dir, extra_args, _ato_cmd)


@app.command()
def main(
args: list[str] = typer.Argument(None, help="Arguments to pass to ato build"),
package_regex: str = typer.Option(None, help="Regex to filter packages to build"),
atopile_dir: Optional[Path] = typer.Option(
None,
"--atopile-dir",
"-a",
help="Path to atopile directory (uses its .venv/bin/ato)",
),
max_workers: int = typer.Option(
None, "--workers", "-w", help="Max parallel workers (default: CPU count)"
),
):
"""Builds and verifies all packages in the 'packages' directory in parallel."""
"""
Builds all packages in the 'packages' directory in parallel.

Uses the atopile backend from the specified directory, or system ato if not specified.
"""
original_dir = Path.cwd()
packages_dir = Path("packages")

# Resolve atopile directory
if atopile_dir:
atopile_dir = Path(atopile_dir).expanduser().resolve()
if not atopile_dir.exists():
console.print(f"[red]❌ Error: atopile directory not found: {atopile_dir}[/red]")
raise typer.Exit(code=1)

ato_cmd = get_ato_command(atopile_dir)
console.print(f"[dim]Using ato command: {' '.join(ato_cmd)}[/dim]")

# Check ato version
try:
version_proc = subprocess.run(
ato_cmd + ["--version"],
capture_output=True,
text=True,
check=False,
)
if version_proc.returncode == 0:
console.print(f"[dim]ato version: {version_proc.stdout.strip()}[/dim]")
except Exception as e:
console.print(f"[yellow]Warning: Could not get ato version: {e}[/yellow]")

if not packages_dir.is_dir():
console.print(
f"[red]❌ Error: 'packages' directory not found in {original_dir}[/red]"
)
return
raise typer.Exit(code=1)

package_subdirs = [d for d in packages_dir.iterdir() if d.is_dir()]
package_subdirs = sorted([d for d in packages_dir.iterdir() if d.is_dir()])

if not package_subdirs:
console.print(f"[yellow]No packages found in {packages_dir}[/yellow]")
Expand All @@ -112,11 +196,13 @@ def main(
package_subdirs = [
d for d in package_subdirs if re.match(package_regex, d.name)
]
console.print(f"[dim]Filtered to {len(package_subdirs)} packages matching '{package_regex}'[/dim]")

build_args = tuple(args) if args else ()

# Accumulator for results and DataFrame
# Accumulator for results
results_rows: list[dict] = []
total_packages = len(package_subdirs)

def make_summary_tables() -> Group:
# Totals
Expand All @@ -127,24 +213,27 @@ def make_summary_tables() -> Group:
pass_both = sum(
1 for r in results_rows if r["build_success"] and r["verify_success"]
)
pending = total_packages - len(results_rows)

# Top summary table with counts in headers
summary = Table(show_header=True, header_style="bold magenta")
summary.add_column(f"Fails Build ({build_fail})", justify="center")
summary.add_column(f"Fails Verify ({verify_fail})", justify="center")
summary.add_column(f"Passes Both ({pass_both})", justify="center")
summary.add_column(f"Build Fail ({build_fail})", justify="center")
summary.add_column(f"Verify Fail ({verify_fail})", justify="center")
summary.add_column(f"Pass ({pass_both})", justify="center")
summary.add_column(f"Pending ({pending})", justify="center")
summary.add_row(
f"[red]{build_fail}[/red]",
f"[yellow]{verify_fail}[/yellow]",
f"[green]{pass_both}[/green]",
f"[dim]{pending}[/dim]",
)

# Detailed per-package table
detail = Table(show_header=True, header_style="bold cyan")
detail.add_column("Package", overflow="fold")
detail.add_column("Build", justify="center")
detail.add_column("Verify", justify="center")
detail.add_column("Build Time (s)", justify="right")
detail.add_column("Time (s)", justify="right")

# Sort by build time (descending: slowest first)
for r in sorted(results_rows, key=lambda x: x["build_seconds"], reverse=True):
Expand All @@ -155,7 +244,7 @@ def make_summary_tables() -> Group:
verify_cell = "[dim]-[/dim]"
else:
verify_cell = (
"[green]PASS[/green]" if r["verify_success"] else "[red]FAIL[/red]"
"[green]PASS[/green]" if r["verify_success"] else "[yellow]FAIL[/yellow]"
)
detail.add_row(
r["package_name"],
Expand All @@ -166,10 +255,16 @@ def make_summary_tables() -> Group:

return Group(summary, detail)

with Live(console=console, refresh_per_second=8) as live:
with ProcessPoolExecutor() as executor:
console.print(f"\n[bold]Building {total_packages} packages...[/bold]\n")

with Live(console=console, refresh_per_second=4) as live:
with ProcessPoolExecutor(
max_workers=max_workers,
initializer=_worker_init,
initargs=(ato_cmd,),
) as executor:
futures = {
executor.submit(build_and_verify, subdir, build_args): subdir
executor.submit(_worker_build, (subdir, build_args)): subdir
for subdir in package_subdirs
}
for future in as_completed(futures):
Expand Down Expand Up @@ -202,6 +297,19 @@ def make_summary_tables() -> Group:
# Update live tables
live.update(make_summary_tables())

# Print final summary
build_fail = sum(1 for r in results_rows if not r["build_success"])
verify_fail = sum(
1 for r in results_rows if r["build_success"] and not r["verify_success"]
)
pass_both = sum(
1 for r in results_rows if r["build_success"] and r["verify_success"]
)
console.print(
f"\n[bold]Summary: {pass_both} passed, {build_fail} build failures, "
f"{verify_fail} verify failures[/bold]"
)

# Construct DataFrame (optional) and save to CSV if pandas is available
try:
pd = __import__("pandas")
Expand All @@ -222,7 +330,7 @@ def make_summary_tables() -> Group:
"[yellow]pandas not available; skipping DataFrame export[/yellow]"
)

# Exit with non-zero if any failed build or failed verify; print detailed logs
# Exit with non-zero if any failed build or verify; print detailed logs
any_failed = any(
(not r["build_success"]) or (r["build_success"] and not r["verify_success"])
for r in results_rows
Expand Down