diff --git a/.github/workflows/release-rolling.yml b/.github/workflows/release-rolling.yml index 5c8b498..3b99a32 100644 --- a/.github/workflows/release-rolling.yml +++ b/.github/workflows/release-rolling.yml @@ -27,18 +27,23 @@ jobs: run: sudo apt-get update && sudo apt-get install -y musl-tools - name: Build patchbay (linux musl) - run: cargo build -p patchbay-runner --bin patchbay --release --target x86_64-unknown-linux-musl + run: cargo build -p patchbay-cli --release --target x86_64-unknown-linux-musl - name: Build patchbay-vm (linux musl) run: cargo build -p patchbay-vm --release --target x86_64-unknown-linux-musl + - name: Build patchbay-serve (linux musl) + run: cargo build -p patchbay-server --release --target x86_64-unknown-linux-musl + - name: Package linux artifacts run: | mkdir -p dist cp target/x86_64-unknown-linux-musl/release/patchbay dist/patchbay-x86_64-unknown-linux-musl cp target/x86_64-unknown-linux-musl/release/patchbay-vm dist/patchbay-vm-x86_64-unknown-linux-musl + cp target/x86_64-unknown-linux-musl/release/patchbay-serve dist/patchbay-serve-x86_64-unknown-linux-musl tar -C dist -czf dist/patchbay-x86_64-unknown-linux-musl.tar.gz patchbay-x86_64-unknown-linux-musl tar -C dist -czf dist/patchbay-vm-x86_64-unknown-linux-musl.tar.gz patchbay-vm-x86_64-unknown-linux-musl + tar -C dist -czf dist/patchbay-serve-x86_64-unknown-linux-musl.tar.gz patchbay-serve-x86_64-unknown-linux-musl - uses: actions/upload-artifact@v4 with: diff --git a/Cargo.lock b/Cargo.lock index 416c1fd..b964117 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -191,6 +191,15 @@ dependencies = [ "syn", ] +[[package]] +name = "atomic-polyfill" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cf2bce30dfe09ef0bfaef228b9d414faaf7e563035494d7fe092dba54b300f4" +dependencies = [ + "critical-section", +] + [[package]] name = "atomic-waker" version = "1.1.2" @@ -336,6 +345,12 @@ version = "3.20.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + [[package]] name = "bytes" version = "1.11.1" @@ -460,6 +475,15 @@ dependencies = [ "cc", ] +[[package]] +name = "cobs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa961b519f0b462e3a3b4a34b64d119eeaca1d59af726fe450bbba07a9fc0a1" +dependencies = [ + "thiserror 2.0.18", +] + [[package]] name = "colorchoice" version = "1.0.4" @@ -496,6 +520,26 @@ dependencies = [ "tracing", ] +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -743,6 +787,27 @@ version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" +[[package]] +name = "embedded-io" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" + +[[package]] +name = "embedded-io" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + [[package]] name = "enum-as-inner" version = "0.6.1" @@ -771,6 +836,12 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + [[package]] name = "filetime" version = "0.2.27" @@ -810,6 +881,21 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "form_urlencoded" version = "1.2.2" @@ -860,7 +946,7 @@ dependencies = [ "diatomic-waker", "futures-core", "pin-project-lite", - "spin", + "spin 0.10.0", ] [[package]] @@ -1032,6 +1118,15 @@ dependencies = [ "tracing", ] +[[package]] +name = "hash32" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67" +dependencies = [ + "byteorder", +] + [[package]] name = "hashbrown" version = "0.15.5" @@ -1047,6 +1142,20 @@ version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +[[package]] +name = "heapless" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f" +dependencies = [ + "atomic-polyfill", + "hash32", + "rustc_version", + "serde", + "spin 0.9.8", + "stable_deref_trait", +] + [[package]] name = "heck" version = "0.5.0" @@ -1184,6 +1293,22 @@ dependencies = [ "webpki-roots 1.0.6", ] +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + [[package]] name = "hyper-util" version = "0.1.20" @@ -1202,9 +1327,11 @@ dependencies = [ "percent-encoding", "pin-project-lite", "socket2 0.6.3", + "system-configuration", "tokio", "tower-service", "tracing", + "windows-registry", ] [[package]] @@ -1382,6 +1509,34 @@ dependencies = [ "serde", ] +[[package]] +name = "iroh-metrics" +version = "0.38.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "761b45ba046134b11eb3e432fa501616b45c4bf3a30c21717578bc07aa6461dd" +dependencies = [ + "iroh-metrics-derive", + "itoa", + "n0-error", + "portable-atomic", + "postcard", + "ryu", + "serde", + "tracing", +] + +[[package]] +name = "iroh-metrics-derive" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab063c2bfd6c3d5a33a913d4fdb5252f140db29ec67c704f20f3da7e8f92dbf" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "is-docker" version = "0.2.0" @@ -1586,6 +1741,27 @@ dependencies = [ "uuid", ] +[[package]] +name = "n0-error" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af4782b4baf92d686d161c15460c83d16ebcfd215918763903e9619842665cae" +dependencies = [ + "n0-error-macros", + "spez", +] + +[[package]] +name = "n0-error-macros" +version = "0.1.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "03755949235714b2b307e5ae89dd8c1c2531fb127d9b8b7b4adf9c876cd3ed18" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "n0-tracing-test" version = "0.3.0" @@ -1607,6 +1783,23 @@ dependencies = [ "syn", ] +[[package]] +name = "native-tls" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "465500e14ea162429d264d44189adc38b199b62b1c21eea9f69e4b73cb03bbf2" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + [[package]] name = "netlink-packet-core" version = "0.8.1" @@ -1783,6 +1976,50 @@ dependencies = [ "pathdiff", ] +[[package]] +name = "openssl" +version = "0.10.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" + +[[package]] +name = "openssl-sys" +version = "0.9.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "option-ext" version = "0.2.0" @@ -1830,6 +2067,7 @@ dependencies = [ "futures-buffered", "hickory-resolver", "ipnet", + "iroh-metrics", 
"libc", "n0-tracing-test", "nix", @@ -1846,6 +2084,32 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "patchbay-cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "chrono", + "clap", + "ctor", + "flate2", + "nix", + "open", + "patchbay", + "patchbay-runner", + "patchbay-server", + "patchbay-utils", + "patchbay-vm", + "reqwest", + "serde", + "serde_json", + "tar", + "tempfile", + "tokio", + "toml", + "tracing", +] + [[package]] name = "patchbay-runner" version = "0.1.0" @@ -1854,12 +2118,10 @@ dependencies = [ "chrono", "clap", "comfy-table", - "ctor", "flate2", "n0-tracing-test", "nix", "patchbay", - "patchbay-server", "patchbay-utils", "rcgen 0.14.7", "regex", @@ -1885,6 +2147,7 @@ dependencies = [ "clap", "dirs", "flate2", + "patchbay-utils", "rustls", "serde", "serde_json", @@ -1902,6 +2165,7 @@ name = "patchbay-utils" version = "0.1.0" dependencies = [ "anyhow", + "chrono", "flate2", "glob", "reqwest", @@ -1921,12 +2185,9 @@ dependencies = [ "clap", "dirs", "nix", - "open", - "patchbay-server", "patchbay-utils", "serde", "serde_json", - "tokio", "toml", "tracing", ] @@ -1965,6 +2226,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + [[package]] name = "plain" version = "0.2.3" @@ -1976,6 +2243,22 @@ name = "portable-atomic" version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" +dependencies = [ + "serde", +] + +[[package]] +name = "postcard" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6764c3b5dd454e283a30e6dfe78e9b31096d9e32036b5d1eaac7a6119ccb9a24" +dependencies = [ + 
"cobs", + "embedded-io 0.4.0", + "embedded-io 0.6.1", + "heapless", + "serde", +] [[package]] name = "potential_utf" @@ -2218,17 +2501,22 @@ checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" dependencies = [ "base64", "bytes", + "encoding_rs", "futures-channel", "futures-core", "futures-util", + "h2", "http", "http-body", "http-body-util", "hyper", "hyper-rustls", + "hyper-tls", "hyper-util", "js-sys", "log", + "mime", + "native-tls", "percent-encoding", "pin-project-lite", "quinn", @@ -2239,6 +2527,7 @@ dependencies = [ "serde_urlencoded", "sync_wrapper", "tokio", + "tokio-native-tls", "tokio-rustls", "tokio-util", "tower", @@ -2392,6 +2681,15 @@ dependencies = [ "sdd", ] +[[package]] +name = "schannel" +version = "0.1.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91c1b7e4904c873ef0710c1f407dde2e6287de2bebc1bbbf7d430bb7cbffd939" +dependencies = [ + "windows-sys 0.61.2", +] + [[package]] name = "scoped-tls" version = "1.0.1" @@ -2410,6 +2708,29 @@ version = "3.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "490dcfcbfef26be6800d11870ff2df8774fa6e86d047e3e8c8a76b25655e41ca" +[[package]] +name = "security-framework" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" +dependencies = [ + "bitflags", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "semver" version = "1.0.27" @@ -2595,6 +2916,26 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "spez" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c87e960f4dca2788eeb86bbdde8dd246be8948790b7618d656e68f9b720a86e8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + [[package]] name = "spin" version = "0.10.0" @@ -2685,6 +3026,27 @@ dependencies = [ "winapi", ] +[[package]] +name = "system-configuration" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a13f3d0daba03132c0aa9767f98351b3488edc2c100cda2d2ec2b04f3d8d3c8b" +dependencies = [ + "bitflags", + "core-foundation 0.9.4", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "tagptr" version = "0.2.0" @@ -2702,6 +3064,19 @@ dependencies = [ "xattr", ] +[[package]] +name = "tempfile" +version = "3.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd" +dependencies = [ + "fastrand", + "getrandom 0.4.2", + "once_cell", + "rustix", + "windows-sys 0.61.2", +] + [[package]] name = "testdir" version = "0.9.3" @@ -2849,6 +3224,16 @@ dependencies = [ "syn", ] +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.4" @@ -3143,6 +3528,12 @@ version = "0.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + [[package]] name = "version_check" version = "0.9.5" @@ -3412,6 +3803,17 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" +[[package]] +name = "windows-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" +dependencies = [ + "windows-link", + "windows-result", + "windows-strings", +] + [[package]] name = "windows-result" version = "0.4.1" diff --git a/Cargo.toml b/Cargo.toml index 3c9092a..4c9f08b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace] -members = ["patchbay", "patchbay-utils", "patchbay-runner", "patchbay-vm", "patchbay-server"] +members = ["patchbay", "patchbay-utils", "patchbay-runner", "patchbay-vm", "patchbay-server", "patchbay-cli"] resolver = "2" [workspace.package] @@ -7,3 +7,10 @@ edition = "2021" license = "MIT OR Apache-2.0" authors = ["Franz Heinzmann "] repository = "https://github.com/n0-computer/patchbay" + +[workspace.dependencies] +patchbay = { path = "patchbay" } +patchbay-utils = { path = "patchbay-utils" } +patchbay-runner = { path = "patchbay-runner" } +patchbay-vm = { path = "patchbay-vm" } +patchbay-server = { path = "patchbay-server" } diff --git a/docs/guide/testing.md b/docs/guide/testing.md index 9d4dcf9..ffce27f 100644 --- a/docs/guide/testing.md +++ b/docs/guide/testing.md @@ -112,7 +112,8 @@ On Linux, tests run natively. 
Install patchbay's CLI if you want the `serve` command for viewing results: ```bash -cargo install --git https://github.com/n0-computer/patchbay patchbay-runner +cargo binstall patchbay-cli --no-confirm \ + || cargo install patchbay-cli --git https://github.com/n0-computer/patchbay ``` Then run your tests and serve the output: @@ -242,89 +243,48 @@ posted as a PR comment. Set two repository secrets: `PATCHBAY_URL` (e.g. `https://patchbay.example.com`) and `PATCHBAY_API_KEY`. -Add this to your workflow **after** the test step: +Install the patchbay CLI in your workflow, then add these steps **after** +the test step: ```yaml - - name: Push patchbay results + # Install patchbay CLI (binstall for speed, cargo install as fallback) + - name: Install patchbay CLI + run: | + cargo binstall patchbay-cli --no-confirm 2>/dev/null \ + || cargo install patchbay-cli --git https://github.com/n0-computer/patchbay + + # Run tests with patchbay (--persist keeps the run directory) + - name: Run tests + id: tests + run: patchbay test --persist -p my-crate --test my-test + + # Upload results to patchbay-serve + - name: Upload results if: always() env: PATCHBAY_URL: ${{ secrets.PATCHBAY_URL }} PATCHBAY_API_KEY: ${{ secrets.PATCHBAY_API_KEY }} run: | set -euo pipefail - PROJECT="${{ github.event.repository.name }}" - TESTDIR="$(cargo metadata --format-version=1 --no-deps | jq -r .target_directory)/testdir-current" - - if [ ! 
-d "$TESTDIR" ]; then - echo "No testdir output found, skipping push" + RUN_DIR=$(ls -dt .patchbay/work/run-* 2>/dev/null | head -1) + if [ -z "$RUN_DIR" ]; then + echo "No run directory found, skipping upload" exit 0 fi + PROJECT="${{ github.event.repository.name }}" + patchbay upload "$RUN_DIR" \ + --project "$PROJECT" \ + --url "$PATCHBAY_URL" \ + --api-key "$PATCHBAY_API_KEY" +``` - # Create run.json manifest - cat > "$TESTDIR/run.json" <> "$GITHUB_ENV" - echo "Results uploaded: $VIEW_URL" - - - name: Comment on PR - if: always() && github.event.pull_request && env.PATCHBAY_VIEW_URL - uses: actions/github-script@v7 - with: - script: | - const marker = ''; - const body = `${marker}\n**patchbay results:** ${process.env.PATCHBAY_VIEW_URL}`; - const { data: comments } = await github.rest.issues.listComments({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - }); - const existing = comments.find(c => c.body.includes(marker)); - if (existing) { - await github.rest.issues.updateComment({ - owner: context.repo.owner, - repo: context.repo.repo, - comment_id: existing.id, - body, - }); - } else { - await github.rest.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - body, - }); - } -``` +For a complete workflow template including the PR comment step, see +[`patchbay-server/github-workflow-template.yml`](https://github.com/n0-computer/patchbay/blob/main/patchbay-server/github-workflow-template.yml). The PR comment is auto-updated on each push, so you always see the latest run. 
diff --git a/patchbay-cli/Cargo.toml b/patchbay-cli/Cargo.toml new file mode 100644 index 0000000..10bb1c4 --- /dev/null +++ b/patchbay-cli/Cargo.toml @@ -0,0 +1,49 @@ +[package] +name = "patchbay-cli" +version = "0.1.0" +description = "Unified CLI for patchbay simulations (native and VM)" +edition.workspace = true +license.workspace = true +authors.workspace = true +repository.workspace = true + +[[bin]] +name = "patchbay" +path = "src/main.rs" + +[dependencies] +anyhow = "1" +chrono = { version = "0.4", default-features = false, features = ["clock"] } +clap = { version = "4", features = ["derive"] } +patchbay = { workspace = true } +patchbay-runner = { workspace = true } +patchbay-vm = { workspace = true, optional = true } +patchbay-server = { workspace = true, optional = true } +patchbay-utils = { workspace = true } +ctor = "0.6" +nix = { version = "0.30", features = ["signal", "process"] } +flate2 = "1" +open = "5" +reqwest = { version = "0.12", features = ["blocking", "json"], optional = true } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +tar = "0.4" +tokio = { version = "1", features = ["rt", "macros", "sync", "time", "fs", "process"] } +toml = "1.0" +tracing = "0.1" + +[dev-dependencies] +patchbay = { workspace = true } +serde_json = "1" +tempfile = "3" + +[features] +default = ["serve", "upload", "vm"] +serve = ["dep:patchbay-server"] +upload = ["dep:reqwest"] +vm = ["dep:patchbay-vm"] + +[package.metadata.binstall] +pkg-url = "{ repo }/releases/download/rolling/patchbay-{ target }.tar.gz" +bin-dir = "{ bin }-{ target }{ binary-ext }" +pkg-fmt = "tgz" diff --git a/patchbay-cli/src/compare.rs b/patchbay-cli/src/compare.rs new file mode 100644 index 0000000..bf4c616 --- /dev/null +++ b/patchbay-cli/src/compare.rs @@ -0,0 +1,336 @@ +//! Compare mode: run tests/sims in two git worktrees and diff results. 
+ +use std::{ + path::{Path, PathBuf}, + process::Command, + time::Duration, +}; + +use anyhow::{bail, Context, Result}; +use patchbay_utils::manifest::{self, TestResult, TestStatus}; + +/// Set up a git worktree for the given ref. +pub fn setup_worktree(git_ref: &str, base: &Path) -> Result { + let tree_dir = base.join(".patchbay/tree").join(sanitize_ref(git_ref)); + if tree_dir.exists() { + // Remove existing worktree first + let _ = Command::new("git") + .args(["worktree", "remove", "--force"]) + .arg(&tree_dir) + .status(); + } + std::fs::create_dir_all(tree_dir.parent().unwrap())?; + let status = Command::new("git") + .args(["worktree", "add", "--detach"]) + .arg(&tree_dir) + .arg(git_ref) + .status() + .context("git worktree add")?; + if !status.success() { + bail!("failed to create worktree for ref '{git_ref}'"); + } + Ok(tree_dir) +} + +/// Remove worktree if tracked files are unchanged. +/// Uses --force to handle untracked files (e.g. target/). +pub fn cleanup_worktree(tree_dir: &Path) -> Result<()> { + let diff = Command::new("git") + .args(["diff", "--quiet"]) + .current_dir(tree_dir) + .status() + .context("git diff")?; + if diff.success() { + let _ = Command::new("git") + .args(["worktree", "remove", "--force"]) + .arg(tree_dir) + .status(); + } + Ok(()) +} + +fn sanitize_ref(r: &str) -> String { + r.replace(['/', '\\'], "_") +} + +// ── Test comparison ── + +// Types re-exported from patchbay_utils::manifest: +// TestResult, TestStatus, RunManifest, RunKind + +pub use manifest::parse_test_output; + +/// Run tests in a directory and capture results. +pub fn run_tests_in_dir( + dir: &Path, + args: &crate::test::TestArgs, + verbose: bool, +) -> Result<(Vec, String)> { + use std::io::BufRead; + + let mut cmd = args.cargo_test_cmd_in(Some(dir)); + // Use a per-worktree target dir to avoid sharing cached binaries + // between different git refs. 
+ cmd.env("CARGO_TARGET_DIR", dir.join("target")); + cmd.stdout(std::process::Stdio::piped()); + cmd.stderr(std::process::Stdio::piped()); + let mut child = cmd.spawn().context("spawn cargo test")?; + + let stdout_pipe = child.stdout.take().unwrap(); + let stderr_pipe = child.stderr.take().unwrap(); + let v = verbose; + let out_t = std::thread::spawn(move || { + let mut buf = String::new(); + for line in std::io::BufReader::new(stdout_pipe) + .lines() + .map_while(Result::ok) + { + if v { + println!("{line}"); + } + buf.push_str(&line); + buf.push('\n'); + } + buf + }); + let err_t = std::thread::spawn(move || { + let mut buf = String::new(); + for line in std::io::BufReader::new(stderr_pipe) + .lines() + .map_while(Result::ok) + { + if verbose { + eprintln!("{line}"); + } + buf.push_str(&line); + buf.push('\n'); + } + buf + }); + + let _ = child.wait().context("wait for cargo test")?; + let stdout = out_t.join().unwrap_or_default(); + let stderr = err_t.join().unwrap_or_default(); + let combined = format!("{stdout}\n{stderr}"); + let results = parse_test_output(&combined); + Ok((results, combined)) +} + +/// Persist test results from a worktree run so future compares can reuse them. +/// +/// Writes `run.json` into `.patchbay/work/run-{timestamp}/`. 
+pub fn persist_worktree_run( + _tree_dir: &Path, + results: &[TestResult], + commit_sha: &str, +) -> Result<()> { + use manifest::{RunKind, RunManifest}; + + let ts = chrono::Utc::now().format("%Y%m%d_%H%M%S"); + let dest = PathBuf::from(format!(".patchbay/work/run-{ts}")); + std::fs::create_dir_all(&dest)?; + + let pass = results + .iter() + .filter(|r| r.status == TestStatus::Pass) + .count() as u32; + let fail = results + .iter() + .filter(|r| r.status == TestStatus::Fail) + .count() as u32; + let total = results.len() as u32; + let outcome = if fail == 0 { "pass" } else { "fail" }; + + let manifest = RunManifest { + kind: RunKind::Test, + project: None, + commit: Some(commit_sha.to_string()), + branch: None, + dirty: false, + pr: None, + pr_url: None, + title: None, + started_at: None, + ended_at: None, + runtime: None, + outcome: Some(outcome.to_string()), + pass: Some(pass), + fail: Some(fail), + total: Some(total), + tests: results.to_vec(), + os: Some(std::env::consts::OS.to_string()), + arch: Some(std::env::consts::ARCH.to_string()), + patchbay_version: option_env!("CARGO_PKG_VERSION").map(|v| v.to_string()), + }; + + let json = serde_json::to_string_pretty(&manifest)?; + std::fs::write(dest.join("run.json"), json)?; + println!("patchbay: persisted run to {}", dest.display()); + Ok(()) +} + +fn test_index(results: &[TestResult]) -> std::collections::HashMap<&str, &TestResult> { + results.iter().map(|r| (r.name.as_str(), r)).collect() +} + +fn merged_names(left: &[TestResult], right: &[TestResult]) -> Vec { + let mut names: Vec = left + .iter() + .chain(right.iter()) + .map(|r| r.name.clone()) + .collect(); + names.sort(); + names.dedup(); + names +} + +/// Aggregate pass/fail/total for one side of a comparison. +pub struct SideStats { + pub pass: usize, + pub fail: usize, + pub total: usize, +} + +/// Computed comparison result (not persisted — compare is always computed on the fly). 
+pub struct CompareResult { + pub left: SideStats, + pub right: SideStats, + pub fixes: usize, + pub regressions: usize, + pub score: i32, +} + +/// Compare two sets of test results and return computed stats. +pub fn compare_results(left: &[TestResult], right: &[TestResult]) -> CompareResult { + let left_map = test_index(left); + let right_map = test_index(right); + + let left_pass = left.iter().filter(|r| r.status == TestStatus::Pass).count(); + let left_fail = left.iter().filter(|r| r.status == TestStatus::Fail).count(); + let right_pass = right + .iter() + .filter(|r| r.status == TestStatus::Pass) + .count(); + let right_fail = right + .iter() + .filter(|r| r.status == TestStatus::Fail) + .count(); + + let mut fixes = 0; + let mut regressions = 0; + let all_names = merged_names(left, right); + for name in &all_names { + let ls = left_map.get(name.as_str()).map(|r| r.status); + let rs = right_map.get(name.as_str()).map(|r| r.status); + match (ls, rs) { + (Some(TestStatus::Fail), Some(TestStatus::Pass)) => fixes += 1, + (Some(TestStatus::Pass), Some(TestStatus::Fail)) => regressions += 1, + _ => {} + } + } + + let left_time: Duration = left.iter().filter_map(|r| r.duration).sum(); + let right_time: Duration = right.iter().filter_map(|r| r.duration).sum(); + + let mut score: i32 = 0; + score += fixes as i32 * 3; + score -= regressions as i32 * 5; + if !left_time.is_zero() { + let pct = + (right_time.as_secs_f64() - left_time.as_secs_f64()) / left_time.as_secs_f64() * 100.0; + if pct < -2.0 { + score += 1; + } + if pct > 5.0 { + score -= 1; + } + } + + CompareResult { + left: SideStats { + pass: left_pass, + fail: left_fail, + total: left.len(), + }, + right: SideStats { + pass: right_pass, + fail: right_fail, + total: right.len(), + }, + fixes, + regressions, + score, + } +} + +fn status_str(s: TestStatus) -> &'static str { + match s { + TestStatus::Pass => "PASS", + TestStatus::Fail => "FAIL", + TestStatus::Ignored => "SKIP", + } +} + +/// Print a comparison 
summary table. +pub fn print_summary( + left_ref: &str, + right_ref: &str, + left: &[TestResult], + right: &[TestResult], + result: &CompareResult, +) { + println!("\nCompare: {left_ref} \u{2194} {right_ref}\n"); + println!( + "Tests: {}/{} pass ({} fail) \u{2192} {}/{} pass ({} fail)", + result.left.pass, + result.left.total, + result.left.fail, + result.right.pass, + result.right.total, + result.right.fail + ); + if result.fixes > 0 { + println!("Fixes: {} (fail\u{2192}pass)", result.fixes); + } + if result.regressions > 0 { + println!("Regressions: {} (pass\u{2192}fail)", result.regressions); + } + + let left_map = test_index(left); + let right_map = test_index(right); + let all_names = merged_names(left, right); + + println!( + "\n{:<50} {:>8} {:>8} {:>10}", + "Test", "Left", "Right", "Delta" + ); + println!("{}", "-".repeat(80)); + for name in &all_names { + let name = name.as_str(); + let ls = left_map.get(name).map(|r| r.status); + let rs = right_map.get(name).map(|r| r.status); + let ls_str = ls.map(status_str).unwrap_or("-"); + let rs_str = rs.map(status_str).unwrap_or("-"); + let delta = match (ls, rs) { + (Some(TestStatus::Fail), Some(TestStatus::Pass)) => "fixed", + (Some(TestStatus::Pass), Some(TestStatus::Fail)) => "REGRESS", + (None, Some(_)) => "new", + (Some(_), None) => "removed", + _ => "", + }; + let display_name = if name.len() > 48 { + &name[name.len() - 48..] 
+ } else { + name + }; + println!( + "{:<50} {:>8} {:>8} {:>10}", + display_name, ls_str, rs_str, delta + ); + } + + println!( + "\nScore: {:+} ({} fixes, {} regressions)", + result.score, result.fixes, result.regressions + ); +} diff --git a/patchbay-runner/src/init.rs b/patchbay-cli/src/init.rs similarity index 100% rename from patchbay-runner/src/init.rs rename to patchbay-cli/src/init.rs diff --git a/patchbay-runner/src/main.rs b/patchbay-cli/src/main.rs similarity index 58% rename from patchbay-runner/src/main.rs rename to patchbay-cli/src/main.rs index 5d5c672..d4b82cf 100644 --- a/patchbay-runner/src/main.rs +++ b/patchbay-cli/src/main.rs @@ -1,9 +1,15 @@ -//! Runs the `patchbay` CLI entrypoint. +//! Unified CLI entrypoint for patchbay simulations (native and VM). -mod sim; +mod compare; +mod init; +mod test; +#[cfg(feature = "upload")] +mod upload; +mod util; +#[cfg(target_os = "linux")] +use std::collections::HashMap; use std::{ - collections::HashMap, path::{Path, PathBuf}, process::Command as ProcessCommand, time::Duration, @@ -12,15 +18,21 @@ use std::{ use anyhow::{anyhow, bail, Context, Result}; use clap::{Parser, Subcommand}; use patchbay::check_caps; +use patchbay_runner::sim; #[cfg(feature = "serve")] use patchbay_server::DEFAULT_UI_BIND; #[cfg(not(feature = "serve"))] const DEFAULT_UI_BIND: &str = "127.0.0.1:7421"; +#[cfg(feature = "vm")] +use patchbay_vm::VmOps; use serde::{Deserialize, Serialize}; #[derive(Parser)] #[command(name = "patchbay", about = "Run a patchbay simulation")] struct Cli { + /// Verbose output (stream subcommand output live). + #[arg(short = 'v', long, global = true)] + verbose: bool, #[command(subcommand)] command: Command, } @@ -34,7 +46,7 @@ enum Command { sims: Vec, /// Work directory for logs, binaries, and results. - #[arg(long, default_value = ".patchbay-work")] + #[arg(long, default_value = ".patchbay/work")] work_dir: PathBuf, /// Binary override in `::` form. 
@@ -71,7 +83,7 @@ enum Command { #[arg()] sims: Vec, /// Work directory for caches and prepared outputs. - #[arg(long, default_value = ".patchbay-work")] + #[arg(long, default_value = ".patchbay/work")] work_dir: PathBuf, /// Binary override in `::` form. #[arg(long = "binary")] @@ -91,7 +103,7 @@ enum Command { /// Output directory containing lab run subdirectories. /// /// Ignored when `--testdir` is set. - #[arg(default_value = ".patchbay-work")] + #[arg(default_value = ".patchbay/work")] outdir: PathBuf, /// Serve `/testdir-current` instead of a path. /// @@ -106,14 +118,16 @@ enum Command { open: bool, }, /// Build topology from sim/topology config for interactive namespace debugging. + #[cfg(target_os = "linux")] Inspect { /// Sim TOML or topology TOML file path. input: PathBuf, /// Work directory for inspect session metadata. - #[arg(long, default_value = ".patchbay-work")] + #[arg(long, default_value = ".patchbay/work")] work_dir: PathBuf, }, /// Run a command inside a node namespace from an inspect session. + #[cfg(target_os = "linux")] RunIn { /// Device or router name from the inspected topology. node: String, @@ -121,12 +135,176 @@ enum Command { #[arg(long)] inspect: Option, /// Work directory containing inspect session metadata. - #[arg(long, default_value = ".patchbay-work")] + #[arg(long, default_value = ".patchbay/work")] work_dir: PathBuf, /// Command and args to execute in the node namespace. #[arg(trailing_var_arg = true, allow_hyphen_values = true, required = true)] cmd: Vec, }, + /// Run tests (delegates to cargo test on native, VM test flow on VM). + Test { + #[command(flatten)] + args: test::TestArgs, + + /// Persist run output to `.patchbay/work/run-{timestamp}/`. + #[arg(long)] + persist: bool, + + /// Force VM backend. + #[arg(long, num_args = 0..=1, default_missing_value = "auto")] + vm: Option, + }, + /// Compare test or sim results across git refs. 
+ Compare { + #[command(subcommand)] + command: CompareCommand, + }, + /// Upload a run/compare directory to a patchbay-server instance. + Upload { + /// Directory to upload (e.g. .patchbay/work/compare-20260325_120000). + dir: PathBuf, + /// Project name for scoping on the server. + #[arg(long, env = "PATCHBAY_PROJECT")] + project: String, + /// Server URL (e.g. https://patchbay.example.com). + #[arg(long, env = "PATCHBAY_URL")] + url: String, + /// API key for authentication. + #[arg(long, env = "PATCHBAY_API_KEY")] + api_key: String, + }, + /// VM management and simulation execution. + #[cfg(feature = "vm")] + Vm { + #[command(subcommand)] + command: VmCommand, + /// Which VM backend to use. + #[arg(long, default_value = "auto", global = true)] + backend: patchbay_vm::Backend, + }, +} + +#[derive(Subcommand)] +enum CompareCommand { + /// Compare test results between git refs. + /// + /// Usage: patchbay compare test [ref2] [-- test-filter-and-args] + Test { + /// Git ref to compare (left side). + left_ref: String, + + /// Second git ref (right side). If omitted, compares against current worktree. + right_ref: Option, + + /// Force rebuild even if a cached run exists for the commit. + #[arg(long)] + force_build: bool, + + /// Fail instead of building if no cached run exists for a ref. + #[arg(long)] + no_ref_build: bool, + + #[command(flatten)] + args: test::TestArgs, + }, + /// Compare sim results between git refs. + Run { + /// Git ref to compare (left side). + left_ref: String, + + /// Second git ref (right side). + right_ref: Option, + + /// Sim TOML files or directories. + #[arg(long = "sim", required = true)] + sims: Vec, + }, +} + +/// VM sub-subcommands (mirrors patchbay-vm's standalone CLI). +#[cfg(feature = "vm")] +#[derive(Subcommand)] +enum VmCommand { + /// Boot or reuse VM and ensure mounts. + Up { + #[arg(long)] + recreate: bool, + }, + /// Stop VM and helper processes. + Down, + /// Show VM running status. 
+ Status, + /// Best-effort cleanup of VM helper artifacts/processes. + Cleanup, + /// Execute command in the guest (SSH for QEMU, exec for container). + Ssh { + #[arg(trailing_var_arg = true, allow_hyphen_values = true)] + cmd: Vec, + }, + /// Run one or more sims in VM using guest patchbay binary. + Run { + #[arg(required = true)] + sims: Vec, + #[arg(long, default_value = ".patchbay/work")] + work_dir: PathBuf, + #[arg(long = "binary")] + binary_overrides: Vec, + #[arg(short = 'v', long, default_value_t = false)] + verbose: bool, + #[arg(long)] + recreate: bool, + #[arg(long, default_value = "latest")] + patchbay_version: String, + #[arg(long, default_value_t = false)] + open: bool, + #[arg(long, default_value = DEFAULT_UI_BIND)] + bind: String, + }, + /// Serve embedded UI + work directory over HTTP. + Serve { + #[arg(long, default_value = ".patchbay/work")] + work_dir: PathBuf, + /// Serve `/binaries/tests/testdir-current` instead of work_dir. + #[arg(long, default_value_t = false)] + testdir: bool, + #[arg(long, default_value = DEFAULT_UI_BIND)] + bind: String, + #[arg(long, default_value_t = false)] + open: bool, + }, + /// Build and run tests in VM. + Test { + /// Test name filter (passed to test binaries at runtime). 
+ #[arg()] + filter: Option, + #[arg(long, default_value_t = patchbay_vm::default_test_target())] + target: String, + #[arg(short = 'p', long = "package")] + packages: Vec, + #[arg(long = "test")] + tests: Vec, + #[arg(short = 'j', long)] + jobs: Option, + #[arg(short = 'F', long)] + features: Vec, + #[arg(long)] + release: bool, + #[arg(long)] + lib: bool, + #[arg(long)] + no_fail_fast: bool, + #[arg(long)] + recreate: bool, + #[arg(last = true)] + cargo_args: Vec, + }, +} + +fn resolve_project_root(opt: Option) -> Result { + match opt { + Some(p) => Ok(p), + None => std::env::current_dir().context("resolve current directory"), + } } fn main() -> Result<()> { @@ -171,10 +349,7 @@ async fn tokio_main() -> Result<()> { #[cfg(not(feature = "serve"))] bail!("--open requires the `serve` feature"); } - let project_root = match project_root { - Some(p) => p, - None => std::env::current_dir().context("resolve current directory")?, - }; + let project_root = resolve_project_root(project_root)?; let sims = resolve_sim_args(sims, &project_root)?; let res = sim::run_sims( sims, @@ -201,10 +376,7 @@ async fn tokio_main() -> Result<()> { no_build, project_root, } => { - let project_root = match project_root { - Some(p) => p, - None => std::env::current_dir().context("resolve current directory")?, - }; + let project_root = resolve_project_root(project_root)?; let sims = resolve_sim_args(sims, &project_root)?; sim::prepare_sims( sims, @@ -234,13 +406,255 @@ async fn tokio_main() -> Result<()> { } patchbay_server::serve(dir, &bind).await } + #[cfg(target_os = "linux")] Command::Inspect { input, work_dir } => inspect_command(input, work_dir).await, + #[cfg(target_os = "linux")] Command::RunIn { node, inspect, work_dir, cmd, } => run_in_command(node, inspect, work_dir, cmd), + Command::Test { args, persist, vm } => { + #[cfg(feature = "vm")] + if let Some(vm_backend) = vm { + let backend = match vm_backend.as_str() { + "auto" => patchbay_vm::Backend::Auto.resolve(), + "qemu" => 
patchbay_vm::Backend::Qemu, + "container" => patchbay_vm::Backend::Container, + other => bail!("unknown VM backend: {other}"), + }; + return test::run_vm(args, backend); + } + #[cfg(not(feature = "vm"))] + if vm.is_some() { + bail!("VM support not compiled (enable the `vm` feature)"); + } + test::run_native(args, cli.verbose, persist) + } + Command::Compare { command } => { + let cwd = std::env::current_dir().context("get cwd")?; + let work_dir = cwd.join(".patchbay/work"); + match command { + CompareCommand::Test { + left_ref, + right_ref, + force_build, + no_ref_build, + args, + } => { + use patchbay_utils::manifest::{self as mf, RunKind}; + + let right_label = right_ref.as_deref().unwrap_or("worktree"); + println!( + "patchbay compare test: {} \u{2194} {}", + left_ref, right_label + ); + + // Helper: resolve results for a ref, using cache or building. + let resolve_ref_results = + |git_ref: &str, label: &str| -> Result> { + let sha = mf::resolve_ref(git_ref) + .with_context(|| format!("could not resolve ref '{git_ref}'"))?; + + // Check cache (unless --force-build). + if !force_build { + if let Some((_dir, manifest)) = + mf::find_run_for_commit(&work_dir, &sha, RunKind::Test) + { + println!("Using cached run for {label} ({sha:.8})"); + return Ok(manifest.tests); + } + } + + // No cache — fail if --no-ref-build. + if no_ref_build { + bail!( + "no cached run for {label} ({sha:.8}); \ + run `patchbay test --persist` on that ref first, \ + or remove --no-ref-build" + ); + } + + // Build in worktree. + println!("Running tests in {label} ..."); + let tree_dir = compare::setup_worktree(git_ref, &cwd)?; + let (results, _output) = + compare::run_tests_in_dir(&tree_dir, &args, cli.verbose)?; + + // Persist the run so future compares can reuse it. 
+ compare::persist_worktree_run(&tree_dir, &results, &sha)?; + + compare::cleanup_worktree(&tree_dir)?; + Ok(results) + }; + + let left_results = resolve_ref_results(&left_ref, &left_ref)?; + + let right_results = if let Some(ref r) = right_ref { + resolve_ref_results(r, r)? + } else { + // Compare against current worktree: always run fresh. + println!("Running tests in worktree ..."); + let (results, _output) = + compare::run_tests_in_dir(&cwd, &args, cli.verbose)?; + results + }; + + // Compare + let result = compare::compare_results(&left_results, &right_results); + compare::print_summary( + &left_ref, + right_label, + &left_results, + &right_results, + &result, + ); + + if result.regressions > 0 { + bail!("{} regressions detected", result.regressions); + } + Ok(()) + } + CompareCommand::Run { + sims: _, + left_ref: _, + right_ref: _, + } => { + // TODO: implement compare run (sim comparison) + bail!("compare run is not yet implemented"); + } + } + } + Command::Upload { + dir, + project, + url, + api_key, + } => { + if !dir.exists() { + bail!("directory does not exist: {}", dir.display()); + } + #[cfg(feature = "upload")] + { + upload::upload(&dir, &project, &url, &api_key) + } + #[cfg(not(feature = "upload"))] + { + let _ = (&dir, &project, &url, &api_key); + bail!("upload support not compiled in (enable the `upload` feature)") + } + } + #[cfg(feature = "vm")] + Command::Vm { command, backend } => dispatch_vm(command, backend).await, + } +} + +/// Dispatch VM subcommands to the patchbay-vm library. 
+#[cfg(feature = "vm")] +async fn dispatch_vm(command: VmCommand, backend: patchbay_vm::Backend) -> Result<()> { + let backend = backend.resolve(); + + match command { + VmCommand::Up { recreate } => backend.up(recreate), + VmCommand::Down => backend.down(), + VmCommand::Status => backend.status(), + VmCommand::Cleanup => backend.cleanup(), + VmCommand::Ssh { cmd } => backend.exec(cmd), + VmCommand::Run { + sims, + work_dir, + binary_overrides, + verbose, + recreate, + patchbay_version, + open, + bind, + } => { + if open { + let url = format!("http://{bind}"); + println!("patchbay UI: {url}"); + let _ = open::that(&url); + let work = work_dir.clone(); + let bind_clone = bind.clone(); + tokio::spawn(async move { + if let Err(e) = patchbay_server::serve(work, &bind_clone).await { + tracing::error!("server error: {e}"); + } + }); + } + let args = patchbay_vm::RunVmArgs { + sim_inputs: sims, + work_dir, + binary_overrides, + verbose, + recreate, + patchbay_version, + }; + let res = backend.run_sims(args); + if open && res.is_ok() { + println!("run finished; server still running (Ctrl-C to exit)"); + loop { + tokio::time::sleep(Duration::from_secs(60)).await; + } + } + res + } + VmCommand::Serve { + work_dir, + testdir, + bind, + open, + } => { + let dir = if testdir { + work_dir + .join("binaries") + .join("tests") + .join("testdir-current") + } else { + work_dir + }; + println!("patchbay: serving {} at http://{bind}/", dir.display()); + if open { + let url = format!("http://{bind}"); + let _ = open::that(&url); + } + patchbay_server::serve(dir, &bind).await + } + VmCommand::Test { + filter, + target, + packages, + tests, + jobs, + features, + release, + lib, + no_fail_fast, + recreate, + cargo_args, + } => { + let test_args = test::TestArgs { + include_ignored: false, + ignored: false, + packages, + tests, + jobs, + features, + release, + lib, + no_fail_fast, + extra_args: { + let mut args = Vec::new(); + if let Some(f) = filter { + args.push(f); + } + 
args.extend(cargo_args); + args + }, + }; + backend.run_tests(test_args.into_vm_args(target, recreate)) + } } } @@ -307,6 +721,7 @@ fn resolve_testdir_native() -> Result { Ok(PathBuf::from(target_dir).join("testdir-current")) } +#[cfg(target_os = "linux")] #[derive(Debug, Clone, Serialize, Deserialize)] struct InspectSession { prefix: String, @@ -316,18 +731,22 @@ struct InspectSession { node_keeper_pids: HashMap, } +#[cfg(target_os = "linux")] fn inspect_dir(work_dir: &std::path::Path) -> PathBuf { work_dir.join("inspect") } +#[cfg(target_os = "linux")] fn inspect_session_path(work_dir: &std::path::Path, prefix: &str) -> PathBuf { inspect_dir(work_dir).join(format!("{prefix}.json")) } +#[cfg(target_os = "linux")] fn env_key_suffix(name: &str) -> String { patchbay::util::sanitize_for_env_key(name) } +#[cfg(target_os = "linux")] fn load_topology_for_inspect( input: &std::path::Path, ) -> Result<(patchbay::config::LabConfig, bool)> { @@ -350,6 +769,7 @@ fn load_topology_for_inspect( } } +#[cfg(target_os = "linux")] fn keeper_commmand() -> ProcessCommand { let mut cmd = ProcessCommand::new("sh"); cmd.args(["-lc", "while :; do sleep 3600; done"]) @@ -359,6 +779,7 @@ fn keeper_commmand() -> ProcessCommand { cmd } +#[cfg(target_os = "linux")] async fn inspect_command(input: PathBuf, work_dir: PathBuf) -> Result<()> { check_caps()?; @@ -440,6 +861,7 @@ async fn inspect_command(input: PathBuf, work_dir: PathBuf) -> Result<()> { } } +#[cfg(target_os = "linux")] fn resolve_inspect_ref(inspect: Option) -> Result { if let Some(value) = inspect { let trimmed = value.trim(); @@ -457,6 +879,7 @@ fn resolve_inspect_ref(inspect: Option) -> Result { Ok(trimmed.to_string()) } +#[cfg(target_os = "linux")] fn load_inspect_session(work_dir: &std::path::Path, inspect_ref: &str) -> Result { let as_path = PathBuf::from(inspect_ref); let session_path = if as_path.extension().and_then(|v| v.to_str()) == Some("json") @@ -472,6 +895,7 @@ fn load_inspect_session(work_dir: &std::path::Path, 
inspect_ref: &str) -> Result .with_context(|| format!("parse inspect session {}", session_path.display())) } +#[cfg(target_os = "linux")] fn run_in_command( node: String, inspect: Option, @@ -517,12 +941,14 @@ mod tests { use super::*; + #[cfg(target_os = "linux")] #[test] fn env_key_suffix_normalizes_names() { assert_eq!(env_key_suffix("relay"), "relay"); assert_eq!(env_key_suffix("fetcher-1"), "fetcher_1"); } + #[cfg(target_os = "linux")] #[test] fn inspect_session_path_uses_prefix_json() { let base = PathBuf::from("/tmp/patchbay-work"); @@ -530,6 +956,7 @@ mod tests { assert!(path.ends_with("inspect/lab-p123.json")); } + #[cfg(target_os = "linux")] fn write_temp_file(dir: &Path, rel: &str, body: &str) -> PathBuf { let path = dir.join(rel); if let Some(parent) = path.parent() { @@ -539,6 +966,7 @@ mod tests { path } + #[cfg(target_os = "linux")] #[test] fn inspect_loader_detects_sim_input() { let root = std::env::temp_dir().join(format!( @@ -558,6 +986,7 @@ mod tests { assert!(is_sim); } + #[cfg(target_os = "linux")] #[test] fn inspect_loader_detects_topology_input() { let root = std::env::temp_dir().join(format!( diff --git a/patchbay-cli/src/test.rs b/patchbay-cli/src/test.rs new file mode 100644 index 0000000..cfa53f5 --- /dev/null +++ b/patchbay-cli/src/test.rs @@ -0,0 +1,356 @@ +//! Test command implementation. + +use std::{ + path::{Path, PathBuf}, + process::Command, +}; + +use anyhow::{bail, Context, Result}; +use patchbay_utils::manifest::{self, RunKind, RunManifest, TestStatus}; + +/// Check if cargo-nextest is available. +fn has_nextest() -> bool { + Command::new("cargo-nextest") + .arg("--version") + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .map(|s| s.success()) + .unwrap_or(false) +} + +/// Shared test arguments used by both `patchbay test` and `patchbay compare test`. +#[derive(Debug, Clone, clap::Args)] +pub struct TestArgs { + /// Include ignored tests (like `cargo test -- --include-ignored`). 
+ #[arg(long)] + pub include_ignored: bool, + + /// Run only ignored tests (like `cargo test -- --ignored`). + #[arg(long)] + pub ignored: bool, + + /// Package to test. + #[arg(short = 'p', long = "package")] + pub packages: Vec, + + /// Test target name. + #[arg(long = "test")] + pub tests: Vec, + + /// Number of build jobs. + #[arg(short = 'j', long)] + pub jobs: Option, + + /// Features to enable. + #[arg(short = 'F', long)] + pub features: Vec, + + /// Build in release mode. + #[arg(long)] + pub release: bool, + + /// Test only library. + #[arg(long)] + pub lib: bool, + + /// Don't stop on first failure. + #[arg(long)] + pub no_fail_fast: bool, + + /// Extra args passed after `--` to cargo/test binaries (filter, etc). + #[arg(last = true)] + pub extra_args: Vec, +} + +impl TestArgs { + /// Build a `cargo test` command with all flags applied. + /// Does NOT set stdout/stderr — caller decides piping. + pub fn cargo_test_cmd(&self) -> Command { + self.cargo_test_cmd_in(None) + } + + /// Build a `cargo test` command, optionally running in a specific directory. 
+ pub fn cargo_test_cmd_in(&self, dir: Option<&Path>) -> Command { + let mut cmd = Command::new("cargo"); + cmd.arg("test"); + cmd.env("RUSTFLAGS", crate::util::patchbay_rustflags()); + if let Some(d) = dir { + cmd.current_dir(d); + } + for p in &self.packages { + cmd.arg("-p").arg(p); + } + for t in &self.tests { + cmd.arg("--test").arg(t); + } + if let Some(j) = self.jobs { + cmd.arg("-j").arg(j.to_string()); + } + for f in &self.features { + cmd.arg("-F").arg(f); + } + if self.release { + cmd.arg("--release"); + } + if self.lib { + cmd.arg("--lib"); + } + if self.no_fail_fast { + cmd.arg("--no-fail-fast"); + } + // Everything after `--`: --ignored/--include-ignored + extra args + if self.include_ignored || self.ignored || !self.extra_args.is_empty() { + cmd.arg("--"); + if self.ignored { + cmd.arg("--ignored"); + } else if self.include_ignored { + cmd.arg("--include-ignored"); + } + for a in &self.extra_args { + cmd.arg(a); + } + } + cmd + } + + /// Convert to patchbay-vm TestVmArgs. + #[cfg(feature = "vm")] + pub fn into_vm_args(self, target: String, recreate: bool) -> patchbay_vm::TestVmArgs { + let mut cargo_args = Vec::new(); + if let Some(j) = self.jobs { + cargo_args.extend(["--jobs".into(), j.to_string()]); + } + for f in &self.features { + cargo_args.extend(["--features".into(), f.clone()]); + } + if self.release { + cargo_args.push("--release".into()); + } + if self.lib { + cargo_args.push("--lib".into()); + } + if self.no_fail_fast { + cargo_args.push("--no-fail-fast".into()); + } + cargo_args.extend(self.extra_args); + patchbay_vm::TestVmArgs { + filter: None, + target, + packages: self.packages, + tests: self.tests, + recreate, + cargo_args, + } + } +} + +/// Resolve `target_directory` from cargo metadata. 
+fn cargo_target_dir() -> Option { + let output = Command::new("cargo") + .args(["metadata", "--format-version=1", "--no-deps"]) + .output() + .ok()?; + if !output.status.success() { + return None; + } + let meta: serde_json::Value = serde_json::from_slice(&output.stdout).ok()?; + meta["target_directory"].as_str().map(PathBuf::from) +} + +/// Run tests natively via cargo test/nextest. +/// +/// Captures stdout/stderr (printing live when `verbose` is true), parses +/// test results, and writes `run.json` to `testdir-current/`. +/// When `persist` is true, copies output to `.patchbay/work/run-{timestamp}/`. +pub fn run_native(args: TestArgs, verbose: bool, persist: bool) -> Result<()> { + use std::io::BufRead; + + let use_nextest = has_nextest(); + let mut cmd = if use_nextest { + let mut cmd = Command::new("cargo"); + cmd.arg("nextest").arg("run"); + cmd.env("RUSTFLAGS", crate::util::patchbay_rustflags()); + for p in &args.packages { + cmd.arg("-p").arg(p); + } + for t in &args.tests { + cmd.arg("--test").arg(t); + } + if let Some(j) = args.jobs { + cmd.arg("-j").arg(j.to_string()); + } + for f in &args.features { + cmd.arg("-F").arg(f); + } + if args.release { + cmd.arg("--release"); + } + if args.lib { + cmd.arg("--lib"); + } + if args.no_fail_fast { + cmd.arg("--no-fail-fast"); + } + if args.include_ignored { + cmd.arg("--run-ignored").arg("all"); + } else if args.ignored { + cmd.arg("--run-ignored").arg("ignored-only"); + } + for a in &args.extra_args { + cmd.arg(a); + } + cmd + } else { + eprintln!("patchbay: cargo-nextest not found, using cargo test"); + args.cargo_test_cmd() + }; + + // Set PATCHBAY_OUTDIR so test fixtures can discover the output directory. + if let Some(target_dir) = cargo_target_dir() { + let outdir = target_dir.join("testdir-current"); + cmd.env("PATCHBAY_OUTDIR", &outdir); + } + + // Pipe stdout/stderr so we can capture output while optionally printing live. 
+ cmd.stdout(std::process::Stdio::piped()); + cmd.stderr(std::process::Stdio::piped()); + + let started_at = chrono::Utc::now(); + let mut child = cmd.spawn().context("failed to spawn test command")?; + + let stdout_pipe = child.stdout.take().unwrap(); + let stderr_pipe = child.stderr.take().unwrap(); + let v = verbose; + let out_t = std::thread::spawn(move || { + let mut buf = String::new(); + for line in std::io::BufReader::new(stdout_pipe) + .lines() + .map_while(Result::ok) + { + if v { + println!("{line}"); + } + buf.push_str(&line); + buf.push('\n'); + } + buf + }); + let err_t = std::thread::spawn(move || { + let mut buf = String::new(); + for line in std::io::BufReader::new(stderr_pipe) + .lines() + .map_while(Result::ok) + { + if verbose { + eprintln!("{line}"); + } + buf.push_str(&line); + buf.push('\n'); + } + buf + }); + + let status = child.wait().context("failed to wait for test command")?; + let ended_at = chrono::Utc::now(); + let stdout = out_t.join().unwrap_or_default(); + let stderr = err_t.join().unwrap_or_default(); + + let combined = format!("{stdout}\n{stderr}"); + let results = manifest::parse_test_output(&combined); + + // Write run.json into testdir-current/. 
+ let pass = results + .iter() + .filter(|r| r.status == TestStatus::Pass) + .count() as u32; + let fail = results + .iter() + .filter(|r| r.status == TestStatus::Fail) + .count() as u32; + let total = results.len() as u32; + let git = manifest::git_context(); + let runtime = (ended_at - started_at).to_std().ok(); + let outcome = if status.success() { "pass" } else { "fail" }; + + let manifest = RunManifest { + kind: RunKind::Test, + project: None, + commit: git.commit, + branch: git.branch, + dirty: git.dirty, + pr: None, + pr_url: None, + title: None, + started_at: Some(started_at), + ended_at: Some(ended_at), + runtime, + outcome: Some(outcome.to_string()), + pass: Some(pass), + fail: Some(fail), + total: Some(total), + tests: results, + os: Some(std::env::consts::OS.to_string()), + arch: Some(std::env::consts::ARCH.to_string()), + patchbay_version: option_env!("CARGO_PKG_VERSION").map(|v| v.to_string()), + }; + + if let Some(target_dir) = cargo_target_dir() { + let testdir = target_dir.join("testdir-current"); + if let Err(e) = std::fs::create_dir_all(&testdir) { + eprintln!("patchbay: warning: could not create testdir: {e}"); + } + let run_json = testdir.join("run.json"); + match serde_json::to_string_pretty(&manifest) { + Ok(json) => { + if let Err(e) = std::fs::write(&run_json, json) { + eprintln!("patchbay: warning: could not write run.json: {e}"); + } + } + Err(e) => eprintln!("patchbay: warning: could not serialize run.json: {e}"), + } + } + + // --persist: copy output dir to .patchbay/work/run-{timestamp}/ + if persist { + persist_run()?; + } + + if !status.success() { + bail!("tests failed (exit code {})", status.code().unwrap_or(-1)); + } + Ok(()) +} + +/// Copy testdir-current/ into `.patchbay/work/run-{timestamp}/`. 
+fn persist_run() -> Result<()> { + let target_dir = cargo_target_dir().context("could not determine cargo target dir")?; + let testdir = target_dir.join("testdir-current"); + if !testdir.exists() { + return Ok(()); + } + let ts = chrono::Utc::now().format("%Y%m%d_%H%M%S"); + let dest = PathBuf::from(format!(".patchbay/work/run-{ts}")); + std::fs::create_dir_all(dest.parent().unwrap())?; + // -rL: dereference symlinks (testdir-current is a symlink to testdir-N) + let status = Command::new("cp") + .args(["-rL"]) + .arg(&testdir) + .arg(&dest) + .status() + .context("cp testdir")?; + if !status.success() { + bail!("failed to copy testdir to {}", dest.display()); + } + println!("patchbay: persisted run to {}", dest.display()); + Ok(()) +} + +/// Run tests in a VM via patchbay-vm. +#[cfg(feature = "vm")] +pub fn run_vm(args: TestArgs, backend: patchbay_vm::Backend) -> anyhow::Result<()> { + use patchbay_vm::VmOps; + let backend = backend.resolve(); + let target = patchbay_vm::default_test_target(); + backend.run_tests(args.into_vm_args(target, false)) +} diff --git a/patchbay-cli/src/upload.rs b/patchbay-cli/src/upload.rs new file mode 100644 index 0000000..cb7420b --- /dev/null +++ b/patchbay-cli/src/upload.rs @@ -0,0 +1,87 @@ +//! Upload run/compare directories to a patchbay-server instance. + +use std::path::Path; + +use anyhow::{bail, Context, Result}; +use patchbay_utils::manifest::{RunKind, RunManifest}; + +/// Build a RunManifest from CI environment variables. 
+pub fn manifest_from_env(project: &str) -> RunManifest { + RunManifest { + kind: RunKind::Sim, // default; overridden if run.json already exists + project: Some(project.to_string()), + branch: std::env::var("GITHUB_REF_NAME") + .ok() + .or_else(|| std::env::var("GITHUB_HEAD_REF").ok()), + commit: std::env::var("GITHUB_SHA").ok(), + pr: std::env::var("GITHUB_PR_NUMBER") + .ok() + .and_then(|s| s.parse().ok()), + pr_url: None, + title: std::env::var("GITHUB_PR_TITLE").ok(), + outcome: None, + started_at: Some(chrono::Utc::now()), + ended_at: None, + runtime: None, + dirty: false, + pass: None, + fail: None, + total: None, + tests: Vec::new(), + os: None, + arch: None, + patchbay_version: None, + } +} + +/// Create a tar.gz archive of a directory in memory. +fn tar_gz_dir(dir: &Path) -> Result> { + let mut buf = Vec::new(); + { + let gz = flate2::write::GzEncoder::new(&mut buf, flate2::Compression::fast()); + let mut archive = tar::Builder::new(gz); + archive.append_dir_all(".", dir).context("tar directory")?; + let gz = archive.into_inner().context("finish tar")?; + gz.finish().context("finish gzip")?; + } + Ok(buf) +} + +/// Upload a directory to patchbay-server. +/// +/// Creates a `run.json` manifest in the directory before uploading. 
+pub fn upload(dir: &Path, project: &str, url: &str, api_key: &str) -> Result<()> { + // Write run.json manifest if not already present + let manifest_path = dir.join("run.json"); + if !manifest_path.exists() { + let manifest = manifest_from_env(project); + let json = serde_json::to_string_pretty(&manifest)?; + std::fs::write(&manifest_path, json).context("write run.json")?; + } + + let body = tar_gz_dir(dir)?; + let push_url = format!("{}/api/push/{}", url.trim_end_matches('/'), project); + + let client = reqwest::blocking::Client::new(); + let resp = client + .post(&push_url) + .header("Authorization", format!("Bearer {api_key}")) + .header("Content-Type", "application/gzip") + .body(body) + .send() + .context("upload request failed")?; + + if !resp.status().is_success() { + let status = resp.status(); + let body = resp.text().unwrap_or_default(); + bail!("upload failed ({status}): {body}"); + } + + let result: serde_json::Value = resp.json().context("parse response")?; + let base = url.trim_end_matches('/'); + if let Some(run) = result.get("run").and_then(serde_json::Value::as_str) { + let view_url = format!("{base}/run/{run}"); + println!("{view_url}"); + } + Ok(()) +} diff --git a/patchbay-cli/src/util.rs b/patchbay-cli/src/util.rs new file mode 100644 index 0000000..a12687d --- /dev/null +++ b/patchbay-cli/src/util.rs @@ -0,0 +1,9 @@ +/// Build RUSTFLAGS with --cfg patchbay_tests appended. +pub fn patchbay_rustflags() -> String { + let existing = std::env::var("RUSTFLAGS").unwrap_or_default(); + if existing.is_empty() { + "--cfg patchbay_tests".to_string() + } else { + format!("{existing} --cfg patchbay_tests") + } +} diff --git a/patchbay-cli/tests/compare_integration.rs b/patchbay-cli/tests/compare_integration.rs new file mode 100644 index 0000000..a78ca7a --- /dev/null +++ b/patchbay-cli/tests/compare_integration.rs @@ -0,0 +1,287 @@ +//! Integration test for `patchbay compare test`. +//! 
Copies the counter fixture into a temp git repo, makes two commits +//! with different PACKET_COUNT values, and runs compare between them. + +use std::{path::Path, process::Command}; + +fn git(dir: &Path, args: &[&str]) { + let status = Command::new("git") + .args(args) + .current_dir(dir) + .env("GIT_AUTHOR_NAME", "test") + .env("GIT_AUTHOR_EMAIL", "test@test") + .env("GIT_COMMITTER_NAME", "test") + .env("GIT_COMMITTER_EMAIL", "test@test") + .status() + .unwrap(); + assert!(status.success(), "git {args:?} failed"); +} + +#[test] +#[ignore] // Slow: builds fixture crate from scratch in worktrees +fn compare_detects_regression() { + let tmp = tempfile::tempdir().unwrap(); + let dir = tmp.path(); + let cli_dir = Path::new(env!("CARGO_MANIFEST_DIR")); + let patchbay_crate = cli_dir.parent().unwrap().join("patchbay"); + let fixture_dir = cli_dir.join("tests/fixtures/counter"); + + // Copy fixture files into temp dir + std::fs::create_dir_all(dir.join("tests")).unwrap(); + std::fs::copy( + fixture_dir.join("tests/counter.rs"), + dir.join("tests/counter.rs"), + ) + .unwrap(); + + // Copy Cargo.toml and replace the relative patchbay path with absolute + let cargo_toml = std::fs::read_to_string(fixture_dir.join("Cargo.toml")).unwrap(); + let cargo_toml = cargo_toml.replace( + "path = \"../../../../patchbay\"", + &format!("path = \"{}\"", patchbay_crate.display()), + ); + std::fs::write(dir.join("Cargo.toml"), cargo_toml).unwrap(); + + // Commit 1: passing (PACKET_COUNT = 5) + git(dir, &["init"]); + git(dir, &["add", "."]); + git(dir, &["commit", "-m", "passing"]); + git(dir, &["tag", "v1"]); + + // Commit 2: regressing (PACKET_COUNT = 2, below THRESHOLD = 3) + let src = std::fs::read_to_string(dir.join("tests/counter.rs")).unwrap(); + let regressed = src.replace( + "const PACKET_COUNT: u32 = 5;", + "const PACKET_COUNT: u32 = 2;", + ); + std::fs::write(dir.join("tests/counter.rs"), regressed).unwrap(); + git(dir, &["add", "."]); + git(dir, &["commit", "-m", "regressing"]); 
+ git(dir, &["tag", "v2"]); + + // Run compare + let patchbay_bin = env!("CARGO_BIN_EXE_patchbay"); + let output = Command::new(patchbay_bin) + .args(["-v", "compare", "test", "v1", "v2"]) + .current_dir(dir) + .output() + .unwrap(); + + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + eprintln!("stdout:\n{stdout}"); + eprintln!("stderr:\n{stderr}"); + + // Compare should detect the regression and exit non-zero + assert!( + !output.status.success(), + "expected non-zero exit due to regression" + ); + + // stdout should contain the summary output + assert!(stdout.contains("Compare:"), "missing Compare header"); + assert!(stdout.contains("Score:"), "missing Score line"); + + // Find the two persisted run directories in .patchbay/work/ + let work = dir.join(".patchbay/work"); + assert!(work.exists(), ".patchbay/work dir not created"); + + let run_dirs: Vec<_> = std::fs::read_dir(&work) + .unwrap() + .filter_map(|e| e.ok()) + .filter(|e| e.file_name().to_string_lossy().starts_with("run-")) + .collect(); + assert_eq!( + run_dirs.len(), + 2, + "expected 2 run directories, found {}", + run_dirs.len() + ); + + // Parse run.json from each directory + let mut manifests: Vec = run_dirs + .iter() + .map(|d| { + let run_json = d.path().join("run.json"); + assert!( + run_json.exists(), + "run.json not found in {}", + d.path().display() + ); + serde_json::from_str(&std::fs::read_to_string(&run_json).unwrap()).unwrap() + }) + .collect(); + + // Both should have kind: "test" + for m in &manifests { + assert_eq!(m["kind"], "test", "run.json should have kind 'test'"); + assert!( + !m["dirty"].as_bool().unwrap_or(true), + "run should not be dirty" + ); + assert!(m["commit"].is_string(), "run.json should have a commit SHA"); + } + + // Resolve expected SHAs for v1 and v2 + let v1_sha = { + let out = Command::new("git") + .args(["rev-parse", "v1"]) + .current_dir(dir) + .output() + .unwrap(); + 
String::from_utf8(out.stdout).unwrap().trim().to_string() + }; + let v2_sha = { + let out = Command::new("git") + .args(["rev-parse", "v2"]) + .current_dir(dir) + .output() + .unwrap(); + String::from_utf8(out.stdout).unwrap().trim().to_string() + }; + + // Sort manifests so left (v1) comes first + manifests.sort_by_key(|m| m["commit"].as_str().unwrap() == v2_sha); + let left_manifest = &manifests[0]; + let right_manifest = &manifests[1]; + + assert_eq!( + left_manifest["commit"].as_str().unwrap(), + v1_sha, + "left run should match v1 SHA" + ); + assert_eq!( + right_manifest["commit"].as_str().unwrap(), + v2_sha, + "right run should match v2 SHA" + ); + + // Left side: both tests pass (PACKET_COUNT=5 >= THRESHOLD=3) + assert_eq!( + left_manifest["pass"].as_u64().unwrap(), + 2, + "left should have 2 passes" + ); + assert_eq!( + left_manifest["fail"].as_u64().unwrap(), + 0, + "left should have 0 failures" + ); + assert_eq!(left_manifest["total"].as_u64().unwrap(), 2); + + // Right side: udp_threshold fails (PACKET_COUNT=2 < THRESHOLD=3) + assert_eq!( + right_manifest["pass"].as_u64().unwrap(), + 1, + "right should have 1 pass" + ); + assert_eq!( + right_manifest["fail"].as_u64().unwrap(), + 1, + "right should have 1 failure" + ); + assert_eq!(right_manifest["total"].as_u64().unwrap(), 2); + + // Per-test results + let left_tests = left_manifest["tests"].as_array().unwrap(); + let right_tests = right_manifest["tests"].as_array().unwrap(); + assert_eq!(left_tests.len(), 2, "should have 2 left test results"); + assert_eq!(right_tests.len(), 2, "should have 2 right test results"); + + // Find the threshold test in right results — it should fail + let threshold_right = right_tests + .iter() + .find(|r| r["name"].as_str().unwrap().contains("udp_threshold")) + .expect("udp_threshold test not found in right results"); + assert_eq!(threshold_right["status"], "fail"); + + // Find the threshold test in left results — it should pass + let threshold_left = left_tests + .iter() + 
.find(|r| r["name"].as_str().unwrap().contains("udp_threshold")) + .expect("udp_threshold test not found in left results"); + assert_eq!(threshold_left["status"], "pass"); + + // Worktrees should be cleaned up (no changes = removed) + let tree_dir = dir.join(".patchbay/tree"); + if tree_dir.exists() { + let remaining: Vec<_> = std::fs::read_dir(&tree_dir) + .unwrap() + .filter_map(|e| e.ok()) + .collect(); + assert!( + remaining.is_empty(), + "worktrees should be cleaned up, found: {remaining:?}" + ); + } + + // Validate metrics.jsonl from testdir output. + // The fixture uses testdir!() so Lab output goes to + // /testdir-current//device.sender.metrics.jsonl + // Use cargo metadata to find the target dir in the temp repo. + let testdir_current = { + let meta_out = Command::new("cargo") + .args(["metadata", "--format-version=1", "--no-deps"]) + .current_dir(dir) + .output() + .unwrap(); + let meta: serde_json::Value = serde_json::from_slice(&meta_out.stdout).unwrap_or_default(); + let target = meta["target_directory"] + .as_str() + .map(|s| Path::new(s).join("testdir-current")); + target.unwrap_or_else(|| dir.join("target/testdir-current")) + }; + if testdir_current.exists() { + let metrics_files: Vec<_> = walkdir(&testdir_current) + .into_iter() + .filter(|p| { + p.file_name() + .is_some_and(|f| f.to_string_lossy().ends_with(".metrics.jsonl")) + }) + .collect(); + assert!( + !metrics_files.is_empty(), + "expected metrics.jsonl files in {}, found none", + testdir_current.display() + ); + + // At least one metrics file should contain packet_count + let mut found_packet_count = false; + for path in &metrics_files { + let content = std::fs::read_to_string(path).unwrap(); + for line in content.lines() { + if let Ok(val) = serde_json::from_str::(line) { + if let Some(m) = val.get("m").and_then(|m| m.as_object()) { + if let Some(count) = m.get("packet_count").and_then(|v| v.as_f64()) { + found_packet_count = true; + assert!( + count == 5.0 || count == 2.0, + "unexpected 
packet_count value: {count}" + ); + } + } + } + } + } + assert!( + found_packet_count, + "no packet_count metric found in metrics files" + ); + } +} + +/// Recursively collect all file paths under a directory. +fn walkdir(dir: &Path) -> Vec { + let mut files = Vec::new(); + if let Ok(entries) = std::fs::read_dir(dir) { + for entry in entries.flatten() { + let path = entry.path(); + if path.is_dir() { + files.extend(walkdir(&path)); + } else { + files.push(path); + } + } + } + files +} diff --git a/patchbay-cli/tests/fixtures/counter/Cargo.lock b/patchbay-cli/tests/fixtures/counter/Cargo.lock new file mode 100644 index 0000000..969f6f0 --- /dev/null +++ b/patchbay-cli/tests/fixtures/counter/Cargo.lock @@ -0,0 +1,1006 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "bumpalo" +version = "3.20.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cc" +version = "1.2.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a0dd1ca384932ff3641c8718a02769f1698e7563dc6974ffd03346116310423" +dependencies = [ + "find-msvc-tools", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" +dependencies = [ + "iana-time-zone", + "num-traits", + "serde", + "windows-link", +] + +[[package]] +name = "convert_case" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "counter-fixture" +version = "0.0.0" +dependencies = [ + "anyhow", + "patchbay", + "tokio", +] + +[[package]] +name = "derive_more" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn", + "unicode-xid", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "find-msvc-tools" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "iana-time-zone" +version = "0.1.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "ipnet" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" +dependencies = [ + "serde", +] + +[[package]] +name = "itoa" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" + +[[package]] +name = "js-sys" +version = "0.3.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] 
+name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "wasi", + "windows-sys", +] + +[[package]] +name = "netlink-packet-core" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3463cbb78394cb0141e2c926b93fc2197e473394b761986eca3b9da2c63ae0f4" +dependencies = [ + "paste", +] + +[[package]] +name = "netlink-packet-route" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ce3636fa715e988114552619582b530481fd5ef176a1e5c1bf024077c2c9445" +dependencies = [ + "bitflags", + "libc", + "log", + "netlink-packet-core", +] + +[[package]] +name = "netlink-proto" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b65d130ee111430e47eed7896ea43ca693c387f097dd97376bffafbf25812128" +dependencies = [ + "bytes", + "futures", + "log", + "netlink-packet-core", + "netlink-sys", + "thiserror 2.0.18", +] + +[[package]] +name = "netlink-sys" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd6c30ed10fa69cc491d491b85cc971f6bdeb8e7367b7cde2ee6cc878d583fae" +dependencies = [ + "bytes", + "futures-util", + "libc", + "log", + "tokio", +] + +[[package]] +name = "nix" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" 
+dependencies = [ + "bitflags", + "cfg-if", + "cfg_aliases", + "libc", +] + +[[package]] +name = "nu-ansi-term" +version = "0.50.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "patchbay" +version = "0.1.0" +dependencies = [ + "anyhow", + "chrono", + "derive_more", + "futures", + "ipnet", + "libc", + "nix", + "rtnetlink", + "serde", + "serde_json", + "strum", + "tokio", + "tokio-util", + "toml", + "tracing", + "tracing-core", + "tracing-subscriber", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "regex-automata" +version = "0.4.14" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" + +[[package]] +name = "rtnetlink" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b960d5d873a75b5be9761b1e73b146f52dddcd27bac75263f40fba686d4d7b5" +dependencies = [ + "futures-channel", + "futures-util", + "log", + "netlink-packet-core", + "netlink-packet-route", + "netlink-proto", + "netlink-sys", + "nix", + "thiserror 1.0.69", + "tokio", +] + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = 
"1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_spanned" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "876ac351060d4f882bb1032b6369eb0aef79ad9df1ea8bc404874d8cc3d0cd98" +dependencies = [ + "serde_core", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "socket2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "strum" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9628de9b8791db39ceda2b119bbe13134770b56c138ec1d3af810d045c04f9bd" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab85eea0270ee17587ed4156089e10b9e6880ee688791d45a905f5b1ca36f664" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" +dependencies = [ + "thiserror-impl 2.0.18", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "tokio" +version = "1.50.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" +dependencies = [ + "bytes", + "libc", + "mio", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys", +] + +[[package]] +name = "tokio-macros" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "1.1.0+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8195ca05e4eb728f4ba94f3e3291661320af739c4e43779cbdfae82ab239fcc" +dependencies = [ + "indexmap", + "serde_core", + "serde_spanned", + "toml_datetime", + "toml_parser", + "toml_writer", + "winnow", +] + +[[package]] +name = "toml_datetime" +version = "1.1.0+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97251a7c317e03ad83774a8752a7e81fb6067740609f75ea2b585b569a59198f" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_parser" +version = "1.1.0+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2334f11ee363607eb04df9b8fc8a13ca1715a72ba8662a26ac285c98aabb4011" +dependencies = [ + "winnow", +] + +[[package]] +name = "toml_writer" +version = "1.1.0+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d282ade6016312faf3e41e57ebbba0c073e4056dab1232ab1cb624199648f8ed" + +[[package]] +name = "tracing" +version = "0.1.44" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7f578e5945fb242538965c2d0b04418d38ec25c79d160cd279bf0731c8d319" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + "sharded-slab", + "thread_local", + "tracing", + "tracing-core", +] + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-segmentation" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da36089a805484bcccfffe0739803392c8298778a2d2f09febf76fac5ad9025b" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "winnow" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a90e88e4667264a994d34e6d1ab2d26d398dcdca8b7f52bec8668957517fc7d8" + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/patchbay-cli/tests/fixtures/counter/Cargo.toml b/patchbay-cli/tests/fixtures/counter/Cargo.toml new file mode 100644 index 0000000..0bd5710 --- /dev/null +++ b/patchbay-cli/tests/fixtures/counter/Cargo.toml @@ -0,0 +1,14 @@ +[workspace] + +[package] +name = "counter-fixture" +version = "0.0.0" +edition = "2021" +publish = false + +[dev-dependencies] +patchbay = { path = "../../../../patchbay" } +ctor = "0.6" +testdir = "0.9" +tokio = { version = "1", features = ["rt", 
"macros", "net", "time"] } +anyhow = "1" diff --git a/patchbay-cli/tests/fixtures/counter/tests/counter.rs b/patchbay-cli/tests/fixtures/counter/tests/counter.rs new file mode 100644 index 0000000..e36ba0d --- /dev/null +++ b/patchbay-cli/tests/fixtures/counter/tests/counter.rs @@ -0,0 +1,89 @@ +//! Fixture test: sends UDP packets between two patchbay devices. +//! PACKET_COUNT and THRESHOLD are compile-time constants that the +//! integration test modifies between commits to create regressions. + +const PACKET_COUNT: u32 = 5; +const THRESHOLD: u32 = 3; + +#[cfg(target_os = "linux")] +#[ctor::ctor] +fn init() { + patchbay::init_userns().expect("init_userns"); +} + +#[tokio::test(flavor = "current_thread")] +async fn udp_counter() -> anyhow::Result<()> { + let outdir = testdir::testdir!(); + let lab = patchbay::Lab::with_opts( + patchbay::LabOpts::default() + .outdir(patchbay::OutDir::Nested(outdir)) + .label("udp-counter"), + ) + .await?; + let dc = lab.add_router("dc").build().await?; + let sender = lab + .add_device("sender") + .iface("eth0", dc.id(), None) + .build() + .await?; + let receiver = lab + .add_device("receiver") + .iface("eth0", dc.id(), None) + .build() + .await?; + + let recv_ip = receiver.ip().unwrap(); + let port: u16 = 9999; + + // Spawn UDP listener in the receiver's namespace. + let rx_handle = receiver.spawn(move |_dev| async move { + let sock = tokio::net::UdpSocket::bind(format!("{recv_ip}:{port}")).await?; + let mut count = 0u32; + let mut buf = [0u8; 64]; + for _ in 0..PACKET_COUNT { + let _ = tokio::time::timeout( + std::time::Duration::from_secs(5), + sock.recv_from(&mut buf), + ) + .await??; + count += 1; + } + Ok::<_, anyhow::Error>(count) + })?; + + // Give the listener a moment to bind. + tokio::time::sleep(std::time::Duration::from_millis(50)).await; + + // Send packets from the sender's namespace. 
+ let send_ip = sender.ip().unwrap(); + let tx_handle = sender.spawn(move |_dev| async move { + let sock = tokio::net::UdpSocket::bind(format!("{send_ip}:0")).await?; + for i in 0..PACKET_COUNT { + sock.send_to( + format!("pkt-{i}").as_bytes(), + format!("{recv_ip}:{port}"), + ) + .await?; + tokio::time::sleep(std::time::Duration::from_millis(10)).await; + } + Ok::<_, anyhow::Error>(()) + })?; + + tx_handle.await??; + let received = rx_handle.await??; + + sender.record("packet_count", PACKET_COUNT as f64); + assert_eq!(received, PACKET_COUNT); + Ok(()) +} + +#[tokio::test(flavor = "current_thread")] +async fn udp_threshold() -> anyhow::Result<()> { + assert!( + PACKET_COUNT >= THRESHOLD, + "packet count {} below threshold {}", + PACKET_COUNT, + THRESHOLD + ); + Ok(()) +} diff --git a/patchbay-runner/Cargo.toml b/patchbay-runner/Cargo.toml index c321340..e61addd 100644 --- a/patchbay-runner/Cargo.toml +++ b/patchbay-runner/Cargo.toml @@ -8,20 +8,14 @@ license.workspace = true authors.workspace = true repository.workspace = true -[[bin]] -name = "patchbay" -path = "src/main.rs" - [dependencies] anyhow = "1" chrono = { version = "0.4", default-features = false, features = ["clock"] } clap = { version = "4", features = ["derive"] } comfy-table = "7" -ctor = "0.6" flate2 = "1" -patchbay = { path = "../patchbay" } -patchbay-server = { path = "../patchbay-server", optional = true } -patchbay-utils = { path = "../patchbay-utils" } +patchbay = { workspace = true } +patchbay-utils = { workspace = true } nix = { version = "0.30", features = ["signal", "process"] } rcgen = "0.14" regex = "1" @@ -33,10 +27,6 @@ toml = "1.0" tracing = "0.1" tracing-subscriber = { version = "0.3", features = ["env-filter"] } -[features] -default = ["serve"] -serve = ["dep:patchbay-server"] - [dev-dependencies] n0-tracing-test = "0.3.0" serial_test = "3" diff --git a/patchbay-runner/src/lib.rs b/patchbay-runner/src/lib.rs index afc044a..fb0b855 100644 --- a/patchbay-runner/src/lib.rs +++ 
b/patchbay-runner/src/lib.rs @@ -8,8 +8,7 @@ pub use patchbay::{ }; pub use patchbay_utils::assets::BinaryOverride; -mod init; -pub(crate) mod sim; +pub mod sim; use std::{path::PathBuf, time::Duration}; diff --git a/patchbay-runner/src/sim/progress.rs b/patchbay-runner/src/sim/progress.rs index 1b52a89..94573f9 100644 --- a/patchbay-runner/src/sim/progress.rs +++ b/patchbay-runner/src/sim/progress.rs @@ -24,8 +24,12 @@ pub(crate) struct ManifestSimSummary { pub(crate) error: Option, } +/// Per-sim execution report written to `manifest.json`. +/// +/// Distinct from [`patchbay_utils::manifest::RunManifest`] which is the +/// unified `run.json` metadata (git context, outcome, etc.). #[derive(Debug, Clone, Serialize)] -pub(crate) struct RunManifest { +pub(crate) struct SimRunReport { pub(crate) run: String, pub(crate) started_at: String, pub(crate) status: String, @@ -70,8 +74,8 @@ pub(crate) fn now_stamp() -> String { chrono::Utc::now().format("%y%m%d-%H%M%S").to_string() } -pub(crate) async fn write_run_manifest(run_root: &Path, manifest: &RunManifest) -> Result<()> { - write_json(run_root.join("manifest.json"), manifest).await +pub(crate) async fn write_sim_report(run_root: &Path, report: &SimRunReport) -> Result<()> { + write_json(run_root.join("manifest.json"), report).await } pub(crate) async fn write_progress(run_root: &Path, progress: &RunProgress) -> Result<()> { diff --git a/patchbay-runner/src/sim/report.rs b/patchbay-runner/src/sim/report.rs index 4422462..19ef73c 100644 --- a/patchbay-runner/src/sim/report.rs +++ b/patchbay-runner/src/sim/report.rs @@ -278,7 +278,7 @@ pub async fn write_combined_results_for_runs(work_root: &Path, run_names: &[Stri Ok(()) } -/// Print a concise per-sim summary for one invocation run. +/// Print a concise per-sim summary for a group of runs. 
pub fn print_run_summary_table_for_runs(work_root: &Path, run_names: &[String]) -> Result<()> { let runs = load_runs(work_root, run_names)?; if runs.is_empty() { diff --git a/patchbay-runner/src/sim/runner.rs b/patchbay-runner/src/sim/runner.rs index 56497e0..84d93d4 100644 --- a/patchbay-runner/src/sim/runner.rs +++ b/patchbay-runner/src/sim/runner.rs @@ -20,7 +20,7 @@ use crate::sim::{ env::SimEnv, progress::{ collect_run_environment, format_timestamp, now_stamp, write_json, write_progress, - write_run_manifest, ManifestSimSummary, ProgressSim, RunManifest, RunProgress, + write_sim_report, ManifestSimSummary, ProgressSim, RunProgress, SimRunReport, }, report::{ print_run_summary_table_for_runs, write_combined_results_for_runs, write_results, @@ -201,8 +201,8 @@ pub async fn run_sims( }; write_progress(&run_root, &progress).await?; let initial_manifest = - build_run_manifest(&run_root, run_start, None, None, None, &progress, &[])?; - write_run_manifest(&run_root, &initial_manifest).await?; + build_sim_report(&run_root, run_start, None, None, None, &progress, &[])?; + write_sim_report(&run_root, &initial_manifest).await?; let mut sim_dir_names = Vec::new(); let mut outcomes = Vec::new(); @@ -254,8 +254,8 @@ pub async fn run_sims( .await .context("write incremental combined results")?; let running_manifest = - build_run_manifest(&run_root, run_start, None, None, None, &progress, &outcomes)?; - write_run_manifest(&run_root, &running_manifest).await?; + build_sim_report(&run_root, run_start, None, None, None, &progress, &outcomes)?; + write_sim_report(&run_root, &running_manifest).await?; } write_combined_results_for_runs(&run_root, &sim_dir_names) .await @@ -266,7 +266,7 @@ pub async fn run_sims( progress.status = "done".to_string(); progress.updated_at = format_timestamp(run_end); write_progress(&run_root, &progress).await?; - let run_manifest = build_run_manifest( + let run_manifest = build_sim_report( &run_root, run_start, Some(run_end), @@ -275,7 +275,7 @@ pub 
async fn run_sims( &progress, &outcomes, )?; - write_run_manifest(&run_root, &run_manifest).await?; + write_sim_report(&run_root, &run_manifest).await?; let failed: Vec<&SimRunOutcome> = outcomes.iter().filter(|outcome| !outcome.success).collect(); if !failed.is_empty() { let mut msg = String::from("one or more simulations failed:"); @@ -742,7 +742,7 @@ fn prepare_run_root(work_root: &Path) -> Result { std::fs::create_dir_all(work_root) .with_context(|| format!("create work root {}", work_root.display()))?; let stamp = now_stamp(); - let run_base = format!("sim-{}", stamp); + let run_base = format!("run-{}", stamp); let run_dir = create_unique_dir(work_root, &run_base)?; let run_name = run_dir .file_name() @@ -962,7 +962,7 @@ async fn write_sim_summary(run_work_dir: &Path, summary: &SimSummary) -> Result< write_json(run_work_dir.join("sim.json"), summary).await } -fn build_run_manifest( +fn build_sim_report( run_root: &Path, started_at: SystemTime, ended_at: Option, @@ -970,7 +970,7 @@ fn build_run_manifest( success: Option, progress: &RunProgress, outcomes: &[SimRunOutcome], -) -> Result { +) -> Result { let run = run_root .file_name() .and_then(|s| s.to_str()) @@ -1006,7 +1006,7 @@ fn build_run_manifest( } }) .collect(); - Ok(RunManifest { + Ok(SimRunReport { run, started_at: format_timestamp(started_at), status: progress.status.clone(), diff --git a/patchbay-runner/src/sim/steps.rs b/patchbay-runner/src/sim/steps.rs index 55a1962..02dc887 100644 --- a/patchbay-runner/src/sim/steps.rs +++ b/patchbay-runner/src/sim/steps.rs @@ -851,7 +851,7 @@ pub(crate) fn join_pump(handle: thread::JoinHandle>, label: &str) -> } } -pub(crate) fn parse_duration(s: &str) -> Result { +pub fn parse_duration(s: &str) -> Result { if let Some(n) = s.strip_suffix("ms") { return Ok(Duration::from_millis( n.trim().parse().context("parse milliseconds")?, diff --git a/patchbay-server/Cargo.toml b/patchbay-server/Cargo.toml index cafcc03..e09d6fb 100644 --- a/patchbay-server/Cargo.toml +++ 
b/patchbay-server/Cargo.toml @@ -29,5 +29,11 @@ tracing = "0.1" tracing-subscriber = { version = "0.3", features = ["env-filter"] } uuid = { version = "1", features = ["v4"] } chrono = "0.4" +patchbay-utils = { path = "../patchbay-utils" } axum-server = "0.7" rustls = "0.23" + +[package.metadata.binstall] +pkg-url = "{ repo }/releases/download/rolling/patchbay-serve-{ target }.tar.gz" +bin-dir = "{ bin }-{ target }{ binary-ext }" +pkg-fmt = "tgz" diff --git a/patchbay-server/README.md b/patchbay-server/README.md index 2b9b036..10f1337 100644 --- a/patchbay-server/README.md +++ b/patchbay-server/README.md @@ -50,10 +50,10 @@ Body: tar.gz of the run directory Returns: ```json -{"ok": true, "project": "myproject", "run": "myproject-20260320_120000-uuid", "invocation": "myproject-20260320_120000-uuid"} +{"ok": true, "project": "myproject", "run": "myproject-20260320_120000-uuid", "group": "myproject-20260320_120000-uuid"} ``` -The `invocation` value is used for deep linking: `https://your-server/#/inv/{invocation}` +The `group` value is used for deep linking: `https://your-server/group/{group}` ## Flags diff --git a/patchbay-server/github-workflow-template.yml b/patchbay-server/github-workflow-template.yml index a7e6f84..c4a0aeb 100644 --- a/patchbay-server/github-workflow-template.yml +++ b/patchbay-server/github-workflow-template.yml @@ -29,90 +29,75 @@ jobs: # ── Build tools — adjust to your project ── - uses: dtolnay/rust-toolchain@stable + # ── Install patchbay CLI ── + # Install pre-built binary via binstall (fast), or build from source. 
+ - name: Install patchbay CLI + run: | + curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash + cargo binstall patchbay-cli --git-url https://github.com/n0-computer/patchbay --no-confirm \ + || cargo install patchbay-cli --git https://github.com/n0-computer/patchbay + # ── Run tests — replace with your own command ── + # Use `patchbay test` which writes structured output to .patchbay/work/. + # --persist keeps the run directory after tests complete. - name: Run tests id: tests - run: cargo test --release -p my-crate --test patchbay -- --test-threads=1 + run: patchbay test --persist -p my-crate --test my-test env: RUST_LOG: ${{ runner.debug && 'TRACE' || 'DEBUG' }} - # ── Push results to patchbay-serve ── - - name: Push results + # ── Upload results to patchbay-serve ── + - name: Upload results + id: upload if: always() env: PATCHBAY_URL: ${{ secrets.PATCHBAY_URL }} PATCHBAY_API_KEY: ${{ secrets.PATCHBAY_API_KEY }} - TEST_STATUS: ${{ steps.tests.outcome }} run: | set -euo pipefail PROJECT="${{ github.event.repository.name }}" - TESTDIR="$(cargo metadata --format-version=1 --no-deps | jq -r .target_directory)/testdir-current" - [ ! -d "$TESTDIR" ] && echo "No testdir output, skipping" && exit 0 - cat > "$TESTDIR/run.json" </dev/null | head -1) + if [ -z "$RUN_DIR" ]; then + echo "No run directory found, skipping upload" + exit 0 + fi - RESPONSE=$(tar -czf - -C "$TESTDIR" . 
| \ - curl -s -w "\n%{http_code}" -X POST \ - -H "Authorization: Bearer $PATCHBAY_API_KEY" \ - -H "Content-Type: application/gzip" \ - --data-binary @- "$PATCHBAY_URL/api/push/$PROJECT") - HTTP_CODE=$(echo "$RESPONSE" | tail -1) - BODY=$(echo "$RESPONSE" | head -n -1) - [ "$HTTP_CODE" != "200" ] && echo "Push failed ($HTTP_CODE): $BODY" && exit 1 - - INVOCATION=$(echo "$BODY" | jq -r .invocation) - echo "PATCHBAY_VIEW_URL=$PATCHBAY_URL/#/inv/$INVOCATION" >> "$GITHUB_ENV" - echo "PATCHBAY_TEST_STATUS=$TEST_STATUS" >> "$GITHUB_ENV" - echo "Results: $PATCHBAY_URL/#/inv/$INVOCATION" + VIEW_URL=$(patchbay upload "$RUN_DIR" \ + --project "$PROJECT" \ + --url "$PATCHBAY_URL" \ + --api-key "$PATCHBAY_API_KEY") + echo "view_url=$VIEW_URL" >> "$GITHUB_OUTPUT" # ── Post or update PR comment ── - name: Comment on PR - if: always() && env.PATCHBAY_VIEW_URL + if: always() && github.event.pull_request uses: actions/github-script@v7 with: script: | - let prNumber = context.issue?.number; - if (!prNumber) { - const { data: prs } = await github.rest.pulls.list({ - owner: context.repo.owner, repo: context.repo.repo, - head: `${context.repo.owner}:${{ github.ref_name }}`, - state: 'open', - }); - if (!prs.length) return; - prNumber = prs[0].number; - } - - const status = process.env.PATCHBAY_TEST_STATUS; + const marker = '<!-- patchbay-results -->'; + const status = '${{ steps.tests.outcome }}'; const icon = status === 'success' ? 
'✅' : '❌'; const sha = '${{ github.sha }}'; const shortSha = sha.slice(0, 7); const commitUrl = `${{ github.server_url }}/${{ github.repository }}/commit/${sha}`; const date = new Date().toISOString().replace('T', ' ').slice(0, 19) + ' UTC'; - const marker = ''; + const viewUrl = '${{ steps.upload.outputs.view_url }}'; const body = [ marker, - `${icon} **patchbay:** ${status} | ${process.env.PATCHBAY_VIEW_URL}`, + `${icon} **patchbay:** ${status} | [view results](${viewUrl})`, `${date} · [\`${shortSha}\`](${commitUrl})`, ].join('\n'); const { data: comments } = await github.rest.issues.listComments({ - owner: context.repo.owner, repo: context.repo.repo, issue_number: prNumber, + owner: context.repo.owner, repo: context.repo.repo, + issue_number: context.issue.number, }); const existing = comments.find(c => c.body.includes(marker)); const params = { owner: context.repo.owner, repo: context.repo.repo }; if (existing) { await github.rest.issues.updateComment({ ...params, comment_id: existing.id, body }); } else { - await github.rest.issues.createComment({ ...params, issue_number: prNumber, body }); + await github.rest.issues.createComment({ ...params, issue_number: context.issue.number, body }); } diff --git a/patchbay-server/src/lib.rs b/patchbay-server/src/lib.rs index d032d6a..41702b1 100644 --- a/patchbay-server/src/lib.rs +++ b/patchbay-server/src/lib.rs @@ -39,6 +39,7 @@ const STATE_JSON: &str = "state.json"; /// Per-node full tracing log suffix. const TRACING_JSONL_EXT: &str = "tracing.jsonl"; +const METRICS_JSONL_EXT: &str = "metrics.jsonl"; /// Default bind address for the devtools server. pub const DEFAULT_UI_BIND: &str = "127.0.0.1:7421"; @@ -56,8 +57,8 @@ const RUN_SCAN_INTERVAL: Duration = Duration::from_secs(2); /// Metadata for a single Lab run directory. /// -/// A directory is a run if it contains `events.jsonl`. -#[derive(Debug, Clone, Serialize)] +/// A directory is a run if it contains `events.jsonl` or `run.json`. 
+#[derive(Debug, Clone, Serialize, Deserialize)] pub struct RunInfo { /// Directory name (e.g. `"20260303_143001-my-lab"`). pub name: String, @@ -71,9 +72,10 @@ pub struct RunInfo { /// This is the per-sim lab state, not the CI test outcome — see /// [`RunManifest::test_outcome`] for the overall pass/fail from CI. pub status: Option, - /// Invocation group (first path component for nested runs, `None` for flat/direct). - pub invocation: Option, - /// CI manifest from `run.json` in the invocation directory, if present. + /// Group (first path component for nested runs, `None` for flat/direct). + #[serde(alias = "batch")] + pub group: Option, + /// CI manifest from `run.json` in the group directory, if present. #[serde(skip_serializing_if = "Option::is_none")] pub manifest: Option, } @@ -88,7 +90,7 @@ const MAX_SCAN_DEPTH: usize = 3; /// that contain `events.jsonl`. pub fn discover_runs(base: &Path) -> anyhow::Result> { // If the base dir itself is a run, serve only that. - if base.join(EVENTS_JSONL).exists() { + if base.join(EVENTS_JSONL).exists() || base.join(RUN_JSON).exists() { let name = base .file_name() .map(|n| n.to_string_lossy().into_owned()) @@ -99,7 +101,7 @@ pub fn discover_runs(base: &Path) -> anyhow::Result> { path: base.to_path_buf(), label, status, - invocation: None, + group: None, manifest: read_run_json(base), }]); } @@ -107,14 +109,14 @@ pub fn discover_runs(base: &Path) -> anyhow::Result> { let mut runs = Vec::new(); scan_runs_recursive(base, base, 1, &mut runs)?; - // Attach run.json manifests from invocation directories. + // Attach run.json manifests from group directories. 
let mut manifest_cache: std::collections::HashMap> = std::collections::HashMap::new(); for run in &mut runs { - let inv = run.invocation.clone().unwrap_or_else(|| run.name.clone()); + let group_key = run.group.clone().unwrap_or_else(|| run.name.clone()); let manifest = manifest_cache - .entry(inv.clone()) - .or_insert_with(|| read_run_json(&base.join(&inv))) + .entry(group_key.clone()) + .or_insert_with(|| read_run_json(&base.join(&group_key))) .clone(); run.manifest = manifest; } @@ -146,18 +148,18 @@ fn scan_runs_recursive( if !path.is_dir() { continue; } - if path.join(EVENTS_JSONL).exists() { - // Use the relative path from root as the run name so nested - // runs are addressable via the API (e.g. "sim-20260305/ping-e2e"). + let has_events = path.join(EVENTS_JSONL).exists(); + let has_run_json = path.join(RUN_JSON).exists(); + + if has_events { + // Leaf run: has events.jsonl → it's an actual lab output dir. let name = path .strip_prefix(root) .unwrap_or(&path) .to_string_lossy() .into_owned(); let (label, status) = read_run_metadata(&path); - // Derive invocation from the first path component (the timestamped - // directory) when the run is nested more than one level deep. - let invocation = name + let group = name .split('/') .next() .filter(|first| *first != name) @@ -167,9 +169,13 @@ fn scan_runs_recursive( path, label, status, - invocation, + group, manifest: None, // populated after scan }); + } else if has_run_json { + // Group directory: has run.json but no events.jsonl. + // Recurse to find child runs, they inherit this manifest. + scan_runs_recursive(root, &path, depth + 1, runs)?; } else { scan_runs_recursive(root, &path, depth + 1, runs)?; } @@ -232,17 +238,28 @@ fn build_router(state: AppState) -> Router { let mut r = Router::new() .route("/", get(index_html)) .route("/runs", get(index_html)) + // SPA fallback: serve index.html for client-side routes. 
+ .route("/run/{*rest}", get(index_html)) + .route("/group/{*rest}", get(index_html)) + .route("/compare/{*rest}", get(index_html)) + .route("/inv/{*rest}", get(index_html)) .route("/api/runs", get(get_runs)) .route("/api/runs/subscribe", get(runs_sse)) + .route("/api/runs/{run}/manifest", get(get_run_manifest)) .route("/api/runs/{run}/state", get(get_run_state)) .route("/api/runs/{run}/events", get(run_events_sse)) .route("/api/runs/{run}/events.json", get(run_events_json)) .route("/api/runs/{run}/logs", get(get_run_logs)) .route("/api/runs/{run}/logs/{*path}", get(get_run_log_file)) .route("/api/runs/{run}/files/{*path}", get(get_run_file)) + .route( + "/api/groups/{name}/combined-results", + get(get_group_combined), + ) + // Legacy alias — keep for backward-compat (links shared on Discord). .route( "/api/invocations/{name}/combined-results", - get(get_invocation_combined), + get(get_group_combined), ); if state.push.is_some() { r = r.route("/api/push/{project}", post(push_run)); @@ -343,8 +360,55 @@ async fn index_html() -> Html<&'static str> { Html(include_str!("../../ui/dist/index.html")) } -async fn get_runs(State(state): State) -> impl IntoResponse { - let runs = discover_runs(&state.base).unwrap_or_default(); +#[derive(Deserialize)] +struct RunsQuery { + project: Option, + kind: Option, + limit: Option, + offset: Option, +} + +async fn get_runs( + Query(params): Query, + State(state): State, +) -> impl IntoResponse { + let mut runs = discover_runs(&state.base).unwrap_or_default(); + + // Filter by project (matched against manifest.project). + if let Some(ref project) = params.project { + runs.retain(|r| { + r.manifest + .as_ref() + .and_then(|m| m.project.as_deref()) + .map(|p| p == project) + .unwrap_or(false) + }); + } + + // Filter by kind (matched against manifest.kind, e.g. "test" or "sim"). 
+ if let Some(ref kind) = params.kind { + runs.retain(|r| { + r.manifest + .as_ref() + .map(|m| { + let k = serde_json::to_value(m.kind) + .ok() + .and_then(|v| v.as_str().map(String::from)); + k.as_deref() == Some(kind.as_str()) + }) + .unwrap_or(false) + }); + } + + // Pagination. + let offset = params.offset.unwrap_or(0); + if offset > 0 { + runs = runs.into_iter().skip(offset).collect(); + } + if let Some(limit) = params.limit { + runs.truncate(limit); + } + ( StatusCode::OK, [("content-type", "application/json")], @@ -369,6 +433,31 @@ struct EventsQuery { after: Option, } +async fn get_run_manifest( + AxPath(run): AxPath, + State(state): State, +) -> impl IntoResponse { + let Some(run_dir) = safe_run_dir(&state.base, &run) else { + return ( + StatusCode::FORBIDDEN, + [("content-type", "application/json")], + r#"{"error":"forbidden"}"#.to_string(), + ); + }; + match read_run_json(&run_dir) { + Some(manifest) => ( + StatusCode::OK, + [("content-type", "application/json")], + serde_json::to_string(&manifest).unwrap_or_else(|_| "null".to_string()), + ), + None => ( + StatusCode::NOT_FOUND, + [("content-type", "application/json")], + r#"{"error":"run.json not found"}"#.to_string(), + ), + } +} + async fn get_run_state( AxPath(run): AxPath, State(state): State, @@ -549,8 +638,8 @@ async fn get_run_file( serve_file(&file_path).await } -/// Serve `combined-results.json` from an invocation directory. -async fn get_invocation_combined( +/// Serve `combined-results.json` from a group directory. +async fn get_group_combined( AxPath(name): AxPath, State(state): State, ) -> impl IntoResponse { @@ -561,8 +650,8 @@ async fn get_invocation_combined( r#"{"error":"forbidden"}"#.to_string(), ); } - let inv_dir = state.base.join(&name); - let file = inv_dir.join("combined-results.json"); + let group_dir = state.base.join(&name); + let file = group_dir.join("combined-results.json"); // Verify the resolved path stays under base. 
let ok = file .canonicalize() @@ -711,6 +800,8 @@ enum LogKind { TracingJsonl, /// Lab-level event log (`events.jsonl`). LabEvents, + /// Per-node metrics (`*.metrics.jsonl`). + Metrics, /// Generic JSON lines file (`*.jsonl`). Jsonl, /// Single JSON document (`*.json`). @@ -734,6 +825,9 @@ fn detect_log_kind(filename: &str, sample: &[u8]) -> Option { if filename == EVENTS_JSONL { return Some(LogKind::LabEvents); } + if filename.ends_with(&format!(".{METRICS_JSONL_EXT}")) { + return Some(LogKind::Metrics); + } if filename.ends_with(&format!(".{TRACING_JSONL_EXT}")) { return Some(LogKind::TracingJsonl); } @@ -822,44 +916,15 @@ async fn scan_log_files(run_dir: &Path) -> Vec { // ── Run manifest (run.json) ───────────────────────────────────────── -/// Manifest included with pushed runs, providing CI context. -#[derive(Debug, Clone, Serialize, Deserialize, Default)] -pub struct RunManifest { - /// Project name (from URL path). - #[serde(default)] - pub project: String, - /// Git branch name. - #[serde(default)] - pub branch: Option, - /// Git commit SHA. - #[serde(default)] - pub commit: Option, - /// PR number. - #[serde(default)] - pub pr: Option, - /// PR URL. - #[serde(default)] - pub pr_url: Option, - /// When this run was created. - #[serde(default)] - pub created_at: Option, - /// Human-readable run title/label. - #[serde(default)] - pub title: Option, - /// Overall CI test outcome (e.g. `"success"`, `"failure"`). - /// - /// This is the result of the CI test step, not the lab lifecycle status. - /// The lab lifecycle status lives in `state.json` as `RunInfo::status` - /// and tracks per-sim states like "running" or "finished". 
- #[serde(default, alias = "status")] - pub test_outcome: Option, -} +pub use patchbay_utils::manifest::RunManifest; const RUN_JSON: &str = "run.json"; fn read_run_json(dir: &Path) -> Option { let text = fs::read_to_string(dir.join(RUN_JSON)).ok()?; - serde_json::from_str(&text).ok() + let mut manifest: RunManifest = serde_json::from_str(&text).ok()?; + manifest.resolve_test_dirs(dir); + Some(manifest) } // ── Push endpoint ─────────────────────────────────────────────────── @@ -907,27 +972,75 @@ async fn push_run( ); } - // Extract tar.gz + // Extract tar.gz — iterate entries manually to reject unsafe paths. let decoder = flate2::read::GzDecoder::new(&body[..]); let mut archive = tar::Archive::new(decoder); - if let Err(e) = archive.unpack(&run_dir) { - // Clean up on failure - let _ = std::fs::remove_dir_all(&run_dir); - return ( - StatusCode::BAD_REQUEST, - format!("failed to extract archive: {e}"), - ); + archive.set_preserve_permissions(false); + archive.set_unpack_xattrs(false); + archive.set_overwrite(false); + + let entries = match archive.entries() { + Ok(e) => e, + Err(e) => { + let _ = std::fs::remove_dir_all(&run_dir); + return ( + StatusCode::BAD_REQUEST, + format!("failed to read archive entries: {e}"), + ); + } + }; + for entry in entries { + let mut entry = match entry { + Ok(e) => e, + Err(e) => { + let _ = std::fs::remove_dir_all(&run_dir); + return ( + StatusCode::BAD_REQUEST, + format!("failed to read archive entry: {e}"), + ); + } + }; + let path = match entry.path() { + Ok(p) => p.into_owned(), + Err(e) => { + let _ = std::fs::remove_dir_all(&run_dir); + return ( + StatusCode::BAD_REQUEST, + format!("failed to read entry path: {e}"), + ); + } + }; + if path.is_absolute() + || path + .components() + .any(|c| c == std::path::Component::ParentDir) + { + let _ = std::fs::remove_dir_all(&run_dir); + return ( + StatusCode::BAD_REQUEST, + "invalid path in archive".to_string(), + ); + } + if let Err(e) = entry.unpack_in(&run_dir) { + let _ = 
std::fs::remove_dir_all(&run_dir); + return ( + StatusCode::BAD_REQUEST, + format!("failed to extract archive: {e}"), + ); + } } // Notify subscribers about new run let _ = state.runs_tx.send(()); - // run_name is the invocation name (first path component for all sims inside) + // run_name is the group name (first path component for all sims inside) let result = serde_json::json!({ "ok": true, "project": project, "run": run_name, - "invocation": run_name, + "group": run_name, + "batch": run_name, // backward compat + "invocation": run_name, // backward compat (old CI templates read .invocation) }); (StatusCode::OK, serde_json::to_string(&result).unwrap()) @@ -984,7 +1097,7 @@ fn dir_size(path: &Path) -> u64 { let mut total = 0; if let Ok(entries) = fs::read_dir(path) { for entry in entries.flatten() { - let ft = entry.file_type().unwrap_or_else(|_| unreachable!()); + let Ok(ft) = entry.file_type() else { continue }; if ft.is_file() { total += entry.metadata().map(|m| m.len()).unwrap_or(0); } else if ft.is_dir() { diff --git a/patchbay-utils/Cargo.toml b/patchbay-utils/Cargo.toml index edcd428..e6a6a08 100644 --- a/patchbay-utils/Cargo.toml +++ b/patchbay-utils/Cargo.toml @@ -17,4 +17,5 @@ serde_json = "1" sha2 = "0.10" tar = "0.4" tracing = "0.1" +chrono = { version = "0.4", default-features = false, features = ["clock", "serde", "std"] } tracing-subscriber = { version = "0.3", features = ["env-filter"] } diff --git a/patchbay-utils/src/lib.rs b/patchbay-utils/src/lib.rs index f1d651d..d944c34 100644 --- a/patchbay-utils/src/lib.rs +++ b/patchbay-utils/src/lib.rs @@ -1,5 +1,6 @@ pub mod assets; pub mod binary_cache; +pub mod manifest; /// Initialises tracing for the current process (idempotent). /// diff --git a/patchbay-utils/src/manifest.rs b/patchbay-utils/src/manifest.rs new file mode 100644 index 0000000..0869011 --- /dev/null +++ b/patchbay-utils/src/manifest.rs @@ -0,0 +1,483 @@ +//! Unified run manifest types shared across the patchbay workspace. +//! +//! 
Every execution (test or sim) writes a `run.json` manifest with git context. +//! This module defines the canonical types for that manifest. + +use std::{ + fs, + path::{Path, PathBuf}, + process::Command, + time::Duration, +}; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +// ── Duration serde helpers ────────────────────────────────────────── + +/// Serialize/deserialize a [`Duration`] as integer milliseconds. +pub mod duration_ms { + use std::time::Duration; + + use serde::{Deserialize, Deserializer, Serializer}; + + pub fn serialize(d: &Duration, s: S) -> Result { + s.serialize_u64(d.as_millis() as u64) + } + + pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result { + Ok(Duration::from_millis(u64::deserialize(d)?)) + } +} + +/// Serialize/deserialize an `Option` as integer milliseconds. +pub mod option_duration_ms { + use std::time::Duration; + + use serde::{Deserialize, Deserializer, Serializer}; + + pub fn serialize(d: &Option, s: S) -> Result { + match d { + Some(d) => s.serialize_u64(d.as_millis() as u64), + None => s.serialize_none(), + } + } + + pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result, D::Error> { + Ok(Option::::deserialize(d)?.map(Duration::from_millis)) + } +} + +// ── Core types ────────────────────────────────────────────────────── + +/// What produced a run. +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum RunKind { + Test, + #[default] + Sim, +} + +/// Per-test pass/fail/ignored status. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum TestStatus { + Pass, + Fail, + Ignored, +} + +/// A single test result with name, status, and optional duration. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TestResult { + pub name: String, + pub status: TestStatus, + /// Test duration, serialized as integer milliseconds. 
+ #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "option_duration_ms" + )] + pub duration: Option, + /// Relative directory path for this test's output (e.g. `"patchbay/holepunch_simple"`). + /// Populated by the server when the directory exists on disk. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub dir: Option, +} + +/// Unified manifest written as `run.json` alongside every run. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RunManifest { + // ── Identity ── + #[serde(default)] + pub kind: RunKind, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub project: Option, + + // ── Git context ── + #[serde(default, skip_serializing_if = "Option::is_none")] + pub commit: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub branch: Option, + #[serde(default)] + pub dirty: bool, + + // ── CI context (populated from env vars when available) ── + #[serde(default, skip_serializing_if = "Option::is_none")] + pub pr: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub pr_url: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub title: Option, + + // ── Execution ── + #[serde(default, skip_serializing_if = "Option::is_none")] + pub started_at: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub ended_at: Option>, + /// Total runtime, serialized as integer milliseconds. + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "option_duration_ms" + )] + pub runtime: Option, + + // ── Outcome ── + /// "pass" or "fail". Aliases for backward compat with old run.json fields. 
+ #[serde( + default, + skip_serializing_if = "Option::is_none", + alias = "test_outcome", + alias = "status" + )] + pub outcome: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub pass: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub fail: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub total: Option, + + // ── Per-test results (kind == Test only) ── + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub tests: Vec, + + // ── Environment ── + #[serde(default, skip_serializing_if = "Option::is_none")] + pub os: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub arch: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub patchbay_version: Option, +} + +impl RunManifest { + /// Populate `dir` fields by scanning the run directory for subdirs that + /// contain `events.jsonl`, then matching them to test results by the bare + /// function name (last path segment of the dir, last token of the nextest name). + pub fn resolve_test_dirs(&mut self, run_dir: &std::path::Path) { + // Collect all dirs with events.jsonl, recursively (up to 2 levels). + let mut test_dirs: Vec = Vec::new(); + collect_event_dirs(run_dir, run_dir, 0, 2, &mut test_dirs); + + // Build a map: bare function name → relative dir path. + // e.g. "holepunch_simple" → "patchbay/holepunch_simple" + let dir_by_fn: std::collections::HashMap<&str, &str> = test_dirs + .iter() + .filter_map(|d| { + let fn_name = d.rsplit('/').next()?; + Some((fn_name, d.as_str())) + }) + .collect(); + + // Match each test result to a directory by bare function name. 
+ // Nextest name: "iroh::patchbay holepunch_simple" → last token "holepunch_simple" + for test in &mut self.tests { + let fn_name = test + .name + .rsplit_once(' ') + .map(|(_, name)| name) + .unwrap_or(&test.name); + if let Some(&dir) = dir_by_fn.get(fn_name) { + test.dir = Some(dir.to_string()); + } + } + } +} + +/// Recursively collect relative paths to directories containing `events.jsonl`. +fn collect_event_dirs( + root: &std::path::Path, + dir: &std::path::Path, + depth: usize, + max_depth: usize, + out: &mut Vec, +) { + if depth > max_depth { + return; + } + let entries = match std::fs::read_dir(dir) { + Ok(e) => e, + Err(_) => return, + }; + for entry in entries.flatten() { + let path = entry.path(); + if !path.is_dir() { + continue; + } + if path.join("events.jsonl").exists() { + if let Ok(rel) = path.strip_prefix(root) { + out.push(rel.to_string_lossy().into_owned()); + } + } else { + collect_event_dirs(root, &path, depth + 1, max_depth, out); + } + } +} + +// ── Git helpers ───────────────────────────────────────────────────── + +/// Snapshot of git repository state. +pub struct GitContext { + pub commit: Option, + pub branch: Option, + pub dirty: bool, +} + +/// Capture the current git HEAD commit, branch, and dirty state. +pub fn git_context() -> GitContext { + let commit = Command::new("git") + .args(["rev-parse", "HEAD"]) + .output() + .ok() + .filter(|o| o.status.success()) + .and_then(|o| String::from_utf8(o.stdout).ok()) + .map(|s| s.trim().to_string()); + let branch = Command::new("git") + .args(["rev-parse", "--abbrev-ref", "HEAD"]) + .output() + .ok() + .filter(|o| o.status.success()) + .and_then(|o| String::from_utf8(o.stdout).ok()) + .map(|s| s.trim().to_string()) + .filter(|s| s != "HEAD"); + // Check both unstaged and staged changes. 
+ let unstaged = !Command::new("git") + .args(["diff", "--quiet"]) + .status() + .map(|s| s.success()) + .unwrap_or(true); + let staged = !Command::new("git") + .args(["diff", "--cached", "--quiet"]) + .status() + .map(|s| s.success()) + .unwrap_or(true); + let dirty = unstaged || staged; + GitContext { + commit, + branch, + dirty, + } +} + +/// Resolve a git ref (branch name, tag, or SHA prefix) to a full commit SHA. +pub fn resolve_ref(git_ref: &str) -> Option { + Command::new("git") + .args(["rev-parse", git_ref]) + .output() + .ok() + .filter(|o| o.status.success()) + .and_then(|o| String::from_utf8(o.stdout).ok()) + .map(|s| s.trim().to_string()) +} + +// ── Run lookup ────────────────────────────────────────────────────── + +/// Find a persisted run matching commit SHA and kind. +/// +/// Scans `work_dir/*/run.json` for a manifest whose `commit` and `kind` +/// match and whose `dirty` flag is `false`. +pub fn find_run_for_commit( + work_dir: &Path, + commit: &str, + kind: RunKind, +) -> Option<(PathBuf, RunManifest)> { + for entry in fs::read_dir(work_dir).ok()?.flatten() { + let run_json = entry.path().join("run.json"); + if let Ok(text) = fs::read_to_string(&run_json) { + if let Ok(m) = serde_json::from_str::(&text) { + if m.kind == kind && m.commit.as_deref() == Some(commit) && !m.dirty { + return Some((entry.path(), m)); + } + } + } + } + None +} + +// ── Test output parsing ───────────────────────────────────────────── + +/// Parse `cargo test` and `cargo nextest` stdout into per-test results. +/// +/// Recognises two formats: +/// - cargo test: `test some::path ... ok` +/// - nextest: ` PASS [ 1.234s] crate::module::test_name` +pub fn parse_test_output(output: &str) -> Vec { + let mut seen = std::collections::HashSet::new(); + let mut results = Vec::new(); + for line in output.lines() { + let line = line.trim(); + + // cargo test format: "test name ... 
ok|FAILED|ignored" + if let Some(rest) = line.strip_prefix("test ") { + if let Some((name, status_str)) = rest.rsplit_once(" ... ") { + let status = match status_str.trim() { + "ok" => TestStatus::Pass, + "FAILED" => TestStatus::Fail, + "ignored" => TestStatus::Ignored, + _ => continue, + }; + let name = name.trim().to_string(); + if seen.insert(name.clone()) { + results.push(TestResult { + name, + status, + duration: None, + dir: None, + }); + } + } + continue; + } + + // nextest format: "PASS [ 1.234s] crate::test_name" + // "FAIL [ 0.567s] crate::test_name" + // "IGNORE crate::test_name" + // "TIMEOUT [ 60.0s] crate::test_name" + if let Some((status, rest)) = parse_nextest_line(line) { + let duration = parse_nextest_duration(rest); + let name = rest + .find(']') + .map(|i| &rest[i + 1..]) + .unwrap_or(rest) + .trim() + .to_string(); + if !name.is_empty() && seen.insert(name.clone()) { + results.push(TestResult { + name, + status, + duration, + dir: None, + }); + } + } + } + results +} + +fn parse_nextest_line(line: &str) -> Option<(TestStatus, &str)> { + let prefixes = [ + ("PASS", TestStatus::Pass), + ("FAIL", TestStatus::Fail), + ("IGNORE", TestStatus::Ignored), + ("TIMEOUT", TestStatus::Fail), + ]; + for (prefix, status) in prefixes { + if let Some(rest) = line.strip_prefix(prefix) { + if rest.starts_with(' ') || rest.starts_with('[') { + return Some((status, rest.trim())); + } + } + } + None +} + +fn parse_nextest_duration(s: &str) -> Option { + // "[ 1.234s] name" → extract "1.234" + let s = s.strip_prefix('[')?; + let end = s.find(']')?; + let inner = s[..end].trim().strip_suffix('s')?; + let secs: f64 = inner.parse().ok()?; + Some(Duration::from_secs_f64(secs)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_test_output() { + let output = "\ +running 3 tests +test foo::bar ... ok +test baz ... FAILED +test qux ... ignored + +test result: FAILED. 
1 passed; 1 failed; 1 ignored; +"; + let results = parse_test_output(output); + assert_eq!(results.len(), 3); + assert_eq!(results[0].name, "foo::bar"); + assert_eq!(results[0].status, TestStatus::Pass); + assert_eq!(results[1].name, "baz"); + assert_eq!(results[1].status, TestStatus::Fail); + assert_eq!(results[2].name, "qux"); + assert_eq!(results[2].status, TestStatus::Ignored); + } + + #[test] + fn test_parse_nextest_output() { + let output = "\ + Compiling my-crate v0.1.0 + PASS [ 1.234s] my-crate::tests::foo + FAIL [ 0.567s] my-crate::tests::bar + TIMEOUT [ 60.001s] my-crate::tests::baz + IGNORE my-crate::tests::qux +"; + let results = parse_test_output(output); + assert_eq!(results.len(), 4); + assert_eq!(results[0].name, "my-crate::tests::foo"); + assert_eq!(results[0].status, TestStatus::Pass); + assert_eq!(results[0].duration, Some(Duration::from_millis(1234))); + assert_eq!(results[1].name, "my-crate::tests::bar"); + assert_eq!(results[1].status, TestStatus::Fail); + assert_eq!(results[2].name, "my-crate::tests::baz"); + assert_eq!(results[2].status, TestStatus::Fail); // timeout = fail + assert_eq!(results[3].name, "my-crate::tests::qux"); + assert_eq!(results[3].status, TestStatus::Ignored); + assert_eq!(results[3].duration, None); + } + + #[test] + fn test_duration_ms_roundtrip() { + #[derive(Serialize, Deserialize, PartialEq, Debug)] + struct T { + #[serde(with = "duration_ms")] + d: Duration, + } + let t = T { + d: Duration::from_millis(1234), + }; + let json = serde_json::to_string(&t).unwrap(); + assert_eq!(json, r#"{"d":1234}"#); + let t2: T = serde_json::from_str(&json).unwrap(); + assert_eq!(t, t2); + } + + #[test] + fn test_option_duration_ms_roundtrip() { + #[derive(Serialize, Deserialize, PartialEq, Debug)] + struct T { + #[serde(with = "option_duration_ms")] + d: Option, + } + let t = T { + d: Some(Duration::from_millis(42)), + }; + let json = serde_json::to_string(&t).unwrap(); + assert_eq!(json, r#"{"d":42}"#); + let t2: T = 
serde_json::from_str(&json).unwrap(); + assert_eq!(t, t2); + + let none = T { d: None }; + let json = serde_json::to_string(&none).unwrap(); + assert_eq!(json, r#"{"d":null}"#); + let t3: T = serde_json::from_str(&json).unwrap(); + assert_eq!(none, t3); + } + + #[test] + fn test_run_manifest_backward_compat() { + // Old-style run.json with test_outcome instead of outcome + let json = r#"{ + "kind": "sim", + "test_outcome": "success", + "project": "iroh" + }"#; + let m: RunManifest = serde_json::from_str(json).unwrap(); + assert_eq!(m.outcome.as_deref(), Some("success")); + assert_eq!(m.kind, RunKind::Sim); + } +} diff --git a/patchbay-vm/Cargo.toml b/patchbay-vm/Cargo.toml index 620b6f2..86ab6e4 100644 --- a/patchbay-vm/Cargo.toml +++ b/patchbay-vm/Cargo.toml @@ -8,16 +8,15 @@ license.workspace = true authors.workspace = true repository.workspace = true +[lib] + [dependencies] anyhow = "1" clap = { version = "4", features = ["derive"] } dirs = "6" -patchbay-utils = { path = "../patchbay-utils" } -patchbay-server = { path = "../patchbay-server" } -tokio = { version = "1", features = ["rt", "macros", "time"] } -tracing = "0.1" -open = "5" +patchbay-utils = { workspace = true } nix = { version = "0.30", features = ["signal", "process"] } serde = { version = "1", features = ["derive"] } serde_json = "1" toml = "1.0" +tracing = "0.1" diff --git a/patchbay-vm/src/container.rs b/patchbay-vm/src/container.rs index de7a8cf..d49adb4 100644 --- a/patchbay-vm/src/container.rs +++ b/patchbay-vm/src/container.rs @@ -27,7 +27,7 @@ use crate::{ // Constants // --------------------------------------------------------------------------- -const CONTAINER_STATE_DIR: &str = ".container-vm"; +const CONTAINER_STATE_DIR: &str = ".patchbay/vm"; const DEFAULT_CONTAINER_NAME: &str = "patchbay"; const DEFAULT_IMAGE: &str = "debian:trixie-slim"; @@ -84,7 +84,7 @@ impl ContainerConfig { Ok(dir) => dir, Err(_) => cwd.join("target"), }; - let default_work = cwd.join(".patchbay-work"); + let 
default_work = cwd.join(".patchbay/work"); Ok(Self { name: env_or("CONTAINER_VM_NAME", DEFAULT_CONTAINER_NAME), diff --git a/patchbay-vm/src/lib.rs b/patchbay-vm/src/lib.rs new file mode 100644 index 0000000..f0f516e --- /dev/null +++ b/patchbay-vm/src/lib.rs @@ -0,0 +1,176 @@ +pub mod common; +pub mod container; +pub mod qemu; +pub mod util; + +use clap::ValueEnum; +pub use common::{RunVmArgs, TestVmArgs}; + +#[derive(Clone, Debug, ValueEnum)] +pub enum Backend { + /// Auto-detect: prefer `container` on macOS Apple Silicon, fall back to QEMU. + Auto, + /// QEMU with a full Debian cloud image and SSH access. + Qemu, + /// Apple `container` CLI (macOS 26 + Apple Silicon only). + Container, +} + +pub fn default_test_target() -> String { + if std::env::consts::ARCH == "aarch64" { + "aarch64-unknown-linux-musl".to_string() + } else { + "x86_64-unknown-linux-musl".to_string() + } +} + +/// VM backend operations. +pub trait VmOps { + fn up(&self, recreate: bool) -> anyhow::Result<()>; + fn down(&self) -> anyhow::Result<()>; + fn status(&self) -> anyhow::Result<()>; + fn cleanup(&self) -> anyhow::Result<()>; + fn exec(&self, cmd: Vec) -> anyhow::Result<()>; + fn run_sims(&self, args: RunVmArgs) -> anyhow::Result<()>; + fn run_tests(&self, args: TestVmArgs) -> anyhow::Result<()>; +} + +/// QEMU backend. +pub struct Qemu; + +impl VmOps for Qemu { + fn up(&self, recreate: bool) -> anyhow::Result<()> { + qemu::up_cmd(recreate) + } + fn down(&self) -> anyhow::Result<()> { + qemu::down_cmd() + } + fn status(&self) -> anyhow::Result<()> { + qemu::status_cmd() + } + fn cleanup(&self) -> anyhow::Result<()> { + qemu::cleanup_cmd() + } + fn exec(&self, cmd: Vec) -> anyhow::Result<()> { + qemu::ssh_cmd_cli(cmd) + } + fn run_sims(&self, args: RunVmArgs) -> anyhow::Result<()> { + qemu::run_sims_in_vm(args) + } + fn run_tests(&self, args: TestVmArgs) -> anyhow::Result<()> { + qemu::run_tests_in_vm(args) + } +} + +/// Apple container backend. 
+pub struct Container; + +impl VmOps for Container { + fn up(&self, recreate: bool) -> anyhow::Result<()> { + container::up_cmd(recreate) + } + fn down(&self) -> anyhow::Result<()> { + container::down_cmd() + } + fn status(&self) -> anyhow::Result<()> { + container::status_cmd() + } + fn cleanup(&self) -> anyhow::Result<()> { + container::cleanup_cmd() + } + fn exec(&self, cmd: Vec) -> anyhow::Result<()> { + container::exec_cmd_cli(cmd) + } + fn run_sims(&self, args: RunVmArgs) -> anyhow::Result<()> { + container::run_sims(args) + } + fn run_tests(&self, args: TestVmArgs) -> anyhow::Result<()> { + container::run_tests(args) + } +} + +impl Backend { + /// Resolve `Auto` into a concrete backend. + pub fn resolve(self) -> Self { + match self { + Self::Auto => { + if std::env::consts::OS == "macos" + && std::env::consts::ARCH == "aarch64" + && common::command_exists("container").unwrap_or(false) + { + Self::Container + } else { + Self::Qemu + } + } + other => other, + } + } +} + +/// Implement VmOps on Backend by delegating to the resolved backend. 
+impl VmOps for Backend { + fn up(&self, recreate: bool) -> anyhow::Result<()> { + match self { + Self::Container => Container.up(recreate), + Self::Auto => { + anyhow::bail!("Backend::Auto must be resolved before use; call .resolve() first") + } + Self::Qemu => Qemu.up(recreate), + } + } + fn down(&self) -> anyhow::Result<()> { + match self { + Self::Container => Container.down(), + Self::Auto => { + anyhow::bail!("Backend::Auto must be resolved before use; call .resolve() first") + } + Self::Qemu => Qemu.down(), + } + } + fn status(&self) -> anyhow::Result<()> { + match self { + Self::Container => Container.status(), + Self::Auto => { + anyhow::bail!("Backend::Auto must be resolved before use; call .resolve() first") + } + Self::Qemu => Qemu.status(), + } + } + fn cleanup(&self) -> anyhow::Result<()> { + match self { + Self::Container => Container.cleanup(), + Self::Auto => { + anyhow::bail!("Backend::Auto must be resolved before use; call .resolve() first") + } + Self::Qemu => Qemu.cleanup(), + } + } + fn exec(&self, cmd: Vec) -> anyhow::Result<()> { + match self { + Self::Container => Container.exec(cmd), + Self::Auto => { + anyhow::bail!("Backend::Auto must be resolved before use; call .resolve() first") + } + Self::Qemu => Qemu.exec(cmd), + } + } + fn run_sims(&self, args: RunVmArgs) -> anyhow::Result<()> { + match self { + Self::Container => Container.run_sims(args), + Self::Auto => { + anyhow::bail!("Backend::Auto must be resolved before use; call .resolve() first") + } + Self::Qemu => Qemu.run_sims(args), + } + } + fn run_tests(&self, args: TestVmArgs) -> anyhow::Result<()> { + match self { + Self::Container => Container.run_tests(args), + Self::Auto => { + anyhow::bail!("Backend::Auto must be resolved before use; call .resolve() first") + } + Self::Qemu => Qemu.run_tests(args), + } + } +} diff --git a/patchbay-vm/src/main.rs b/patchbay-vm/src/main.rs deleted file mode 100644 index e756852..0000000 --- a/patchbay-vm/src/main.rs +++ /dev/null @@ -1,274 +0,0 
@@ -mod common; -mod container; -mod qemu; -mod util; - -fn default_test_target() -> String { - if std::env::consts::ARCH == "aarch64" { - "aarch64-unknown-linux-musl".to_string() - } else { - "x86_64-unknown-linux-musl".to_string() - } -} - -use std::path::PathBuf; - -use anyhow::Result; -use clap::{Parser, Subcommand, ValueEnum}; -use common::{RunVmArgs, TestVmArgs}; -use patchbay_server::DEFAULT_UI_BIND; - -/// VM backend selection. -#[derive(Clone, Debug, ValueEnum)] -enum Backend { - /// Auto-detect: prefer `container` on macOS Apple Silicon, fall back to QEMU. - Auto, - /// QEMU with a full Debian cloud image and SSH access. - Qemu, - /// Apple `container` CLI (macOS 26 + Apple Silicon only). - Container, -} - -#[derive(Parser)] -#[command(name = "patchbay-vm", about = "Standalone VM runner for patchbay")] -struct Cli { - /// Which VM backend to use. - #[arg(long, default_value = "auto", global = true)] - backend: Backend, - #[command(subcommand)] - command: Command, -} - -#[derive(Subcommand)] -enum Command { - /// Boot or reuse VM and ensure mounts. - Up { - #[arg(long)] - recreate: bool, - }, - /// Stop VM and helper processes. - Down, - /// Show VM running status. - Status, - /// Best-effort cleanup of VM helper artifacts/processes. - Cleanup, - /// Execute command in the guest (SSH for QEMU, exec for container). - Ssh { - #[arg(trailing_var_arg = true, allow_hyphen_values = true)] - cmd: Vec, - }, - /// Run one or more sims in VM using guest patchbay binary. 
- Run { - #[arg(required = true)] - sims: Vec, - #[arg(long, default_value = ".patchbay-work")] - work_dir: PathBuf, - #[arg(long = "binary")] - binary_overrides: Vec, - #[arg(short = 'v', long, default_value_t = false)] - verbose: bool, - #[arg(long)] - recreate: bool, - #[arg(long, default_value = "latest")] - patchbay_version: String, - #[arg(long, default_value_t = false)] - open: bool, - #[arg(long, default_value = DEFAULT_UI_BIND)] - bind: String, - }, - /// Serve embedded UI + work directory over HTTP. - Serve { - #[arg(long, default_value = ".patchbay-work")] - work_dir: PathBuf, - /// Serve `/binaries/tests/testdir-current` instead of work_dir. - /// - /// In the VM, test binaries live under `/binaries/tests/` and - /// the testdir crate writes output next to the executable. - #[arg(long, default_value_t = false)] - testdir: bool, - #[arg(long, default_value = DEFAULT_UI_BIND)] - bind: String, - #[arg(long, default_value_t = false)] - open: bool, - }, - /// Build and run tests in VM (replaces legacy test-vm flow). - /// - /// Positional FILTER is passed to each test binary as a name filter - /// (like `cargo test `). Extra args after `--` go to cargo - /// during the build and to each test binary at runtime. - Test { - /// Test name filter (passed to test binaries at runtime). - #[arg()] - filter: Option, - #[arg(long, default_value_t = default_test_target())] - target: String, - #[arg(short = 'p', long = "package")] - packages: Vec, - #[arg(long = "test")] - tests: Vec, - #[arg(short = 'j', long)] - jobs: Option, - #[arg(short = 'F', long)] - features: Vec, - #[arg(long)] - release: bool, - #[arg(long)] - lib: bool, - #[arg(long)] - no_fail_fast: bool, - #[arg(long)] - recreate: bool, - #[arg(last = true)] - cargo_args: Vec, - }, -} - -/// Resolve `Backend::Auto` into a concrete backend. 
-fn resolve_backend(b: Backend) -> Backend { - match b { - Backend::Auto => { - if std::env::consts::OS == "macos" - && std::env::consts::ARCH == "aarch64" - && common::command_exists("container").unwrap_or(false) - { - Backend::Container - } else { - Backend::Qemu - } - } - other => other, - } -} - -#[tokio::main(flavor = "current_thread")] -async fn main() -> Result<()> { - patchbay_utils::init_tracing(); - let cli = Cli::parse(); - let backend = resolve_backend(cli.backend); - - match cli.command { - Command::Up { recreate } => match backend { - Backend::Container => container::up_cmd(recreate), - _ => qemu::up_cmd(recreate), - }, - Command::Down => match backend { - Backend::Container => container::down_cmd(), - _ => qemu::down_cmd(), - }, - Command::Status => match backend { - Backend::Container => container::status_cmd(), - _ => qemu::status_cmd(), - }, - Command::Cleanup => match backend { - Backend::Container => container::cleanup_cmd(), - _ => qemu::cleanup_cmd(), - }, - Command::Ssh { cmd } => match backend { - Backend::Container => container::exec_cmd_cli(cmd), - _ => qemu::ssh_cmd_cli(cmd), - }, - Command::Run { - sims, - work_dir, - binary_overrides, - verbose, - recreate, - patchbay_version, - open, - bind, - } => { - if open { - let url = format!("http://{bind}"); - println!("patchbay UI: {url}"); - let _ = open::that(&url); - let work = work_dir.clone(); - tokio::spawn(async move { - if let Err(e) = patchbay_server::serve(work, &bind).await { - tracing::error!("server error: {e}"); - } - }); - } - let args = RunVmArgs { - sim_inputs: sims, - work_dir, - binary_overrides, - verbose, - recreate, - patchbay_version, - }; - let res = match backend { - Backend::Container => container::run_sims(args), - _ => qemu::run_sims_in_vm(args), - }; - if open && res.is_ok() { - println!("run finished; server still running (Ctrl-C to exit)"); - loop { - tokio::time::sleep(std::time::Duration::from_secs(60)).await; - } - } - res - } - Command::Serve { - work_dir, - 
testdir, - bind, - open, - } => { - let dir = if testdir { - work_dir - .join("binaries") - .join("tests") - .join("testdir-current") - } else { - work_dir - }; - println!("patchbay: serving {} at http://{bind}/", dir.display()); - if open { - let url = format!("http://{bind}"); - let _ = open::that(&url); - } - patchbay_server::serve(dir, &bind).await - } - Command::Test { - filter, - target, - packages, - tests, - jobs, - features, - release, - lib, - no_fail_fast, - recreate, - mut cargo_args, - } => { - if let Some(j) = jobs { - cargo_args.extend(["--jobs".into(), j.to_string()]); - } - for f in features { - cargo_args.extend(["--features".into(), f]); - } - if release { - cargo_args.push("--release".into()); - } - if lib { - cargo_args.push("--lib".into()); - } - if no_fail_fast { - cargo_args.push("--no-fail-fast".into()); - } - let args = TestVmArgs { - filter, - target, - packages, - tests, - recreate, - cargo_args, - }; - match backend { - Backend::Container => container::run_tests(args), - _ => qemu::run_tests_in_vm(args), - } - } - } -} diff --git a/patchbay-vm/src/qemu.rs b/patchbay-vm/src/qemu.rs index 2092abb..ca3dbee 100644 --- a/patchbay-vm/src/qemu.rs +++ b/patchbay-vm/src/qemu.rs @@ -24,7 +24,7 @@ use crate::{ // QEMU-specific constants // --------------------------------------------------------------------------- -const VM_STATE_DIR: &str = ".qemu-vm"; +const VM_STATE_DIR: &str = ".patchbay/vm"; const DEFAULT_VM_NAME: &str = "patchbay-vm"; const DEFAULT_IMAGE_URL_X86: &str = "https://cloud.debian.org/images/cloud/trixie/latest/debian-13-genericcloud-amd64.qcow2"; @@ -160,7 +160,7 @@ impl VmConfig { Ok(dir) => dir, Err(_) => cwd.join("target"), }; - let default_work = cwd.join(".patchbay-work"); + let default_work = cwd.join(".patchbay/work"); Ok(Self { vm_name: env_or("QEMU_VM_NAME", DEFAULT_VM_NAME), diff --git a/patchbay/Cargo.toml b/patchbay/Cargo.toml index cdab33c..f0708b6 100644 --- a/patchbay/Cargo.toml +++ b/patchbay/Cargo.toml @@ -8,7 
+8,11 @@ license.workspace = true authors.workspace = true repository.workspace = true +[features] +iroh-metrics = ["dep:iroh-metrics"] + [dependencies] +iroh-metrics = { version = "0.38", optional = true } anyhow = "1" chrono = { version = "0.4", default-features = false, features = ["clock", "serde"] } derive_more = { version = "2.1.1", features = ["debug", "display"] } diff --git a/patchbay/src/consts.rs b/patchbay/src/consts.rs index 0a14ea1..6fc7a08 100644 --- a/patchbay/src/consts.rs +++ b/patchbay/src/consts.rs @@ -27,6 +27,9 @@ pub const STDOUT_LOG_EXT: &str = "stdout.log"; /// Per-node stderr log suffix. pub const STDERR_LOG_EXT: &str = "stderr.log"; +/// Per-node metrics suffix. +pub const METRICS_JSONL_EXT: &str = "metrics.jsonl"; + /// Node kind prefix for devices. pub const KIND_DEVICE: &str = "device"; diff --git a/patchbay/src/handles.rs b/patchbay/src/handles.rs index 005dbf4..96b5888 100644 --- a/patchbay/src/handles.rs +++ b/patchbay/src/handles.rs @@ -38,6 +38,21 @@ use crate::{ netlink::Netlink, }; +/// Record a metric via the given tracing dispatch. +pub(crate) fn record_metric(dispatch: &tracing::Dispatch, key: &str, value: f64) { + let mut map = serde_json::Map::new(); + if let Some(n) = serde_json::Number::from_f64(value) { + map.insert(key.to_string(), serde_json::Value::Number(n)); + } + let json = serde_json::to_string(&map).unwrap_or_default(); + let _guard = tracing::dispatcher::set_default(dispatch); + tracing::event!( + target: "patchbay::_metrics", + tracing::Level::INFO, + metrics_json = %json, + ); +} + async fn reconcile_radriven_default_v6_routes( lab: &Arc, router: NodeId, @@ -132,6 +147,7 @@ pub struct Device { name: Arc, ns: Arc, lab: Arc, + dispatch: tracing::Dispatch, } /// Owned snapshot of a single router network interface. 
@@ -172,6 +188,7 @@ impl Clone for Device { name: Arc::clone(&self.name), ns: Arc::clone(&self.ns), lab: Arc::clone(&self.lab), + dispatch: self.dispatch.clone(), } } } @@ -187,7 +204,51 @@ impl std::fmt::Debug for Device { impl Device { pub(crate) fn new(id: NodeId, name: Arc, ns: Arc, lab: Arc) -> Self { - Self { id, name, ns, lab } + let dispatch = lab + .netns + .dispatch_for(&ns) + .unwrap_or_else(|| tracing::dispatcher::get_default(|d| d.clone())); + Self { + id, + name, + ns, + lab, + dispatch, + } + } + + /// Enter this device's tracing context. + pub fn enter_tracing(&self) -> tracing::subscriber::DefaultGuard { + tracing::dispatcher::set_default(&self.dispatch) + } + + /// Record a single metric. + pub fn record(&self, key: &str, value: f64) { + record_metric(&self.dispatch, key, value); + } + + /// Returns a builder for recording multiple metrics at once. + pub fn metrics(&self) -> crate::metrics::MetricsBuilder { + crate::metrics::MetricsBuilder::new(self.dispatch.clone()) + } + + /// Record all counter/gauge values from an iroh-metrics group. + /// + /// Iterates the group's metrics and emits each counter or gauge as a + /// patchbay metric line. Histograms are skipped. + #[cfg(feature = "iroh-metrics")] + pub fn record_iroh_metrics(&self, group: &dyn iroh_metrics::MetricsGroup) { + let _guard = self.enter_tracing(); + let mut builder = self.metrics(); + for item in group.iter() { + let value: f64 = match item.value() { + iroh_metrics::MetricValue::Counter(v) => v as f64, + iroh_metrics::MetricValue::Gauge(v) => v as f64, + _ => continue, + }; + builder = builder.record(item.name(), value); + } + builder.emit(); } /// Returns the node identifier. 
@@ -928,6 +989,7 @@ pub struct Router { name: Arc, ns: Arc, lab: Arc, + dispatch: tracing::Dispatch, } impl Clone for Router { @@ -937,6 +999,7 @@ impl Clone for Router { name: Arc::clone(&self.name), ns: Arc::clone(&self.ns), lab: Arc::clone(&self.lab), + dispatch: self.dispatch.clone(), } } } @@ -952,7 +1015,51 @@ impl std::fmt::Debug for Router { impl Router { pub(crate) fn new(id: NodeId, name: Arc, ns: Arc, lab: Arc) -> Self { - Self { id, name, ns, lab } + let dispatch = lab + .netns + .dispatch_for(&ns) + .unwrap_or_else(|| tracing::dispatcher::get_default(|d| d.clone())); + Self { + id, + name, + ns, + lab, + dispatch, + } + } + + /// Enter this router's tracing context. + pub fn enter_tracing(&self) -> tracing::subscriber::DefaultGuard { + tracing::dispatcher::set_default(&self.dispatch) + } + + /// Record a single metric. + pub fn record(&self, key: &str, value: f64) { + record_metric(&self.dispatch, key, value); + } + + /// Returns a builder for recording multiple metrics at once. + pub fn metrics(&self) -> crate::metrics::MetricsBuilder { + crate::metrics::MetricsBuilder::new(self.dispatch.clone()) + } + + /// Record all counter/gauge values from an iroh-metrics group. + /// + /// Iterates the group's metrics and emits each counter or gauge as a + /// patchbay metric line. Histograms are skipped. + #[cfg(feature = "iroh-metrics")] + pub fn record_iroh_metrics(&self, group: &dyn iroh_metrics::MetricsGroup) { + let _guard = self.enter_tracing(); + let mut builder = self.metrics(); + for item in group.iter() { + let value: f64 = match item.value() { + iroh_metrics::MetricValue::Counter(v) => v as f64, + iroh_metrics::MetricValue::Gauge(v) => v as f64, + _ => continue, + }; + builder = builder.record(item.name(), value); + } + builder.emit(); } /// Returns the node identifier. 
diff --git a/patchbay/src/lib.rs b/patchbay/src/lib.rs index af46edf..b9b6f3b 100644 --- a/patchbay/src/lib.rs +++ b/patchbay/src/lib.rs @@ -203,6 +203,7 @@ pub mod event; pub(crate) mod firewall; pub(crate) mod handles; mod lab; +mod metrics; pub(crate) mod nat; pub(crate) mod nat64; mod netlink; @@ -228,6 +229,7 @@ pub use lab::{ NatConfigBuilder, NatFiltering, NatMapping, NatV6Mode, OutDir, Region, RegionLink, Router, RouterBuilder, RouterIface, RouterPreset, TestGuard, }; +pub use metrics::MetricsBuilder; pub use crate::{ core::{NodeId, ReflectorGuard}, diff --git a/patchbay/src/metrics.rs b/patchbay/src/metrics.rs new file mode 100644 index 0000000..5b24677 --- /dev/null +++ b/patchbay/src/metrics.rs @@ -0,0 +1,41 @@ +//! Builder for emitting multiple metrics in a single JSONL line. + +use serde_json::Map; + +/// Builder for batch metric emission. Obtained from [`Device::metrics()`](crate::Device::metrics). +pub struct MetricsBuilder { + pub(crate) dispatch: tracing::Dispatch, + pub(crate) values: Map, +} + +impl MetricsBuilder { + pub(crate) fn new(dispatch: tracing::Dispatch) -> Self { + Self { + dispatch, + values: Map::new(), + } + } + + /// Add a metric key-value pair. + pub fn record(mut self, key: &str, value: f64) -> Self { + if let Some(n) = serde_json::Number::from_f64(value) { + self.values + .insert(key.to_string(), serde_json::Value::Number(n)); + } + self + } + + /// Emit all recorded metrics as a single line in metrics.jsonl. 
+ pub fn emit(self) { + if self.values.is_empty() { + return; + } + let _guard = tracing::dispatcher::set_default(&self.dispatch); + let json = serde_json::to_string(&self.values).unwrap_or_default(); + tracing::event!( + target: "patchbay::_metrics", + tracing::Level::INFO, + metrics_json = %json, + ); + } +} diff --git a/patchbay/src/netns.rs b/patchbay/src/netns.rs index c994a5e..05419c7 100644 --- a/patchbay/src/netns.rs +++ b/patchbay/src/netns.rs @@ -550,6 +550,12 @@ impl NetnsManager { .context("spawn user thread") } + /// Get the tracing dispatch for a namespace. + pub(crate) fn dispatch_for(&self, ns: &str) -> Option { + let workers = self.workers.lock().expect("netns worker map poisoned"); + workers.get(ns).map(|w| w.tracing_dispatch.clone()) + } + /// Clone the namespace fd (for moving veth endpoints etc). pub(crate) fn ns_fd(&self, ns: &str) -> Result { let workers = self.workers.lock().expect("netns worker map poisoned"); diff --git a/patchbay/src/tracing.rs b/patchbay/src/tracing.rs index 9fe7f80..46fb68e 100644 --- a/patchbay/src/tracing.rs +++ b/patchbay/src/tracing.rs @@ -253,6 +253,8 @@ struct NsWriterSubscriber { ansi_writer: Mutex, /// Extracted `_events::` NDJSON writer. events_writer: Mutex, + /// Per-node metrics JSONL writer. + metrics_writer: Mutex, /// Target+level filter for the tracing file (from PATCHBAY_LOG / RUST_LOG). /// Supports full directive syntax, e.g. `iroh=trace,patchbay=debug`. file_filter: tracing_subscriber::filter::Targets, @@ -381,23 +383,55 @@ impl NsWriterSubscriber { fn write_event_to_files(&self, event: &tracing::Event<'_>) { let meta = event.metadata(); let target = meta.target(); + + // Record fields once for all paths. + let mut visitor = JsonFieldVisitor::new(); + event.record(&mut visitor); + + // Write to .metrics.jsonl — only patchbay::_metrics target. 
+ if target == "patchbay::_metrics" { + let metrics_map = if let Some(serde_json::Value::String(json_str)) = + visitor.fields.get("metrics_json") + { + serde_json::from_str::>(json_str) + .unwrap_or_default() + } else { + let mut m = visitor.fields; + m.remove("message"); + m + }; + if !metrics_map.is_empty() { + let now = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default(); + let mut obj = serde_json::Map::new(); + obj.insert("t".into(), serde_json::json!(now.as_secs_f64())); + obj.insert("m".into(), serde_json::Value::Object(metrics_map)); + if let Ok(mut w) = self.metrics_writer.lock() { + let _ = serde_json::to_writer(&mut *w, &serde_json::Value::Object(obj)); + let _ = w.write_all(b"\n"); + let _ = w.flush(); + } + } + return; + } + let timestamp = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Micros, true); // Write to .events.jsonl — only _events:: targets. if let Some(kind) = target.split_once("_events::").map(|(_, k)| k) { - let mut visitor = JsonFieldVisitor::new(); - event.record(&mut visitor); - visitor.fields.remove("message"); - visitor.fields.insert( + let mut events_fields = visitor.fields.clone(); + events_fields.remove("message"); + events_fields.insert( "kind".to_string(), serde_json::Value::String(kind.to_string()), ); - visitor.fields.insert( + events_fields.insert( "timestamp".to_string(), serde_json::Value::String(timestamp.clone()), ); if let Ok(mut w) = self.events_writer.lock() { - let _ = serde_json::to_writer(&mut *w, &visitor.fields); + let _ = serde_json::to_writer(&mut *w, &events_fields); let _ = w.write_all(b"\n"); let _ = w.flush(); } @@ -406,8 +440,6 @@ impl NsWriterSubscriber { // Write to .tracing.jsonl — matching tracing-subscriber's JSON format: // {"timestamp":"...","level":"INFO","fields":{"message":"...","key":"val"},"target":"mod::path"} if self.file_filter.would_enable(target, meta.level()) || target.contains("_events::") { - let mut visitor = JsonFieldVisitor::new(); - 
event.record(&mut visitor); let mut obj = serde_json::Map::new(); obj.insert( "timestamp".to_string(), @@ -462,6 +494,7 @@ impl tracing::Subscriber for NsWriterSubscriber { .file_filter .would_enable(metadata.target(), metadata.level()) || metadata.target().contains("_events::") + || metadata.target() == "patchbay::_metrics" } fn new_span(&self, span: &tracing::span::Attributes<'_>) -> tracing::span::Id { @@ -549,6 +582,7 @@ pub(crate) fn install_namespace_subscriber( let tracing_path = run_dir.join(format!("{log_prefix}.{}", consts::TRACING_JSONL_EXT)); let ansi_path = run_dir.join(format!("{log_prefix}.{}", consts::TRACING_LOG_EXT)); let events_path = run_dir.join(format!("{log_prefix}.{}", consts::EVENTS_JSONL_EXT)); + let metrics_path = run_dir.join(format!("{log_prefix}.{}", consts::METRICS_JSONL_EXT)); let file_filter_str = std::env::var("PATCHBAY_LOG") .or_else(|_| std::env::var("RUST_LOG")) @@ -566,6 +600,7 @@ pub(crate) fn install_namespace_subscriber( tracing_writer: Mutex::new(LazyFile::new(tracing_path)), ansi_writer: Mutex::new(LazyFile::new(ansi_path)), events_writer: Mutex::new(LazyFile::new(events_path)), + metrics_writer: Mutex::new(LazyFile::new(metrics_path)), file_filter, span_state: Mutex::new(SpanState::default()), }; diff --git a/plans/compare-refactor.md b/plans/compare-refactor.md new file mode 100644 index 0000000..84a16db --- /dev/null +++ b/plans/compare-refactor.md @@ -0,0 +1,503 @@ +# Compare & Run Data Model Refactor + +## Problem Statement + +The current compare implementation creates separate `compare-{timestamp}/` directories with +a `summary.json` that duplicates test results. Runs and tests lack a unified manifest, making +it impossible to find "the run for commit X" or compare arbitrary runs in the UI. Three +different `RunManifest` structs exist across crates. The naming is inconsistent (`batch`, +`invocation`, `sim-` prefix for what is actually a "run"). 
+ +This refactor unifies the data model so that: +- Every execution (test or sim) writes a `run.json` manifest with git context +- Compare is a view over two existing runs, not a separate artifact +- The UI can compare any two runs from the same project +- `patchbay compare` is smart about caching (skip if run for that ref already exists) + +## Naming + +Everything is a **run**. A run has a `kind` field (enum: `Test` or `Sim`). + +| Term | Meaning | +|------|---------| +| **run** | Any single execution. The atomic unit everywhere. | +| **kind** | `RunKind::Test` or `RunKind::Sim` — what produced the run | +| **group** | When `patchbay run sims/` processes N sim TOMLs, the top-level `run-{timestamp}/` dir is the group. Each sim inside is a nested run. For tests, each test binary's output under `testdir-current/` is a nested run (if it has `events.jsonl`). The group shares the `run.json` manifest. | +| **project** | A named scope for filtering & comparing (e.g. `"iroh"`) | + +"batch" is retired (kept as serde alias for backward compat). + +### Directory naming + +| Context | Current | New | +|---------|---------|-----| +| Sim run root | `sim-YYMMDD-HHMMSS/` | `run-YYMMDD-HHMMSS/` | +| Pushed run | `{project}-{date}-{uuid}/` | unchanged | +| Compare dir | `compare-{timestamp}/` | **removed** (compare is computed on the fly) | +| Worktree | `.patchbay/tree/{ref}/` | unchanged | +| VM state | `.patchbay/vm/` | unchanged | +| Image cache | `~/.local/share/patchbay/qemu-images/` | unchanged (stays in XDG) | + +### Testdir nesting + +`testdir!()` creates nested subdirectories for module paths: +`testdir-current/crate_name/module/test_name/`. The server scans up to 3 levels deep +for `events.jsonl`, so nested Lab output is discovered automatically. This is fine as-is. 
+
+## Unified `run.json` Manifest
+
+One struct, defined in `patchbay-utils/src/manifest.rs` (shared between runner, CLI, server):
+
+```rust
+use chrono::{DateTime, Utc};
+use std::time::Duration;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum RunKind {
+    Test,
+    Sim,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum TestStatus {
+    Pass,
+    Fail,
+    Ignored,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct TestResult {
+    pub name: String,
+    pub status: TestStatus,
+    /// Test duration, serialized as integer milliseconds.
+    #[serde(default, skip_serializing_if = "Option::is_none", with = "option_duration_ms")]
+    pub duration: Option<Duration>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct RunManifest {
+    // ── Identity ──
+    pub kind: RunKind,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub project: Option<String>,
+
+    // ── Git context ──
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub commit: Option<String>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub branch: Option<String>,
+    #[serde(default)]
+    pub dirty: bool,
+
+    // ── CI context (populated from env vars when available) ──
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub pr: Option<u64>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub pr_url: Option<String>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub title: Option<String>,
+
+    // ── Execution ──
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub started_at: Option<DateTime<Utc>>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub ended_at: Option<DateTime<Utc>>,
+    /// Total runtime, serialized as integer milliseconds.
+    #[serde(default, skip_serializing_if = "Option::is_none", with = "option_duration_ms")]
+    pub runtime: Option<Duration>,
+
+    // ── Outcome ──
+    /// "pass" or "fail".
Aliases for backward compat with old run.json fields.
+    #[serde(default, skip_serializing_if = "Option::is_none",
+            alias = "test_outcome", alias = "status")]
+    pub outcome: Option<String>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub pass: Option<u32>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub fail: Option<u32>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub total: Option<u32>,
+
+    // ── Per-test results (kind == Test only) ──
+    #[serde(default, skip_serializing_if = "Vec::is_empty")]
+    pub tests: Vec<TestResult>,
+
+    // ── Environment ──
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub os: Option<String>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub arch: Option<String>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub patchbay_version: Option<String>,
+}
+```
+
+### Duration serialization module (in patchbay-utils)
+
+Move the existing `duration_ms` serde module from `compare.rs` to `patchbay-utils/src/manifest.rs`.
+Add an `option_duration_ms` variant for `Option<Duration>` fields.
+
+```rust
+pub mod duration_ms {
+    use std::time::Duration;
+    use serde::{Deserialize, Deserializer, Serializer};
+
+    pub fn serialize<S: Serializer>(d: &Duration, s: S) -> Result<S::Ok, S::Error> {
+        s.serialize_u64(d.as_millis() as u64)
+    }
+    pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result<Duration, D::Error> {
+        Ok(Duration::from_millis(u64::deserialize(d)?))
+    }
+}
+
+pub mod option_duration_ms {
+    use std::time::Duration;
+    use serde::{Deserialize, Deserializer, Serialize, Serializer};
+
+    pub fn serialize<S: Serializer>(d: &Option<Duration>, s: S) -> Result<S::Ok, S::Error> {
+        match d {
+            Some(d) => s.serialize_u64(d.as_millis() as u64),
+            None => s.serialize_none(),
+        }
+    }
+    pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result<Option<Duration>, D::Error> {
+        Ok(Option::<u64>::deserialize(d)?.map(Duration::from_millis))
+    }
+}
+```
+
+### Git context helper (in patchbay-utils)
+
+```rust
+pub struct GitContext {
+    pub commit: Option<String>,
+    pub branch: Option<String>,
+    pub dirty: bool,
+}
+
+pub fn git_context() -> GitContext {
+    let commit = Command::new("git").args(["rev-parse", "HEAD"]).output().ok()
+        .filter(|o| o.status.success())
+        .and_then(|o| String::from_utf8(o.stdout).ok())
+        .map(|s| s.trim().to_string());
+    let branch = Command::new("git").args(["rev-parse", "--abbrev-ref", "HEAD"]).output().ok()
+        .filter(|o| o.status.success())
+        .and_then(|o| String::from_utf8(o.stdout).ok())
+        .map(|s| s.trim().to_string())
+        .filter(|s| s != "HEAD");
+    let dirty = !Command::new("git").args(["diff", "--quiet"]).status()
+        .map(|s| s.success()).unwrap_or(true);
+    GitContext { commit, branch, dirty }
+}
+
+/// Resolve a git ref (branch name, tag, or SHA prefix) to a full commit SHA.
+pub fn resolve_ref(git_ref: &str) -> Option<String> {
+    Command::new("git").args(["rev-parse", git_ref]).output().ok()
+        .filter(|o| o.status.success())
+        .and_then(|o| String::from_utf8(o.stdout).ok())
+        .map(|s| s.trim().to_string())
+}
+```
+
+### Who writes `run.json`
+
+| Command | Where | kind | Git info |
+|---------|-------|------|----------|
+| `patchbay test` | testdir output root | `Test` | `git_context()` |
+| `patchbay test --persist` | also copies to `.patchbay/work/run-{ts}/` | `Test` | same |
+| `patchbay run` | `run_root` (`run-{ts}/` dir) | `Sim` | `git_context()` |
+| `patchbay upload` | writes if missing, reads if present | either | from CI env vars |
+| `patchbay compare test` | each worktree test run writes its own | `Test` | worktree HEAD |
+
+## How `patchbay run` changes
+
+`prepare_run_root` creates `run-{timestamp}/` (rename from `sim-{timestamp}/`).
+
+After all sims finish, write `run.json` alongside the existing `manifest.json`.
+Rename the runner's `RunManifest` (in `progress.rs`) → `SimRunReport` to avoid collision.
+Both files coexist for now; `manifest.json` has per-sim details, `run.json` has unified metadata.
+Docs clarify the distinction. Long-term merge target.
+
+### Group semantics for sim runs
+
+When `patchbay run sims/` processes multiple TOML files, the `run-{timestamp}/` directory is
+the group. Each sim inside (`my-sim/`, `my-sim-2/`) is a nested run with its own `events.jsonl`.
+The server discovers nested runs and derives `group` from the first path component.
+The group-level `run.json` provides the shared manifest.
+
+## How `patchbay test` changes
+
+After `cargo test` / `nextest` finishes:
+
+1. Pipe stdout/stderr, parse test output via `parse_test_output()` for per-test results
+2. Locate `testdir-current/` via cargo metadata
+3. Also set `PATCHBAY_OUTDIR` env var so Labs write to a known location
+4. Write `run.json` into testdir-current/ (or PATCHBAY_OUTDIR if it was used and is non-empty)
+5. If `--persist` flag is set, copy the whole thing into `.patchbay/work/run-{ts}/`
+
+### testdir and PATCHBAY_OUTDIR
+
+`patchbay test` sets `PATCHBAY_OUTDIR` when running cargo test. After the test finishes:
+- If the PATCHBAY_OUTDIR directory exists and is non-empty → use it for run.json
+- Otherwise check if testdir-current exists → use that
+- Write `run.json` with kind, git context, parsed test results
+
+Consider re-exporting `testdir` from the patchbay crate (`patchbay::testdir`) for convenience.
+
+## How `patchbay compare` changes
+
+### New flow
+
+```
+patchbay compare test <ref1> [ref2] [-- test-args]
+    --force-build     Force rebuild even if cached run exists
+    --no-ref-build    Don't build; fail if no cached run found
+```
+
+1. **Resolve refs to commits**: `resolve_ref(ref)` → full SHA
+2. **Check for cached runs**: `find_run_for_commit(".patchbay/work", sha, RunKind::Test)`
+   scans `*/run.json` for matching `commit` + `kind` + `dirty == false`
+3. **For each ref without a cached run**:
+   - If `--no-ref-build`: fail with "no run found for {ref}, use --force-build"
+   - Create worktree at `.patchbay/tree/{ref}/`
+   - Run `cargo test` in worktree, persist results to `.patchbay/work/run-{ts}/`
+   - Clean up worktree
+4. **For current worktree** (when ref2 is omitted):
+   - Check if a run exists for HEAD (match `dirty` against current state)
+   - If not, run tests and persist
+5. **If `--force-build`**: skip cache check, always build & run
+6. **Diff**: Load both `run.json` manifests, compare `tests` arrays
+   - Print summary table + score
+   - Exit non-zero on regressions
+
+### Cached run lookup
+
+```rust
+/// Find a persisted run matching commit SHA and kind.
+pub fn find_run_for_commit(work_dir: &Path, commit: &str, kind: RunKind) -> Option<(PathBuf, RunManifest)> {
+    for entry in fs::read_dir(work_dir).ok()?.flatten() {
+        let run_json = entry.path().join("run.json");
+        if let Ok(text) = fs::read_to_string(&run_json) {
+            if let Ok(m) = serde_json::from_str::<RunManifest>(&text) {
+                if m.kind == kind && m.commit.as_deref() == Some(commit) && !m.dirty {
+                    return Some((entry.path(), m));
+                }
+            }
+        }
+    }
+    None
+}
+```
+
+### Comparing two RunManifests
+
+`compare_results()` takes two `&RunManifest`s and returns a computed summary. Uses the `tests`
+field for per-test diff. Same scoring logic as before (fixes +3, regressions -5, time delta ±1).
+
+No `CompareManifest` or `CompareSummary` structs stored to disk. The summary is printed to
+stdout and optionally returned as JSON for the UI.
+
+## Server changes
+
+### Discovery
+
+Extend `discover_runs` to detect directories with `run.json` in addition to `events.jsonl`:
+```rust
+if path.join(EVENTS_JSONL).exists() || path.join(RUN_JSON).exists() {
+    // This is a run
+}
+```
+
+### RunInfo changes
+
+```rust
+pub struct RunInfo {
+    pub name: String,
+    #[serde(skip)]
+    pub path: PathBuf,
+    pub label: Option<String>,
+    pub status: Option<String>,
+    /// Group name (first path component for nested runs).
+    /// Serialized as both "group" and legacy "batch".
+ #[serde(alias = "batch")] + pub group: Option, + pub manifest: Option, // unified RunManifest from patchbay-utils +} +``` + +### API changes + +- `GET /api/runs` gains optional query params: `?project=X&kind=test&limit=100&offset=0` +- Response includes `group` field (with `batch` as serde alias) +- Keep `/api/batches/` and `/api/invocations/` routes as aliases +- Compare is computed client-side (no new server endpoint needed) + +## UI changes + +### Runs index redesign + +Single page at `/`: + +``` +┌──────────────────────────────────────────────────────┐ +│ Runs [Project ▾] [Kind ▾] [< 1 2 3 >] │ +│ │ +│ ☐ main@abc123 test 2m ago 47/50 pass [view] │ +│ ☐ main@def456 test 1h ago 45/50 pass [view] │ +│ ☐ feat@789abc sim 3h ago pass [view] │ +│ │ +│ [Compare Selected (2)] │ +└──────────────────────────────────────────────────────┘ +``` + +- **Sorted by date** (newest first, from `started_at` or dir name) +- **Project filter** dropdown (populated from unique `manifest.project` values) +- **Kind filter** dropdown (test/sim/all) +- **Pagination** (100 per page) +- **Checkboxes** for multi-select → "Compare Selected" button +- Click row → `/run/{name}` detail view +- Grouped runs (multi-sim) show as expandable rows + +### Compare view + +Route: `/compare/:left/:right` + +- Fetch both runs' `run.json` via `/api/runs/{name}/files/run.json` +- Compute diff client-side (same logic as CLI compare) +- Summary bar: left ref, right ref, pass/fail counts, score +- Per-test table: name, left status, right status, delta badge +- Side-by-side metrics (if metrics.jsonl exists in both) +- "Compare with..." button on individual run pages (picker shows same-project runs) + +### Co-navigation + +Split-screen layout reusing RunView for each side. Shared tab state — switching tab on +one side switches both. Scroll sync optional (defer to v2). 
+ +### Router + +``` +/ → RunsIndex +/run/:name → RunDetail (single run view) +/compare/:left/:right → CompareView (side-by-side) +/batch/:name → alias for group view +/inv/:name → redirect to /batch/:name (legacy) +``` + +## Implementation Phases + +### Phase 1: Unified RunManifest + run.json everywhere + +**Commit 1a: Move manifest types to patchbay-utils** +- Create `patchbay-utils/src/manifest.rs` +- Define `RunKind`, `TestStatus`, `TestResult`, `RunManifest`, `GitContext` +- Move `duration_ms` / `option_duration_ms` serde modules there +- Add `git_context()`, `resolve_ref()`, `find_run_for_commit()` helpers +- Export from `patchbay-utils/src/lib.rs` +- Add `chrono` dependency to patchbay-utils (workspace dep) +- Delete duplicate RunManifest from `patchbay-cli/src/upload.rs` +- Delete duplicate RunManifest from `patchbay-server/src/lib.rs` +- Import from `patchbay_utils::manifest::*` in both +- Server: keep backward-compat serde aliases +- Files changed: `patchbay-utils/{Cargo.toml, src/lib.rs, src/manifest.rs}`, + `patchbay-cli/src/upload.rs`, `patchbay-server/src/lib.rs`, + `patchbay-cli/src/compare.rs` (delete TestResult/TestStatus, import from utils) + +**Commit 1b: Rename runner's RunManifest → SimRunReport** +- In `patchbay-runner/src/sim/progress.rs`: rename `RunManifest` → `SimRunReport` +- Update all references in `runner.rs` +- Add doc comments distinguishing from the unified `run.json` manifest +- Files changed: `patchbay-runner/src/sim/progress.rs`, `patchbay-runner/src/sim/runner.rs` + +**Commit 1c: `patchbay run` writes run.json** +- In `runner.rs::run_sims()`, after writing `manifest.json`, also write `run.json` + using `patchbay_utils::manifest::RunManifest` with `kind: Sim` +- Rename dir prefix `sim-` → `run-` in `prepare_run_root()` +- Files changed: `patchbay-runner/src/sim/runner.rs` + +**Commit 1d: `patchbay test` writes run.json + --persist** +- Pipe stdout/stderr from cargo test, parse with `parse_test_output()` +- Write `run.json` to 
testdir-current (or PATCHBAY_OUTDIR) with test results +- Add `--persist` flag to Test command: copies output dir to `.patchbay/work/run-{ts}/` +- Set `PATCHBAY_OUTDIR` env var when running cargo test +- Files changed: `patchbay-cli/src/test.rs`, `patchbay-cli/src/main.rs` + +### Phase 2: Refactor compare to use cached runs + +**Commit 2a: Compare uses run.json + cache lookup** +- Rewrite compare flow: resolve refs → check cache → build if needed → diff run.json +- Delete `CompareManifest`, `CompareSummary` structs (compare is computed, not stored) +- `compare_results()` takes two `&RunManifest` and returns printed summary +- Add `--force-build` and `--no-ref-build` flags +- Remove `compare-{timestamp}/` directory creation +- Files changed: `patchbay-cli/src/compare.rs`, `patchbay-cli/src/main.rs` + +### Phase 3: Server + API updates + +**Commit 3a: Server discovers run.json + group rename** +- Extend `discover_runs` to check for `run.json` in addition to `events.jsonl` +- Rename `batch` → `group` in `RunInfo` (keep `batch` as serde alias) +- Import `RunManifest` from patchbay-utils instead of local definition +- Add query params to `GET /api/runs`: `project`, `kind`, `limit`, `offset` +- Files changed: `patchbay-server/src/lib.rs` + +**Commit 3b: Rename batch → group in UI types** +- Update `api.ts`, `RunsIndex.tsx`, `App.tsx` to use `group` (keep `batch` as fallback) +- Files changed: `ui/src/api.ts`, `ui/src/RunsIndex.tsx`, `ui/src/App.tsx` + +### Phase 4: UI overhaul + +**Commit 4a: Runs index redesign** +- Project dropdown filter, kind dropdown filter +- Pagination (100/page) +- Checkbox selection for compare +- Sorted by date (from manifest.started_at or dir name) +- Files changed: `ui/src/RunsIndex.tsx`, `ui/src/api.ts` (add query params) + +**Commit 4b: Compare view refactor** +- New route: `/compare/:left/:right` +- Fetch both runs' `run.json`, compute diff client-side +- Summary bar, per-test table with delta badges, score +- "Compare with..." 
button on individual run pages +- Files changed: `ui/src/components/CompareView.tsx`, `ui/src/main.tsx`, `ui/src/App.tsx` + +**Commit 4c: Co-navigation (side-by-side)** +- Split-screen layout reusing RunView for each side +- Shared tab state (switching tab on one side switches both) +- Files changed: `ui/src/components/CompareView.tsx` + +### Phase 5: Tests + +**Commit 5a: Update integration test** +- Rewrite `compare_integration.rs` for new flow (no compare directory, reads run.json) +- Fixture crate runs via `patchbay test --persist` +- Assert cached run lookup works (second compare skips build) +- Files changed: `patchbay-cli/tests/compare_integration.rs` + +**Commit 5b: Update E2E test** +- Rewrite `compare.spec.ts` for new routes and data model +- Mock two run directories with `run.json` manifests containing test results +- Assert compare view renders from `/compare/run-a/run-b` +- Files changed: `ui/e2e/compare.spec.ts` + +## Key invariants + +1. `run.json` is the single source of truth for run metadata +2. Filesystem is the only source of truth for the server (no persistent index) +3. Compare is always computed, never stored +4. Every `patchbay test --persist` and `patchbay run` produces a discoverable run +5. Image cache stays in `~/.local/share/patchbay/` (XDG), not `.patchbay/` +6. Backward compat: old `batch`, `test_outcome`, `status` fields still deserialize + +## Decisions + +1. **manifest.json vs run.json**: Both coexist. `manifest.json` (SimRunReport) has per-sim + details. `run.json` (RunManifest) has unified metadata. Naming and docs are clear. + Long-term merge target. + +2. **Pagination**: offset/limit (file-based discovery is inherently bounded). + +3. **testdir**: Supported mechanism for test output. Consider re-exporting from `patchbay::testdir`. + Also set `PATCHBAY_OUTDIR` and check both locations. 
diff --git a/plans/future-platform.md b/plans/future-platform.md new file mode 100644 index 0000000..413a8f0 --- /dev/null +++ b/plans/future-platform.md @@ -0,0 +1,136 @@ +# Future: Distributed Systems Debug/Test/Bench Platform + +Where patchbay can go beyond v1. Ordered roughly by expected impact and +feasibility. + +--- + +## Near-term (post v1) + +### qlog comparison +Sum packet and frame counts from per-device qlog files. Show deltas in compare +view as stacked bar charts. Since qlog is already collected per device, this is +mostly parsing + aggregation. Useful for spotting QUIC behavioral changes (more +retransmits, different frame mix) between versions. The compare UI already +prepares for this (code comments mark where to add qlog parsing and delta +display). Implementation: parse qlog JSON, bucket events by type, diff counts +between left/right runs, render as compact delta table in CompareSummary. + +### Flaky test detection +Run N iterations of a test suite, track pass/fail rate per test over time. Flag +tests whose failure rate exceeds a threshold. Store history in +`.patchbay/work/flaky-history.jsonl`. Integrate with compare: exclude known-flaky +tests from regression scoring. + +### Network fault injection schedules +Programmable chaos beyond static `set-link-condition`. Define schedules: +```toml +[[fault-schedule]] +device = "client" +at = "5s" +condition = { latency_ms = 500, jitter_ms = 100 } +at = "15s" +condition = "reset" # back to normal +``` +Partition events, delay spikes, packet reordering, bandwidth oscillation. Make it +easy to simulate real-world network instability over time. 
+ +### Multi-region latency matrices +Define region topologies with realistic inter-region RTT from cloud provider +measurements: +```toml +[regions] +us-east = { router = "r1" } +eu-west = { router = "r2" } +ap-south = { router = "r3" } + +[latency-matrix] +us-east.eu-west = { rtt_ms = 80, jitter_ms = 5 } +us-east.ap-south = { rtt_ms = 180, jitter_ms = 15 } +eu-west.ap-south = { rtt_ms = 140, jitter_ms = 10 } +``` + +### Bisect mode +`patchbay bisect ` — binary search git history for the commit +that introduced a regression (test failure or metric threshold breach). Uses +compare infrastructure internally. Could integrate with `git bisect run`. + +--- + +## Mid-term + +### Record & replay +**High impact.** Capture full packet traces (pcap per device via `tcpdump` in +namespace) during a run. Store alongside events. Replay deterministically by +injecting captured packets back into namespaces, without running the original +binaries. Enables: +- Exact reproduction of a failure without rebuilding +- Sharing a failure as a self-contained artifact +- Regression testing against recorded network behavior + +### Distributed tracing correlation +Collect OpenTelemetry spans from all devices (via OTLP receiver per namespace), +stitch into a unified trace. Visualize in UI: see a request flow from client +through NAT, relay, to server. Correlate with network events (link condition +changes, NAT rebinds) on the timeline. + +### Benchmark suites +Named benchmark profiles tracking key metrics over time: +```toml +[benchmark.relay-throughput] +sim = "iperf-relay.toml" +metric = "iperf.down_bytes" +direction = "higher_is_better" +threshold_regression = "5%" +``` +Track like `criterion` but for distributed scenarios. CI posts trend graphs +on PRs. 
+ +### CI integration +`patchbay ci` mode: +- Posts compare results as GitHub PR comments (markdown table) +- Blocks merge on regression thresholds +- Stores history in a central patchbay-server instance +- Supports `--push` to send results to remote server + +### Resource profiling per device +Collect CPU, memory, fd count, socket buffer usage per namespace. Correlate with +network events in the timeline. Implemented via periodic `/proc` sampling inside +each namespace worker thread. + +--- + +## Long-term vision + +### Cluster mode +Distribute devices across multiple machines for large-scale simulations (100+ +nodes). Coordination via iroh/QUIC. Each machine runs a patchbay agent that +manages local namespaces. Central orchestrator assigns devices to agents and +manages cross-machine virtual links (via tunnels). + +### Protocol conformance testing +Pluggable test harnesses that verify protocol implementations against specs. +Ship reference test suites for QUIC, STUN/TURN, DNS, HTTP/3. Run against any +implementation by pointing at its binary. + +### AI-assisted debugging +Feed topology + events + logs + metrics into an LLM to answer: +- "Why did this connection fail?" +- "Why is throughput 10x lower than baseline?" +- "What changed between these two runs that explains the regression?" +Possible via structured context from events.jsonl + metrics.jsonl + qlog. + +### Snapshot & restore +Freeze entire lab state: all namespace configurations, iptables rules, tc qdiscs, +routing tables, running process state. Serialize to disk. Restore later for +deterministic debugging. Like VM snapshots but for namespace-based labs. + +### Visual topology editor +Drag-and-drop in the UI to build topologies. Export to TOML. Live-edit during +inspect sessions — add/remove devices, change NAT/firewall, apply link conditions, +all from the browser. + +### Shared test infrastructure +Hosted patchbay-server where teams push results from CI. Compare across +branches, PRs, releases. 
Retention policies, alerting on metric regressions, +team dashboards. Like a Grafana for network simulation results. diff --git a/plans/v1-unified-cli.md b/plans/v1-unified-cli.md new file mode 100644 index 0000000..d8e3b8a --- /dev/null +++ b/plans/v1-unified-cli.md @@ -0,0 +1,514 @@ +# v1: Unified CLI, Compare, Metrics + +## Overview + +Consolidate `patchbay-runner` and `patchbay-vm` into a single `patchbay` CLI with +feature-gated backends, add `compare` mode for regression testing across git refs, +and add lightweight per-device metrics recording via tracing. + +All work paths consolidate under `.patchbay/`. Image cache stays in +`~/.local/share/patchbay/` (shared across projects). + +--- + +## CLI Structure + +``` +patchbay +├── test [filter] # run cargo test (native or VM) +│ [--ignored] # include ignored tests +│ [--ignored-only] # run only ignored tests +│ [-p pkg] [--test name] # cargo test selectors +│ [--vm [qemu|container]] # force VM backend +│ [-- extra-cargo-args] +│ +├── run # run simulations +│ [--vm [qemu|container]] +│ [--verbose] [--open] [--timeout T] +│ +├── compare +│ ├── test [filter] [ref2] # compare test results +│ │ [--ignored] [--ignored-only] +│ │ [-p pkg] [--test name] +│ │ +│ └── run [ref2] # compare sim results +│ +├── serve [dir] # serve UI +│ [--bind addr] [--open] [--testdir] +│ +├── vm # direct VM control +│ ├── up [--recreate] [--backend qemu|container] +│ ├── down +│ ├── status +│ ├── ssh [cmd...] +│ └── cleanup +│ +├── inspect # interactive ns debugging +└── run-in # exec in inspect ns +``` + +### Backend auto-detection + +``` +Linux → native (patchbay crate, namespaces) +macOS + ARM → check `container` CLI exists → container, else qemu +macOS + x86 → qemu +other → qemu +``` + +Override: `--vm` (no value = auto-detect VM), `--vm qemu`, `--vm container`. 
+ +### Compare semantics + +- One ref: `patchbay compare test main` → worktree vs `main` +- Two refs: `patchbay compare test main abc123` → `main` vs `abc123` (no worktree) +- Creates git worktrees in `.patchbay/tree/{ref}/` +- Runs the full test/sim suite in each worktree (sequential) +- Writes compare manifest as a batch under `.patchbay/work/compare-{timestamp}/` +- Compare is itself a batch, so it shows up in UI as a batch with left/right runs +- Prints quick pass/fail + time summary and score +- Worktrees removed if unchanged (git diff empty), kept otherwise + +### Test delegation + +All `patchbay test` and `patchbay compare test` commands add +`RUSTFLAGS="--cfg patchbay_test"` to cargo invocations (enables conditional +compilation in test code, e.g., `#[cfg(patchbay_test)]`). + +On native: prefer `cargo nextest run` if installed, else `cargo test` (warn once +that nextest gives better structured output). Forward filter, `--ignored`, +`--ignored-only`, package/test selectors, and extra args. + +On VM: cross-compile to musl, stage binaries, run in guest (existing flow). + +**testdir integration:** The `testdir` crate writes to `target/testdir-current/` +and has no env var override. After tests complete, copy `testdir-current/` into +`.patchbay/work/{run}/testdir/` so results are co-located with other run artifacts +and visible in the UI. + +--- + +## Consolidated Paths + +``` +.patchbay/ +├── work/ # run output (was .patchbay-work) +│ ├── latest -> ... +│ ├── sim-YYMMDD-HHMMSS/ +│ └── compare-YYMMDD-HHMMSS/ # compare is a batch +│ ├── left-{ref}/ # run results for ref1 +│ ├── right-{ref}/ # run results for ref2 +│ └── summary.json # compare manifest +├── vm/ # VM state (was .qemu-vm) +├── tree/ # git worktrees for compare +│ ├── main/ +│ └── abc123/ +└── cache/ # binary cache (project-local) + └── binaries/ + +~/.local/share/patchbay/ +└── images/ # shared VM base images (unchanged) +``` + +`.patchbay/` should be gitignored. 
+ +Path migration: check for old `.patchbay-work` and `.qemu-vm`, print one-line +warning pointing to new location, do NOT auto-migrate. + +--- + +## Commit Strategy + +### Commit 0: Rename "invocation" → "batch" + +**Goal:** Rename everywhere: Rust types, API endpoints, UI code, CSS. +Separate commit because it touches many files but is a trivial rename. + +Changes: +- Rust: `invocation` → `batch` in structs, fields, API paths +- Server: `/api/invocations/` → `/api/batches/` (keep `/api/invocations/` as + alias for backward compat since links are shared on Discord) +- UI: rename in types, components, routes. Keep `#/inv/` route as redirect + to `#/batch/` for backward compat +- UI routing: switch from hash routes to real routes. Server returns index.html + for all non-`/api/` and non-asset paths (wildcard fallback) + +### Commit 1: Pure refactor — extract libraries from CLIs + +**Goal:** Make `patchbay-runner` and `patchbay-vm` into libraries, create `patchbay-cli`. +Zero behavior change. 
+ +Changes: +- `patchbay-runner/src/main.rs` → move CLI parsing + dispatch into `patchbay-cli` + - Keep `sim/` module as library (`pub mod sim` in `lib.rs`) + - Keep `init.rs` (userns bootstrap) + - Remove `[[bin]]` from `patchbay-runner/Cargo.toml`, keep as `[lib]` + - The `inspect`/`run-in` code moves to `patchbay-cli` (it's CLI-only) + +- `patchbay-vm/src/main.rs` → move CLI parsing + dispatch into `patchbay-cli` + - `common.rs`, `qemu.rs`, `container.rs`, `util.rs` stay as library + - Add `pub` to module-level functions needed by CLI + - Remove `[[bin]]` from `patchbay-vm/Cargo.toml`, keep as `[lib]` + +- New crate: `patchbay-cli/` + - `Cargo.toml` with feature flags: + - `native` (default on linux) → depends on `patchbay`, `patchbay-runner` + - `vm-qemu` → depends on `patchbay-vm` + - `vm-container` → depends on `patchbay-vm` + - `serve` (default) → depends on `patchbay-server` + - `src/main.rs` — unified clap CLI, dispatches to runner/vm libs + - Binary name: `patchbay` + +- Update workspace `Cargo.toml` to add `patchbay-cli` +- Update paths: `.patchbay-work` → `.patchbay/work`, `.qemu-vm` → `.patchbay/vm` + +**LOC estimate:** ~300 new in patchbay-cli, ~200 removed from runner+vm mains. +Net small because it's mostly moving code. + +### Commit 2: Add `patchbay test` (native + VM) + +**Goal:** `patchbay test` delegates to cargo test on native, VM test flow on VM. + +Changes: +- `patchbay-cli/src/test.rs` — new module + - Native path: detect nextest (`which cargo-nextest`), prefer if found, else + `cargo test` with one-time warning + - Sets `RUSTFLAGS="--cfg patchbay_test"` on all cargo commands + - Maps `--ignored` → `-- --ignored`, `--ignored-only` → `-- --ignored` + - VM path: call into `patchbay_vm::run_tests()` (existing) + - Parse test output for pass/fail/ignore counts (both cargo test and nextest) + - After tests: copy `target/testdir-current/` into `.patchbay/work/` run dir + - Support `--vm` override + +**LOC estimate:** ~200 new. 
+ +### Commit 3: Metrics recording — `device.record()` + builder + iroh-metrics + +**Goal:** Lightweight per-device metrics, stored as JSONL, viewable in UI. + +**Format:** `device..metrics.jsonl` +```jsonl +{"t":1679000000.123,"m":{"throughput_bytes":1234.0,"connections_active":5}} +{"t":1679000000.456,"m":{"latency_ms":42.5}} +``` + +Each line is a batch of key-value pairs sharing one timestamp. This handles both +single metrics and bulk emission (iroh-metrics, custom structs). + +**Tracing approach:** Since tracing field names must be compile-time, we serialize +the metrics map to a JSON string and emit as a single known field: + +```rust +tracing::info!( + target: "patchbay::_metrics", + metrics_json = %json_string, +); +``` + +The namespace subscriber (already in `tracing.rs` with `JsonFieldVisitor`) +intercepts this target, parses `metrics_json`, prepends timestamp, writes to +the per-device metrics file. + +**Device tracing handle:** Currently `device.run_sync(|| tracing::info!(...))` is +needed to emit to the right file. This is wasteful for metrics. Instead: + +```rust +impl Device { + /// Enter this device's tracing context. Returns a guard; while held, + /// tracing events are routed to this device's output files. + pub fn enter_tracing(&self) -> tracing::dispatcher::DefaultGuard { + let dispatch = self.tracing_dispatch(); + tracing::dispatcher::set_default(&dispatch) + } + + /// Record a single metric. + pub fn record(&self, key: &str, value: f64) { + let _guard = self.enter_tracing(); + let json = format!("{{\"{key}\":{value}}}"); + tracing::info!(target: "patchbay::_metrics", metrics_json = %json); + } + + /// Record multiple metrics at once. + pub fn metrics(&self) -> MetricsBuilder<'_> { + MetricsBuilder { device_name: self.name().to_string(), dispatch: self.tracing_dispatch(), fields: serde_json::Map::new() } + } +} + +/// Builder for batch metric emission. +pub struct MetricsBuilder { ... 
} +impl MetricsBuilder { + pub fn field(mut self, key: &str, value: f64) -> Self { + self.fields.insert(key.to_string(), value.into()); + self + } + /// Emit all fields as a single metrics line. + pub fn emit(self) { + let _guard = tracing::dispatcher::set_default(&self.dispatch); + let json = serde_json::to_string(&self.fields).unwrap(); + tracing::info!(target: "patchbay::_metrics", metrics_json = %json); + } +} +``` + +**iroh-metrics integration** (optional feature `iroh-metrics`): + +```rust +#[cfg(feature = "iroh-metrics")] +impl Device { + /// Record all metrics from an iroh-metrics MetricsGroup. + pub fn record_metrics(&self, group: &impl iroh_metrics::MetricsGroup) { + // MetricsGroup exposes iter() or encode() to get name/value pairs + // Serialize to JSON map, emit as single metrics line + let mut builder = self.metrics(); + for (name, value) in group.iter() { + builder = builder.field(name, value); + } + builder.emit(); + } +} +``` + +Changes: +- `patchbay/src/handles.rs` — `record()`, `metrics()`, `enter_tracing()`, + `record_metrics()` (feature-gated) +- `patchbay/src/metrics.rs` — `MetricsBuilder` struct +- `patchbay/src/tracing.rs` — handle `patchbay::_metrics` target, write to + metrics file. Store `Dispatch` clone in device handle for direct emission +- `patchbay/src/consts.rs` — add `METRICS_JSONL_EXT = "metrics.jsonl"` +- `patchbay/Cargo.toml` — optional `iroh-metrics` dependency +- `patchbay-server/src/lib.rs` — recognize `*.metrics.jsonl` as log kind `metrics` + +**LOC estimate:** ~200 new. 
+ +### Commit 4: Compare mode + +**Goal:** `patchbay compare test main` and `patchbay compare run sims/ main` + +Changes: +- `patchbay-cli/src/compare.rs` — new module + + **Worktree management:** + ```rust + fn setup_worktree(ref_name: &str, base: &Path) -> Result { + let tree_dir = base.join(".patchbay/tree").join(sanitize(ref_name)); + // git worktree add --detach + } + fn cleanup_if_unchanged(tree_dir: &Path) -> Result<()> { + // git diff --quiet && git worktree remove + } + ``` + + **Compare test flow (sequential):** + 1. If two refs: create two worktrees. If one ref: worktree + current dir + 2. Run `patchbay test` in left, then right (sequential) + 3. Parse test results from both runs + 4. Write compare batch to `.patchbay/work/compare-{timestamp}/` + (structured as a batch so it shows in UI naturally) + 5. Print summary table + score + 6. Cleanup unchanged worktrees + + **Compare run flow:** + Same worktree setup, run sims in each, compare captures/results/metrics. + + **Summary output:** + ``` + Compare: main ↔ worktree + + Tests: 45/50 pass → 47/50 pass (+2 fixed) + Regressions: 0 + New failures: 0 + Total time: 120.3s → 115.1s (-4.3%) + + ┌──────────────┬────────┬────────┬─────────┐ + │ Test │ Left │ Right │ Delta │ + ├──────────────┼────────┼────────┼─────────┤ + │ test_nat │ PASS │ PASS │ -0.3s │ + │ test_relay │ FAIL │ PASS │ fixed │ + │ test_holepunch│ PASS │ PASS │ +0.1s │ + └──────────────┴────────┴────────┴─────────┘ + + Score: +7 (2 fixes, 0 regressions, 4.3% faster) + ``` + + **Scoring formula (simple v0):** + - +3 per fix (fail→pass) + - -5 per regression (pass→fail) + - +1 if total time improves >2% + - -1 if total time regresses >5% + + **Metrics in compare:** If both sides have `*.metrics.jsonl`, include metric + deltas in the per-test table (last value of each key compared). 
+
+  **qlog in compare (prepared, not implemented):**
+  ```rust
+  // TODO: qlog comparison — parse per-device qlog files, sum packet/frame
+  // counts by type, include as metric deltas in compare summary.
+  // See LogsTab.tsx for qlog rendering; comparison adds a delta overlay.
+  ```
+
+- `patchbay-cli/src/compare_manifest.rs` — types for compare output
+
+**LOC estimate:** ~350 new.
+
+### Commit 5: UI — metrics view, comparison, tree navigation
+
+**Goal:** Show metrics in UI, add split-screen comparison, improve navigation.
+
+**Architecture:** Extract existing run detail into a reusable `RunView` component,
+then compose `CompareView` as two `RunView`s side by side.
+
+Changes:
+
+**Routing overhaul:**
+- Switch from hash-based to real URL routes
+- Server: wildcard fallback (serve index.html for all non-`/api/`, non-asset paths)
+- Routes: `/run/:name`, `/batch/:name`, `/compare/:name`
+- Keep `/inv/:name` as redirect to `/batch/:name`
+
+**Navigation — tree selector:**
+- Replace `<select>` with a sidebar tree: batches expandable to runs
+
+**Component structure:**
+```
+App (Router)
+├── RunView (extracted, reusable)
+│ ├── TopologyGraph
+│ ├── LogsTab
+│ ├── TimelineTab
+│ ├── PerfTab
+│ └── MetricsTab (new)
+├── BatchView (renamed from invocation view)
+│ └── RunView per run
+└── CompareView (new, for batches with summary.json)
+ ├── CompareSummary
+ ├── RunView (left)
+ └── RunView (right)
+```
+
+Co-navigation: `CompareView` owns the active tab state, passes it down to both
+`RunView` instances. Tab clicks update once, both sides follow.
+
+---
+
+## Resolved Questions
+
+1. **nextest:** prefer if installed, else cargo test with warning
+2. **Compare parallelism:** sequential for now
+3. **Worktree cleanup:** remove if unchanged (git diff empty), keep if modified
+4. **Path migration:** minimal warning, no auto-migrate
+5.
**"invocation" → "batch":** yes, separate commit, keep backward compat routes + +--- + +## Tests + +### Integration tests (Rust) + +- `patchbay-cli`: test that `patchbay test` invokes cargo test with correct flags + (mock the cargo command, verify args including `RUSTFLAGS=--cfg patchbay_test`) +- `patchbay-cli`: test that `--vm` flag overrides backend detection +- `patchbay/src/handles.rs`: test `device.record()` writes to metrics file +- `patchbay/src/handles.rs`: test `device.metrics().field().field().emit()` + writes single line with all fields +- `patchbay-cli/src/compare.rs`: test worktree setup/cleanup, manifest generation +- `patchbay-cli/src/compare.rs`: test scoring formula +- `patchbay-server`: test that metrics.jsonl files are discovered as `metrics` kind + +### E2E tests + +- **Metrics UI:** playwright test — run a sim that records metrics, verify + MetricsTab shows key/value table with sparklines +- **Compare UI:** playwright test — create a compare batch directory with mock + data (left/right runs + summary.json), verify CompareView renders split screen + with summary bar, co-navigation works (clicking tab changes both sides) +- **Batch rename:** verify `/inv/` routes redirect to `/batch/` +- **Tree nav:** verify sidebar shows batches expandable to runs diff --git a/test-results/.last-run.json b/test-results/.last-run.json new file mode 100644 index 0000000..5fca3f8 --- /dev/null +++ b/test-results/.last-run.json @@ -0,0 +1,4 @@ +{ + "status": "failed", + "failedTests": [] +} \ No newline at end of file diff --git a/ui/e2e/compare.spec.ts b/ui/e2e/compare.spec.ts new file mode 100644 index 0000000..d378399 --- /dev/null +++ b/ui/e2e/compare.spec.ts @@ -0,0 +1,208 @@ +import { test, expect } from '@playwright/test' +import { mkdtempSync, mkdirSync, writeFileSync, rmSync } from 'node:fs' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { type ChildProcess, spawn } from 'node:child_process' +import { PATCHBAY_BIN, REPO_ROOT, 
waitForHttp } from './helpers' + +const PORT = 7434 +const UI_URL = `http://127.0.0.1:${PORT}` + +const MINIMAL_EVENT = + '{"opid":1,"timestamp":"2026-03-25T00:00:00Z","kind":"lab_created","lab_prefix":"lab-p1","label":"test"}\n' + +const MOCK_LEFT_MANIFEST = { + kind: 'test', + project: 'test-project', + commit: 'aaa111', + branch: 'main', + dirty: false, + outcome: 'pass', + pass: 2, + fail: 0, + total: 2, + tests: [ + { name: 'counter::udp_counter', status: 'pass' }, + { name: 'counter::udp_threshold', status: 'pass' }, + ], +} + +const MOCK_RIGHT_MANIFEST = { + kind: 'test', + project: 'test-project', + commit: 'bbb222', + branch: 'feature', + dirty: false, + outcome: 'fail', + pass: 1, + fail: 1, + total: 2, + tests: [ + { name: 'counter::udp_counter', status: 'pass' }, + { name: 'counter::udp_threshold', status: 'fail' }, + ], +} + +test('checkbox selection on runs index navigates to compare view', async ({ page }) => { + test.setTimeout(60_000) + const workDir = mkdtempSync(join(tmpdir(), 'patchbay-compare-select-')) + let proc: ChildProcess | null = null + + try { + // Create two run directories with manifests + const leftDir = join(workDir, 'run-left') + const rightDir = join(workDir, 'run-right') + mkdirSync(leftDir, { recursive: true }) + mkdirSync(rightDir, { recursive: true }) + + writeFileSync(join(leftDir, 'run.json'), JSON.stringify(MOCK_LEFT_MANIFEST)) + writeFileSync(join(leftDir, 'events.jsonl'), MINIMAL_EVENT) + writeFileSync(join(rightDir, 'run.json'), JSON.stringify(MOCK_RIGHT_MANIFEST)) + writeFileSync(join(rightDir, 'events.jsonl'), MINIMAL_EVENT) + + proc = spawn( + PATCHBAY_BIN, + ['serve', workDir, '--bind', `127.0.0.1:${PORT}`], + { cwd: REPO_ROOT, stdio: 'pipe' }, + ) + await waitForHttp(UI_URL, 15_000) + + await page.goto(UI_URL) + await expect(page.getByRole('heading', { name: 'Runs' })).toBeVisible({ timeout: 10_000 }) + + // Both runs should appear + const checkboxes = page.locator('.run-entry input[type="checkbox"]') + await 
expect(checkboxes).toHaveCount(2, { timeout: 10_000 }) + + // Compare button should NOT be visible with 0 selected + await expect(page.locator('.compare-selected-btn')).not.toBeVisible() + + // Select first checkbox + await checkboxes.first().check() + // Compare button still not visible with only 1 selected + await expect(page.locator('.compare-selected-btn')).not.toBeVisible() + + // Select second checkbox + await checkboxes.nth(1).check() + // Now the compare button should appear + const compareBtn = page.locator('.compare-selected-btn') + await expect(compareBtn).toBeVisible() + await expect(compareBtn).toHaveText('Compare Selected (2)') + + // Click compare and verify navigation to compare view + await compareBtn.click() + await expect(page).toHaveURL(/\/compare\//) + await expect(page.getByText('main@aaa111').first()).toBeVisible({ timeout: 10_000 }) + await expect(page.getByText('feature@bbb222').first()).toBeVisible() + } finally { + if (proc && !proc.killed) proc.kill('SIGTERM') + rmSync(workDir, { recursive: true, force: true }) + } +}) + +test('compare view renders summary and regression', async ({ page }) => { + test.setTimeout(60_000) + const workDir = mkdtempSync(join(tmpdir(), 'patchbay-compare-e2e-')) + let proc: ChildProcess | null = null + + try { + // Write mock data: two separate run directories, each with run.json + const leftDir = join(workDir, 'run-left') + const rightDir = join(workDir, 'run-right') + mkdirSync(leftDir, { recursive: true }) + mkdirSync(rightDir, { recursive: true }) + + writeFileSync(join(leftDir, 'run.json'), JSON.stringify(MOCK_LEFT_MANIFEST)) + writeFileSync(join(leftDir, 'events.jsonl'), MINIMAL_EVENT) + + writeFileSync(join(rightDir, 'run.json'), JSON.stringify(MOCK_RIGHT_MANIFEST)) + writeFileSync(join(rightDir, 'events.jsonl'), MINIMAL_EVENT) + + // Start server + proc = spawn( + PATCHBAY_BIN, + ['serve', workDir, '--bind', `127.0.0.1:${PORT}`], + { cwd: REPO_ROOT, stdio: 'pipe' }, + ) + await waitForHttp(UI_URL, 
15_000) + + // Navigate directly to the compare view with two run names + await page.goto(`${UI_URL}/compare/run-left/run-right`) + + // Verify header shows ref labels and pass/fail summary + await expect(page.getByText('main@aaa111').first()).toBeVisible({ timeout: 10_000 }) + await expect(page.getByText('feature@bbb222').first()).toBeVisible() + // Concise header: "2/2 → 1/2 (1 regression)" + await expect(page.getByText('2/2').first()).toBeVisible() + await expect(page.getByText('1/2').first()).toBeVisible() + await expect(page.getByText('regression').first()).toBeVisible() + + // Negative: no fixes in this scenario + await expect(page.getByText('fix').first()).not.toBeVisible() + + // Score: 0 fixes, 1 regression => score = -5 + await expect(page.getByText('-5').first()).toBeVisible() + + // Per-test table: verify column content, not just presence + const tableRows = page.locator('table tbody tr') + await expect(tableRows).toHaveCount(2) // two tests total + + // udp_counter: pass on both sides, no delta + const counterRow = tableRows.filter({ hasText: 'udp_counter' }) + await expect(counterRow.locator('td').nth(1)).toHaveText('PASS') // left status + await expect(counterRow.locator('td').nth(2)).toHaveText('PASS') // right status + await expect(counterRow.locator('td').nth(3)).toHaveText('') // no delta + + // udp_threshold: pass -> fail = REGRESS + const thresholdRow = tableRows.filter({ hasText: 'udp_threshold' }) + await expect(thresholdRow.locator('td').nth(1)).toHaveText('PASS') + await expect(thresholdRow.locator('td').nth(2)).toHaveText('FAIL') + await expect(thresholdRow.locator('td').nth(3)).toHaveText('REGRESS') + } finally { + if (proc && !proc.killed) proc.kill('SIGTERM') + rmSync(workDir, { recursive: true, force: true }) + } +}) + +test('compare view shows fix when right side improves', async ({ page }) => { + test.setTimeout(60_000) + const workDir = mkdtempSync(join(tmpdir(), 'patchbay-compare-fix-')) + let proc: ChildProcess | null = null + + 
try { + // Reverse direction: left has a failure, right fixes it + const leftDir = join(workDir, 'run-broken') + const rightDir = join(workDir, 'run-fixed') + mkdirSync(leftDir, { recursive: true }) + mkdirSync(rightDir, { recursive: true }) + + writeFileSync(join(leftDir, 'run.json'), JSON.stringify(MOCK_RIGHT_MANIFEST)) // fail side + writeFileSync(join(leftDir, 'events.jsonl'), MINIMAL_EVENT) + writeFileSync(join(rightDir, 'run.json'), JSON.stringify(MOCK_LEFT_MANIFEST)) // pass side + writeFileSync(join(rightDir, 'events.jsonl'), MINIMAL_EVENT) + + proc = spawn( + PATCHBAY_BIN, + ['serve', workDir, '--bind', `127.0.0.1:${PORT}`], + { cwd: REPO_ROOT, stdio: 'pipe' }, + ) + await waitForHttp(UI_URL, 15_000) + + await page.goto(`${UI_URL}/compare/run-broken/run-fixed`) + + // Header should show fix info + await expect(page.getByText('fix').first()).toBeVisible({ timeout: 10_000 }) + // Negative: no regressions + await expect(page.getByText('regression')).not.toBeVisible() + + // Score: 1 fix * 3 = +3 + await expect(page.getByText('+3').first()).toBeVisible() + + // Delta column should show "fixed" not "REGRESS" + const thresholdRow = page.locator('table tbody tr').filter({ hasText: 'udp_threshold' }) + await expect(thresholdRow.locator('td').nth(3)).toHaveText('fixed') + } finally { + if (proc && !proc.killed) proc.kill('SIGTERM') + rmSync(workDir, { recursive: true, force: true }) + } +}) diff --git a/ui/e2e/devtools.spec.ts b/ui/e2e/devtools.spec.ts index 6398fab..f6667a8 100644 --- a/ui/e2e/devtools.spec.ts +++ b/ui/e2e/devtools.spec.ts @@ -36,16 +36,16 @@ test('devtools ui shows all views', async ({ page }) => { // Step 3: Open the UI. await page.goto(DEVTOOLS_URL) - // Verify the topbar shows "patchbay" heading. - await expect(page.getByRole('heading', { name: 'patchbay' })).toBeVisible() + // Runs index should show the run. 
+ await expect(page.getByRole('heading', { name: 'Runs' })).toBeVisible({ timeout: 10_000 }) - // The run selector should have an entry containing "e2e-test". - const selector = page.locator('select') - await expect(selector).toBeVisible() - await expect(selector.locator('option', { hasText: 'e2e-test' })).toBeAttached() + // Click through to the run detail. + const runLink = page.locator('a[href*="/run/"]').first() + await expect(runLink).toBeVisible({ timeout: 10_000 }) + await runLink.click() - // Verify the run status shows "stopped" (not stuck on "running"). - await expect(page.getByText('stopped')).toBeVisible({ timeout: 5_000 }) + // Verify the topbar shows "patchbay" heading. + await expect(page.getByRole('heading', { name: 'patchbay' })).toBeVisible({ timeout: 10_000 }) // Step 4: Verify topology tab shows router and device nodes (default tab). await expect(page.getByText('dc')).toBeVisible({ timeout: 10_000 }) diff --git a/ui/e2e/global-setup.ts b/ui/e2e/global-setup.ts index c7d4622..44d1810 100644 --- a/ui/e2e/global-setup.ts +++ b/ui/e2e/global-setup.ts @@ -7,24 +7,11 @@ const UI_DIR = path.resolve(THIS_DIR, '..') const REPO_ROOT = path.resolve(UI_DIR, '..') export default function globalSetup() { - console.log('[setup] building UI...') - execFileSync('npm', ['run', 'build'], { - cwd: UI_DIR, - stdio: 'inherit', - timeout: 60_000, - }) - - console.log('[setup] building cargo workspace...') - execFileSync('cargo', ['build', '-p', 'patchbay-runner', '-p', 'patchbay-server'], { + // cargo build triggers npm build via patchbay-server's build.rs + console.log('[setup] building cargo workspace (includes UI build)...') + execFileSync('cargo', ['build', '-p', 'patchbay-cli', '-p', 'patchbay-server'], { cwd: REPO_ROOT, stdio: 'inherit', timeout: 5 * 60_000, }) - - console.log('[setup] building patchbay-serve binary...') - execFileSync('cargo', ['build', '--bin', 'patchbay-serve'], { - cwd: REPO_ROOT, - stdio: 'inherit', - timeout: 3 * 60_000, - }) } diff --git 
a/ui/e2e/push.spec.ts b/ui/e2e/push.spec.ts index a6a02b4..5257cbe 100644 --- a/ui/e2e/push.spec.ts +++ b/ui/e2e/push.spec.ts @@ -74,34 +74,29 @@ test('push run results and view via deep link', async ({ page }) => { body: tarGz, }) expect(pushRes.status).toBe(200) - const pushBody = await pushRes.json() as { ok: boolean; invocation: string; project: string } + const pushBody = await pushRes.json() as { ok: boolean; group: string; project: string } expect(pushBody.ok).toBe(true) expect(pushBody.project).toBe('test-project') - expect(pushBody.invocation).toBeTruthy() + expect(pushBody.group).toBeTruthy() - // Step 4: Verify the run appears in the API. + // Step 4: Verify the run appears in the API (allow time for discovery). + await new Promise(r => setTimeout(r, 3000)) const runsRes = await fetch(`${SERVE_URL}/api/runs`) - const runs = await runsRes.json() as Array<{ name: string; invocation: string | null }> + const runs = await runsRes.json() as Array<{ name: string; group: string | null }> expect(runs.length).toBeGreaterThan(0) - // All runs should share the same invocation (the push dir name). - const inv = runs[0].invocation - expect(inv).toBe(pushBody.invocation) + // The pushed run should belong to a group matching the push dir. + const run = runs.find(r => r.group === pushBody.group) + expect(run).toBeTruthy() - // Step 5: Open the deep link and verify the UI shows the run. - await page.goto(`${SERVE_URL}/#/inv/${pushBody.invocation}`) + // Step 5: Open the runs index and verify pushed run appears with manifest data. + await page.goto(SERVE_URL) + await expect(page.getByRole('heading', { name: 'Runs' })).toBeVisible({ timeout: 15_000 }) - // The topbar should show "patchbay". - await expect(page.getByRole('heading', { name: 'patchbay' })).toBeVisible() - - // The sims tab should list the sim(s) from this push. 
- const simEntry = page.locator('.run-entry', { hasText: 'ping-e2e' }).first() - await expect(simEntry).toBeVisible({ timeout: 10_000 }) - - // Click through to an individual sim and verify topology loads. - await simEntry.click() - await expect(page.getByText('dc')).toBeVisible({ timeout: 10_000 }) - await expect(page.getByText('sender')).toBeVisible() - await expect(page.getByText('receiver')).toBeVisible() + // The group header should show manifest data (branch, commit). + const groupHeader = page.locator('.run-group-header').first() + await expect(groupHeader).toBeVisible({ timeout: 10_000 }) + await expect(page.getByText('feat/test').first()).toBeVisible({ timeout: 5_000 }) + await expect(page.getByText('abc1234').first()).toBeVisible() // Step 6: Verify push auth — request without key should fail. const noAuthRes = await fetch(`${SERVE_URL}/api/push/test-project`, { diff --git a/ui/e2e/runner-sim.spec.ts b/ui/e2e/runner-sim.spec.ts index a82904f..84b86a0 100644 --- a/ui/e2e/runner-sim.spec.ts +++ b/ui/e2e/runner-sim.spec.ts @@ -37,13 +37,17 @@ test('runner sim produces viewable UI output', async ({ page }) => { ) await waitForHttp(UI_URL, 15_000) - // Step 3: Verify the UI loads and shows the run. + // Step 3: Verify the runs index shows the run. await page.goto(UI_URL) - await expect(page.getByRole('heading', { name: 'patchbay' })).toBeVisible() + await expect(page.getByRole('heading', { name: 'Runs' })).toBeVisible({ timeout: 15_000 }) - const selector = page.locator('select') - await expect(selector).toBeVisible() - await expect(selector.locator('option', { hasText: 'ping-e2e' })).toBeAttached() + // Expand the group (collapsed by default) then click a child run. 
+ const groupHeader = page.locator('.run-group-header').first() + await expect(groupHeader).toBeVisible({ timeout: 10_000 }) + await groupHeader.click() + const runLink = page.locator('a[href*="/run/"]').first() + await expect(runLink).toBeVisible({ timeout: 5_000 }) + await runLink.click() // Topology tab should show the router and devices. await expect(page.getByText('dc')).toBeVisible({ timeout: 10_000 }) @@ -59,10 +63,13 @@ test('runner sim produces viewable UI output', async ({ page }) => { await expect(page.getByText('router_added').first()).toBeVisible() await expect(page.getByText('device_added').first()).toBeVisible() - // Perf tab: should show latency column from ping results. + // Perf tab: should show latency column from ping results with actual numeric data. await page.getByRole('button', { name: 'perf' }).click() await expect(page.getByText('ping-check')).toBeVisible({ timeout: 5_000 }) await expect(page.getByText('Latency (ms)')).toBeVisible() + // Verify that the perf table has at least one data row with a numeric latency value. + const perfDataCell = page.locator('table tbody tr td').first() + await expect(perfDataCell).toBeVisible({ timeout: 5_000 }) } finally { if (serveProc && !serveProc.killed) { serveProc.kill('SIGTERM') @@ -71,12 +78,12 @@ test('runner sim produces viewable UI output', async ({ page }) => { } }) -test('multi-sim invocation shows grouped selector and combined results', async ({ page }) => { +test('multi-sim group shows grouped selector and combined results', async ({ page }) => { test.setTimeout(4 * 60 * 1000) const workDir = mkdtempSync(`${tmpdir()}/patchbay-runner-e2e-multi-`) let serveProc: ChildProcess | null = null try { - // Run both sims in a single invocation. + // Run both sims in a single group. 
execFileSync( PATCHBAY_BIN, ['run', '--work-dir', workDir, PING_TOML, IPERF_TOML], @@ -97,28 +104,24 @@ test('multi-sim invocation shows grouped selector and combined results', async ( await waitForHttp(UI_URL, 15_000) await page.goto(UI_URL) - await expect(page.getByRole('heading', { name: 'patchbay' })).toBeVisible() - - // The selector should have an optgroup (invocation) with both sims. - const selector = page.locator('select') - await expect(selector).toBeVisible() - await expect(selector.locator('optgroup')).toBeAttached() - await expect(selector.locator('option', { hasText: 'ping-e2e' })).toBeAttached() - await expect(selector.locator('option', { hasText: 'iperf-e2e' })).toBeAttached() - - // Select the "combined" option. - const combinedOption = selector.locator('option', { hasText: 'combined' }) - await expect(combinedOption).toBeAttached() - await selector.selectOption({ label: await combinedOption.innerText() }) - - // Switch to perf tab — invocation view defaults to sims list. - await page.getByRole('button', { name: 'perf' }).click() - // Perf tab should show summary and detail tables with both sims. - await expect(page.getByText('summary')).toBeVisible({ timeout: 5_000 }) - await expect(page.getByText('all steps')).toBeVisible() - // Verify both sims appear in the summary table cells. - await expect(page.getByRole('cell', { name: 'ping-e2e' }).first()).toBeVisible() - await expect(page.getByRole('cell', { name: 'iperf-e2e' }).first()).toBeVisible() + await expect(page.getByRole('heading', { name: 'Runs' })).toBeVisible({ timeout: 15_000 }) + + // Expand the group to see child runs. + const groupHeader = page.locator('.run-group-header').first() + await expect(groupHeader).toBeVisible({ timeout: 10_000 }) + await groupHeader.click() + + // Both sims should appear as run entries. 
+ await expect(page.getByText('ping-e2e').first()).toBeVisible({ timeout: 5_000 }) + await expect(page.getByText('iperf-e2e').first()).toBeVisible() + + // Click through to one of the runs and verify it loads. + const pingLink = page.locator('a[href*="/run/"]', { hasText: 'ping-e2e' }).first() + await expect(pingLink).toBeVisible({ timeout: 5_000 }) + await pingLink.click() + // Topology tab should render topology nodes for this sim. + await expect(page.getByText('sender')).toBeVisible({ timeout: 10_000 }) + await expect(page.getByText('receiver')).toBeVisible() } finally { if (serveProc && !serveProc.killed) { serveProc.kill('SIGTERM') @@ -153,7 +156,14 @@ test('iperf sim shows perf results', async ({ page }) => { await waitForHttp(UI_URL, 15_000) await page.goto(UI_URL) - await expect(page.getByRole('heading', { name: 'patchbay' })).toBeVisible() + await expect(page.getByRole('heading', { name: 'Runs' })).toBeVisible({ timeout: 15_000 }) + // Expand group and click through to the run detail. + const groupHeader = page.locator('.run-group-header').first() + await expect(groupHeader).toBeVisible({ timeout: 10_000 }) + await groupHeader.click() + const runLink = page.locator('a[href*="/run/"]').first() + await expect(runLink).toBeVisible({ timeout: 5_000 }) + await runLink.click() // Navigate to perf tab. 
await page.getByRole('button', { name: 'perf' }).click() diff --git a/ui/src/App.tsx b/ui/src/App.tsx deleted file mode 100644 index bc69328..0000000 --- a/ui/src/App.tsx +++ /dev/null @@ -1,465 +0,0 @@ -import { useCallback, useEffect, useRef, useState } from 'react' -import { useLocation, useNavigate } from 'react-router-dom' -import type { - Firewall, - LabEvent, - LabState, - LinkCondition, - Nat, - NatV6Mode, - RouterState, - DeviceState, - IfaceState, -} from './devtools-types' -import type { CombinedResults, SimResults } from './types' -import { - fetchRuns, - fetchState, - fetchEvents, - subscribeEvents, - fetchLogs, - fetchResults, - fetchCombinedResults, - runFilesBase, -} from './api' -import type { RunInfo, LogEntry } from './api' -import LogsTab from './components/LogsTab' -import PerfTab from './components/PerfTab' -import TimelineTab from './components/TimelineTab' -import TopologyGraph from './components/TopologyGraph' -import NodeDetail from './components/NodeDetail' - -type Tab = 'topology' | 'logs' | 'timeline' | 'perf' | 'sims' - -// ── Selection model ──────────────────────────────────────────────── - -type Selection = - | { kind: 'run'; name: string } - | { kind: 'invocation'; name: string } - -function selectionKey(s: Selection | null): string { - if (!s) return '' - return s.kind === 'invocation' ? `inv:${s.name}` : s.name -} - -function selectionPath(s: Selection | null): string { - if (!s) return '/' - return s.kind === 'invocation' ? 
`/inv/${s.name}` : `/run/${s.name}` -} - -// ── Invocation grouping ──────────────────────────────────────────── - -interface InvocationGroup { - invocation: string - runs: RunInfo[] -} - -function groupByInvocation(runs: RunInfo[]): { groups: InvocationGroup[]; ungrouped: RunInfo[] } { - const grouped = new Map() - const ungrouped: RunInfo[] = [] - for (const r of runs) { - if (r.invocation) { - let list = grouped.get(r.invocation) - if (!list) { - list = [] - grouped.set(r.invocation, list) - } - list.push(r) - } else { - ungrouped.push(r) - } - } - const groups: InvocationGroup[] = [] - for (const [invocation, groupRuns] of grouped) { - groups.push({ invocation, runs: groupRuns }) - } - return { groups, ungrouped } -} - -/** Short display label for a run within an invocation group. */ -function simLabel(run: RunInfo): string { - if (run.invocation && run.name.startsWith(run.invocation + '/')) { - return run.label ?? run.name.slice(run.invocation.length + 1) - } - return run.label ?? run.name -} - -// ── State reducer (from DevtoolsApp) ────────────────────────────── - -function applyEvent(state: LabState, event: LabEvent): LabState { - const next = { ...state, opid: event.opid } - const kind = event.kind - - if (kind === 'router_added') { - const name = event.name as string - const routerState: RouterState = { - ns: event.ns as string, - region: (event.region as string | null) ?? null, - nat: event.nat as Nat, - nat_v6: event.nat_v6 as NatV6Mode, - firewall: event.firewall as Firewall, - ip_support: event.ip_support as RouterState['ip_support'], - mtu: (event.mtu as number | null) ?? null, - upstream: (event.upstream as string | null) ?? null, - uplink_ip: (event.uplink_ip as string | null) ?? null, - uplink_ip_v6: (event.uplink_ip_v6 as string | null) ?? null, - downstream_cidr: (event.downstream_cidr as string | null) ?? null, - downstream_gw: (event.downstream_gw as string | null) ?? null, - downstream_cidr_v6: (event.downstream_cidr_v6 as string | null) ?? 
null, - downstream_gw_v6: (event.downstream_gw_v6 as string | null) ?? null, - downstream_bridge: event.downstream_bridge as string, - downlink_condition: (event.downlink_condition as LinkCondition | null) ?? null, - devices: (event.devices as string[]) ?? [], - counters: (event.counters as Record) ?? {}, - } - next.routers = { ...next.routers, [name]: routerState } - } else if (kind === 'router_removed') { - const { [event.name as string]: _, ...rest } = next.routers - next.routers = rest - } else if (kind === 'device_added') { - const name = event.name as string - const deviceState: DeviceState = { - ns: event.ns as string, - default_via: event.default_via as string, - mtu: (event.mtu as number | null) ?? null, - interfaces: (event.interfaces as IfaceState[]) ?? [], - counters: (event.counters as Record) ?? {}, - } - for (const iface of deviceState.interfaces) { - const router = next.routers[iface.router] - if (router && !router.devices.includes(name)) { - next.routers = { - ...next.routers, - [iface.router]: { ...router, devices: [...router.devices, name] }, - } - } - } - next.devices = { ...next.devices, [name]: deviceState } - } else if (kind === 'device_removed') { - const name = event.name as string - const dev = next.devices[name] - if (dev) { - for (const iface of dev.interfaces) { - const router = next.routers[iface.router] - if (router) { - next.routers = { - ...next.routers, - [iface.router]: { ...router, devices: router.devices.filter((d) => d !== name) }, - } - } - } - } - const { [name]: _, ...rest } = next.devices - next.devices = rest - } else if (kind === 'nat_changed') { - const router = next.routers[event.router as string] - if (router) { - next.routers = { ...next.routers, [event.router as string]: { ...router, nat: event.nat as Nat } } - } - } else if (kind === 'firewall_changed') { - const router = next.routers[event.router as string] - if (router) { - next.routers = { ...next.routers, [event.router as string]: { ...router, firewall: 
event.firewall as Firewall } } - } - } - - return next -} - -// ── Unified App ──────────────────────────────────────────────────── - -export default function App({ mode }: { mode: 'run' | 'inv' }) { - const location = useLocation() - const navigate = useNavigate() - - // Derive selection from the URL path. - // Route is /run/* or /inv/* so everything after the prefix is the name. - const nameFromUrl = location.pathname.slice(mode === 'run' ? 5 : 5) // "/run/" or "/inv/" = 5 chars - const selection: Selection | null = nameFromUrl - ? { kind: mode === 'inv' ? 'invocation' : 'run', name: nameFromUrl } - : null - - const selectedRun = selection?.kind === 'run' ? selection.name : null - const selectedInvocation = selection?.kind === 'invocation' ? selection.name : null - - const [tab, setTab] = useState(mode === 'inv' ? 'sims' : 'topology') - - // Run list (for the dropdown) - const [runs, setRuns] = useState([]) - - // Lab state and events - const [labState, setLabState] = useState(null) - const [labEvents, setLabEvents] = useState([]) - const esRef = useRef(null) - const lastOpidRef = useRef(0) - - // Log files - const [logList, setLogList] = useState([]) - - // Perf results - const [simResults, setSimResults] = useState(null) - const [combinedResults, setCombinedResults] = useState(null) - - // Topology selection - const [selectedNode, setSelectedNode] = useState(null) - const [selectedKind, setSelectedKind] = useState<'router' | 'device' | 'ix'>('router') - - // Cross-tab log jump - const [logJump, setLogJump] = useState<{ node: string; path: string; timeLabel: string; nonce: number } | null>(null) - - // ── Poll runs list ── - - const refreshRuns = useCallback(async () => { - const r = await fetchRuns() - setRuns(r) - }, []) - - useEffect(() => { - refreshRuns() - const id = setInterval(refreshRuns, 5_000) - return () => clearInterval(id) - }, [refreshRuns]) - - // ── Load run data when an individual sim is selected ── - - useEffect(() => { - if (!selectedRun) { - 
setLabState(null) - setLabEvents([]) - setLogList([]) - setSimResults(null) - return - } - - let dead = false - Promise.all([ - fetchState(selectedRun), - fetchEvents(selectedRun), - fetchLogs(selectedRun), - fetchResults(selectedRun), - ]).then(([state, events, logs, results]) => { - if (dead) return - if (state) setLabState(state) - setLabEvents(events) - lastOpidRef.current = events.length ? Math.max(...events.map((e) => e.opid ?? 0)) : 0 - setLogList(logs) - setSimResults(results) - }) - - return () => { dead = true } - }, [selectedRun]) - - // ── Load combined results when an invocation is selected ── - - useEffect(() => { - if (!selectedInvocation) { - setCombinedResults(null) - return - } - - let dead = false - fetchCombinedResults(selectedInvocation).then((results) => { - if (dead) return - setCombinedResults(results) - }) - - return () => { dead = true } - }, [selectedInvocation]) - - // ── SSE for live updates (only when run is "running") ── - - useEffect(() => { - if (!selectedRun) return - const runInfo = runs.find((r) => r.name === selectedRun) - if (runInfo?.status !== 'running') return - - const es = subscribeEvents(selectedRun, lastOpidRef.current, (event) => { - setLabState((prev) => (prev ? applyEvent(prev, event) : prev)) - setLabEvents((prev) => [...prev.slice(-999), event]) - if (event.opid != null) lastOpidRef.current = event.opid - }) - esRef.current = es - return () => { - es.close() - esRef.current = null - } - }, [selectedRun, runs]) - - // Close SSE when tab becomes hidden, reconnect when visible. 
- useEffect(() => { - const onVisibility = () => { - if (document.hidden) { - esRef.current?.close() - esRef.current = null - } - } - document.addEventListener('visibilitychange', onVisibility) - window.addEventListener('beforeunload', () => esRef.current?.close()) - return () => document.removeEventListener('visibilitychange', onVisibility) - }, []) - - // ── Callbacks ── - - const handleNodeSelect = useCallback((name: string, kind: 'router' | 'device' | 'ix') => { - setSelectedNode(name) - setSelectedKind(kind) - }, []) - - const handleJumpToLog = useCallback((target: { node: string; path: string; timeLabel: string }) => { - setTab('logs') - setLogJump({ ...target, nonce: Date.now() }) - }, []) - - // ── Derived ── - - const base = selectedRun ? runFilesBase(selectedRun) : '' - const isSimView = selection?.kind === 'run' - const isInvocationView = selection?.kind === 'invocation' - - // Runs belonging to the current invocation - const invocationRuns = isInvocationView - ? runs.filter((r) => r.invocation === selectedInvocation) - : [] - - const availableTabs: Tab[] = isSimView - ? ['topology', 'logs', 'timeline', ...(simResults ? (['perf'] as Tab[]) : [])] - : isInvocationView - ? ['sims', ...(combinedResults ? (['perf'] as Tab[]) : [])] - : [] - - // When available tabs change, ensure current tab is still valid. - useEffect(() => { - if (availableTabs.length > 0 && !availableTabs.includes(tab)) { - setTab(availableTabs[0]) - } - }, [availableTabs, tab]) - - // Map LogEntry to SimLogEntry shape for LogsTab/TimelineTab compatibility - const logsForTabs = logList.map((l) => ({ node: l.node, kind: l.kind, path: l.path })) - - // Group runs for the selector - const { groups, ungrouped } = groupByInvocation(runs) - - // ── Render ── - - return ( -
-
-

patchbay

- - {isSimView && runs.find((r) => r.name === selectedRun) && ( - - {runs.find((r) => r.name === selectedRun)?.status ?? ''} - - )} - {labState && ( - - opid: {labState.opid} - - )} -
- -
- {availableTabs.map((t) => ( - - ))} -
- -
- {tab === 'topology' && labState && ( -
-
- -
- {selectedNode && ( -
- -
- )} -
- )} - {tab === 'topology' && !labState && isSimView && ( -
Loading lab state...
- )} - - {tab === 'logs' && selectedRun && ( - - )} - - {tab === 'timeline' && selectedRun && ( - - )} - - {tab === 'sims' && isInvocationView && ( -
-

{selectedInvocation}

- {invocationRuns.length === 0 &&
No sims found.
} - {invocationRuns.map((r) => ( - { e.preventDefault(); navigate(`/run/${r.name}`) }} - > - {simLabel(r)} - {r.status && {r.status}} - - ))} -
- )} - - {tab === 'perf' && isSimView && } - {tab === 'perf' && isInvocationView && navigate(`/run/${sim}`)} />} -
-
- ) -} diff --git a/ui/src/ComparePage.tsx b/ui/src/ComparePage.tsx new file mode 100644 index 0000000..9226c50 --- /dev/null +++ b/ui/src/ComparePage.tsx @@ -0,0 +1,46 @@ +import { useEffect, useState } from 'react' +import { useParams } from 'react-router-dom' +import { fetchRunJson } from './api' +import type { RunManifest } from './api' +import CompareView from './components/CompareView' + +function refLabel(m: RunManifest | null): string | null { + if (!m) return null + if (m.branch && m.commit) return `${m.branch}@${m.commit.slice(0, 7)}` + if (m.commit) return m.commit.slice(0, 7) + return null +} + +export default function ComparePage() { + const { left, right } = useParams<{ left: string; right: string }>() + const [leftManifest, setLeftManifest] = useState(null) + const [rightManifest, setRightManifest] = useState(null) + + useEffect(() => { + if (!left || !right) return + fetchRunJson(left).then(setLeftManifest) + fetchRunJson(right).then(setRightManifest) + }, [left, right]) + + if (!left || !right) { + return
Missing run names in URL. Use /compare/:left/:right
+ } + + const project = leftManifest?.project ?? rightManifest?.project + const leftRef = refLabel(leftManifest) + const rightRef = refLabel(rightManifest) + const subtitle = [ + project, + leftRef && rightRef ? `${leftRef} vs ${rightRef}` : leftRef ?? rightRef, + ].filter(Boolean).join(' · ') + + return ( +
+
+

patchbay

+ {subtitle && {subtitle}} +
+ +
+ ) +} diff --git a/ui/src/GroupPage.tsx b/ui/src/GroupPage.tsx new file mode 100644 index 0000000..b55005f --- /dev/null +++ b/ui/src/GroupPage.tsx @@ -0,0 +1,124 @@ +import { useCallback, useEffect, useMemo, useState } from 'react' +import { useLocation, useNavigate } from 'react-router-dom' +import type { CombinedResults } from './types' +import { fetchRuns, fetchCombinedResults } from './api' +import type { RunInfo } from './api' +import RunSelector, { selectionPath } from './components/RunSelector' +import type { Selection } from './components/RunSelector' +import PerfTab from './components/PerfTab' +import { simLabel } from './utils' + +type GroupTab = 'sims' | 'perf' + +export default function GroupPage() { + const location = useLocation() + const navigate = useNavigate() + + const groupName = location.pathname.slice('/group/'.length) + const [tab, setTab] = useState('sims') + + // Run list (for the dropdown) + const [runs, setRuns] = useState([]) + const [combinedResults, setCombinedResults] = useState(null) + + // ── Poll runs list ── + + const refreshRuns = useCallback(async () => { + const r = await fetchRuns() + setRuns(r) + }, []) + + useEffect(() => { + refreshRuns() + const id = setInterval(refreshRuns, 5_000) + return () => clearInterval(id) + }, [refreshRuns]) + + // ── Load combined results ── + + useEffect(() => { + if (!groupName) { + setCombinedResults(null) + return + } + + let dead = false + fetchCombinedResults(groupName).then((results) => { + if (dead) return + setCombinedResults(results) + }) + + return () => { dead = true } + }, [groupName]) + + // ── Derived ── + + const selection: Selection | null = groupName ? { kind: 'group', name: groupName } : null + const groupRuns = useMemo( + () => runs.filter((r) => r.group === groupName), + [runs, groupName], + ) + + const availableTabs = useMemo( + () => ['sims', ...(combinedResults ? 
(['perf'] as GroupTab[]) : [])], + [combinedResults], + ) + + // Ensure current tab is still valid when available tabs change. + useEffect(() => { + if (availableTabs.length > 0 && !availableTabs.includes(tab)) { + setTab(availableTabs[0]) + } + }, [availableTabs, tab]) + + const handleSelectionChange = useCallback((sel: Selection | null) => { + navigate(selectionPath(sel)) + }, [navigate]) + + // ── Render ── + + return ( +
+
+

patchbay

+ +
+ +
+ {availableTabs.map((t) => ( + + ))} +
+ +
+ {tab === 'sims' && ( +
+

{groupName}

+ {groupRuns.length === 0 &&
No sims found.
} + {groupRuns.map((r) => ( + { e.preventDefault(); navigate(`/run/${r.name}`) }} + > + {simLabel(r)} + {r.status && {r.status}} + + ))} +
+ )} + + {tab === 'perf' && ( + navigate(`/run/${sim}`)} /> + )} +
+
+ ) +} diff --git a/ui/src/RunPage.tsx b/ui/src/RunPage.tsx new file mode 100644 index 0000000..d4ae7b7 --- /dev/null +++ b/ui/src/RunPage.tsx @@ -0,0 +1,254 @@ +import { useCallback, useEffect, useRef, useState } from 'react' +import { useLocation, useNavigate } from 'react-router-dom' +import type { + Firewall, + LabEvent, + LabState, + LinkCondition, + Nat, + NatV6Mode, + RouterState, + DeviceState, + IfaceState, +} from './devtools-types' +import type { SimResults } from './types' +import { + fetchRuns, + fetchState, + fetchEvents, + fetchLogs, + fetchResults, + subscribeEvents, +} from './api' +import type { RunInfo, LogEntry } from './api' +import RunSelector, { selectionPath } from './components/RunSelector' +import type { Selection } from './components/RunSelector' +import RunView from './components/RunView' +import type { RunTab } from './components/RunView' + +// ── State reducer ────────────────────────────────────────────────── + +function applyEvent(state: LabState, event: LabEvent): LabState { + const next = { ...state, opid: event.opid } + const kind = event.kind + + if (kind === 'router_added') { + const name = event.name as string + const routerState: RouterState = { + ns: event.ns as string, + region: (event.region as string | null) ?? null, + nat: event.nat as Nat, + nat_v6: event.nat_v6 as NatV6Mode, + firewall: event.firewall as Firewall, + ip_support: event.ip_support as RouterState['ip_support'], + mtu: (event.mtu as number | null) ?? null, + upstream: (event.upstream as string | null) ?? null, + uplink_ip: (event.uplink_ip as string | null) ?? null, + uplink_ip_v6: (event.uplink_ip_v6 as string | null) ?? null, + downstream_cidr: (event.downstream_cidr as string | null) ?? null, + downstream_gw: (event.downstream_gw as string | null) ?? null, + downstream_cidr_v6: (event.downstream_cidr_v6 as string | null) ?? null, + downstream_gw_v6: (event.downstream_gw_v6 as string | null) ?? 
null, + downstream_bridge: event.downstream_bridge as string, + downlink_condition: (event.downlink_condition as LinkCondition | null) ?? null, + devices: (event.devices as string[]) ?? [], + counters: (event.counters as Record) ?? {}, + } + next.routers = { ...next.routers, [name]: routerState } + } else if (kind === 'router_removed') { + const { [event.name as string]: _, ...rest } = next.routers + next.routers = rest + } else if (kind === 'device_added') { + const name = event.name as string + const deviceState: DeviceState = { + ns: event.ns as string, + default_via: event.default_via as string, + mtu: (event.mtu as number | null) ?? null, + interfaces: (event.interfaces as IfaceState[]) ?? [], + counters: (event.counters as Record) ?? {}, + } + for (const iface of deviceState.interfaces) { + const router = next.routers[iface.router] + if (router && !router.devices.includes(name)) { + next.routers = { + ...next.routers, + [iface.router]: { ...router, devices: [...router.devices, name] }, + } + } + } + next.devices = { ...next.devices, [name]: deviceState } + } else if (kind === 'device_removed') { + const name = event.name as string + const dev = next.devices[name] + if (dev) { + for (const iface of dev.interfaces) { + const router = next.routers[iface.router] + if (router) { + next.routers = { + ...next.routers, + [iface.router]: { ...router, devices: router.devices.filter((d) => d !== name) }, + } + } + } + } + const { [name]: _, ...rest } = next.devices + next.devices = rest + } else if (kind === 'nat_changed') { + const router = next.routers[event.router as string] + if (router) { + next.routers = { ...next.routers, [event.router as string]: { ...router, nat: event.nat as Nat } } + } + } else if (kind === 'firewall_changed') { + const router = next.routers[event.router as string] + if (router) { + next.routers = { ...next.routers, [event.router as string]: { ...router, firewall: event.firewall as Firewall } } + } + } + + return next +} + +// ── RunPage 
──────────────────────────────────────────────────────── + +export default function RunPage() { + const location = useLocation() + const navigate = useNavigate() + + const runName = location.pathname.slice('/run/'.length) + const [tab, setTab] = useState('topology') + + // Run list (for the dropdown) + const [runs, setRuns] = useState([]) + + // Lab state and events + const [labState, setLabState] = useState(null) + const [labEvents, setLabEvents] = useState([]) + const esRef = useRef(null) + const lastOpidRef = useRef(0) + + // Log files + const [logList, setLogList] = useState([]) + + // Perf results + const [simResults, setSimResults] = useState(null) + + // ── Poll runs list ── + + const refreshRuns = useCallback(async () => { + const r = await fetchRuns() + setRuns(r) + }, []) + + useEffect(() => { + refreshRuns() + const id = setInterval(refreshRuns, 5_000) + return () => clearInterval(id) + }, [refreshRuns]) + + // ── Load run data ── + + useEffect(() => { + if (!runName) { + setLabState(null) + setLabEvents([]) + setLogList([]) + setSimResults(null) + return + } + + let dead = false + Promise.all([ + fetchState(runName), + fetchEvents(runName), + fetchLogs(runName), + fetchResults(runName), + ]).then(([state, events, logs, results]) => { + if (dead) return + if (state) setLabState(state) + setLabEvents(events) + lastOpidRef.current = events.length ? Math.max(...events.map((e) => e.opid ?? 0)) : 0 + setLogList(logs) + setSimResults(results) + }) + + return () => { dead = true } + }, [runName]) + + // ── SSE for live updates (only when run is "running") ── + + useEffect(() => { + if (!runName) return + const runInfo = runs.find((r) => r.name === runName) + if (runInfo?.status !== 'running') return + + const es = subscribeEvents(runName, lastOpidRef.current, (event) => { + setLabState((prev) => (prev ? 
applyEvent(prev, event) : prev)) + setLabEvents((prev) => [...prev.slice(-999), event]) + if (event.opid != null) lastOpidRef.current = event.opid + }) + esRef.current = es + return () => { + es.close() + esRef.current = null + } + }, [runName, runs]) + + // Close SSE when tab becomes hidden. + useEffect(() => { + const onVisibility = () => { + if (document.hidden) { + esRef.current?.close() + esRef.current = null + } + } + const onUnload = () => esRef.current?.close() + document.addEventListener('visibilitychange', onVisibility) + window.addEventListener('beforeunload', onUnload) + return () => { + document.removeEventListener('visibilitychange', onVisibility) + window.removeEventListener('beforeunload', onUnload) + } + }, []) + + // ── Derived ── + + const selection: Selection | null = runName ? { kind: 'run', name: runName } : null + const selectedRunInfo = runs.find((r) => r.name === runName) ?? null + + const handleSelectionChange = useCallback((sel: Selection | null) => { + navigate(selectionPath(sel)) + }, [navigate]) + + // ── Render ── + + return ( +
+
+

patchbay

+ + {selectedRunInfo && ( + + {selectedRunInfo.status ?? ''} + + )} + {labState && ( + + opid: {labState.opid} + + )} +
+ + {runName && ( + + )} +
+ ) +} diff --git a/ui/src/RunsIndex.tsx b/ui/src/RunsIndex.tsx index 2380507..b339c7b 100644 --- a/ui/src/RunsIndex.tsx +++ b/ui/src/RunsIndex.tsx @@ -1,36 +1,29 @@ -import { useEffect, useState } from 'react' +import { useEffect, useMemo, useState } from 'react' import { Link, useNavigate } from 'react-router-dom' import { fetchRuns } from './api' import type { RunInfo, RunManifest } from './api' +import { groupByGroup as groupByGroupBase } from './utils' -interface InvocationGroup { - invocation: string +// ── Types ── + +interface RunGroupWithManifest { + group: string runs: RunInfo[] manifest: RunManifest | null } -function groupByInvocation(runs: RunInfo[]): { groups: InvocationGroup[]; ungrouped: RunInfo[] } { - const grouped = new Map() - const ungrouped: RunInfo[] = [] - for (const r of runs) { - if (r.invocation) { - let list = grouped.get(r.invocation) - if (!list) { - list = [] - grouped.set(r.invocation, list) - } - list.push(r) - } else { - ungrouped.push(r) - } - } - const groups: InvocationGroup[] = [] - for (const [invocation, groupRuns] of grouped) { - // Use manifest from the first run that has one. - const manifest = groupRuns.find((r) => r.manifest)?.manifest ?? null - groups.push({ invocation, runs: groupRuns, manifest }) +// ── Helpers ── + +/** Extends the shared groupByGroup with manifest extraction for the index page. */ +function groupByGroup(runs: RunInfo[]): { groups: RunGroupWithManifest[]; ungrouped: RunInfo[] } { + const { groups, ungrouped } = groupByGroupBase(runs) + return { + groups: groups.map((g) => ({ + ...g, + manifest: g.runs.find((r) => r.manifest)?.manifest ?? null, + })), + ungrouped, } - return { groups, ungrouped } } function formatDate(raw: string): string { @@ -44,17 +37,64 @@ function formatDate(raw: string): string { return `${y}-${mo}-${d} ${h}:${mi}:${s}` } -/** Extract date portion from invocation name like "project-YYYYMMDD_HHMMSS-uuid". 
*/ +/** Extract date portion from group name like "project-YYYYMMDD_HHMMSS-uuid". */ function extractDate(name: string): string | null { const m = name.match(/(\d{8}_\d{6})/) return m ? m[1] : null } +/** Parse a date string (ISO or YYYYMMDD_HHMMSS) to a Date object for sorting. */ +function parseDate(s: string): Date { + // Try ISO format first + const d = new Date(s) + if (!isNaN(d.getTime())) return d + // Try YYYYMMDD_HHMMSS + const m = s.match(/(\d{4})(\d{2})(\d{2})_(\d{2})(\d{2})(\d{2})/) + if (m) return new Date(+m[1], +m[2] - 1, +m[3], +m[4], +m[5], +m[6]) + return new Date(0) +} + +/** Get sort key for a run/group - prefer manifest.started_at, fall back to dir name date. */ +function sortKey(run: RunInfo): number { + if (run.manifest?.started_at) return parseDate(run.manifest.started_at).getTime() + const dateStr = extractDate(run.group ?? run.name) + if (dateStr) return parseDate(dateStr).getTime() + return 0 +} + +/** Format relative time from a date string. */ +function relativeTime(dateStr: string): string { + const d = parseDate(dateStr) + if (d.getTime() === 0) return '' + const diff = Date.now() - d.getTime() + const mins = Math.floor(diff / 60000) + if (mins < 1) return 'just now' + if (mins < 60) return `${mins}m ago` + const hrs = Math.floor(mins / 60) + if (hrs < 24) return `${hrs}h ago` + const days = Math.floor(hrs / 24) + return `${days}d ago` +} + +const PAGE_SIZE = 100 + +// ── Component ── + export default function RunsIndex() { const [runs, setRuns] = useState([]) const [loaded, setLoaded] = useState(false) const navigate = useNavigate() + // Filters + const [projectFilter, setProjectFilter] = useState('') + const [kindFilter, setKindFilter] = useState('') + const [page, setPage] = useState(0) + + // Checkbox selection for compare + const [selected, setSelected] = useState>(new Set()) + // Collapsed groups (collapsed by default, expanded set tracks which are open) + const [expanded, setExpanded] = useState>(new Set()) + useEffect(() => { 
const refresh = () => fetchRuns().then((r) => { setRuns(r); setLoaded(true) }) refresh() @@ -62,93 +102,298 @@ export default function RunsIndex() { return () => clearInterval(id) }, []) - const { groups, ungrouped } = groupByInvocation(runs) + // Unique projects and kinds for filter dropdowns + const projects = useMemo(() => { + const s = new Set() + for (const r of runs) { + if (r.manifest?.project) s.add(r.manifest.project) + } + return Array.from(s).sort() + }, [runs]) - // Auto-navigate: if there's only one run, go directly to it. - // If there's only one invocation group, go to it. - useEffect(() => { - if (!loaded || runs.length === 0) return - if (runs.length === 1) { - navigate(`/run/${runs[0].name}`, { replace: true }) - } else if (groups.length === 1 && ungrouped.length === 0) { - navigate(`/inv/${groups[0].invocation}`, { replace: true }) + const kinds = useMemo(() => { + const s = new Set() + for (const r of runs) { + if (r.manifest?.kind) s.add(r.manifest.kind) } - }, [loaded, runs, groups, ungrouped, navigate]) + return Array.from(s).sort() + }, [runs]) + + // Filter and sort runs + const filteredRuns = useMemo(() => { + let result = runs + if (projectFilter) { + result = result.filter((r) => r.manifest?.project === projectFilter) + } + if (kindFilter) { + result = result.filter((r) => r.manifest?.kind === kindFilter) + } + // Sort by date (newest first) + result = [...result].sort((a, b) => sortKey(b) - sortKey(a)) + return result + }, [runs, projectFilter, kindFilter]) + + // Group filtered runs + const { groups, ungrouped } = useMemo(() => groupByGroup(filteredRuns), [filteredRuns]) + + // Flatten for pagination: each group is one "row", each ungrouped run is one "row" + type Row = { kind: 'group'; group: RunGroupWithManifest } | { kind: 'run'; run: RunInfo } + const allRows = useMemo(() => { + const rows: Row[] = [] + // Sort groups by the first run's sortKey + const sortedGroups = [...groups].sort((a, b) => { + const aKey = 
Math.max(...a.runs.map(sortKey)) + const bKey = Math.max(...b.runs.map(sortKey)) + return bKey - aKey + }) + // Interleave groups and ungrouped by date + let gi = 0 + let ui = 0 + while (gi < sortedGroups.length || ui < ungrouped.length) { + const gKey = gi < sortedGroups.length ? Math.max(...sortedGroups[gi].runs.map(sortKey)) : -1 + const uKey = ui < ungrouped.length ? sortKey(ungrouped[ui]) : -1 + if (gKey >= uKey && gi < sortedGroups.length) { + rows.push({ kind: 'group', group: sortedGroups[gi] }) + gi++ + } else { + rows.push({ kind: 'run', run: ungrouped[ui] }) + ui++ + } + } + return rows + }, [groups, ungrouped]) + + const totalPages = Math.max(1, Math.ceil(allRows.length / PAGE_SIZE)) + const pageRows = allRows.slice(page * PAGE_SIZE, (page + 1) * PAGE_SIZE) + + // Reset page when filters change + useEffect(() => { setPage(0) }, [projectFilter, kindFilter]) + + // Toggle a run in the selection set + const toggleSelected = (name: string) => { + setSelected((prev) => { + const next = new Set(prev) + if (next.has(name)) next.delete(name) + else next.add(name) + return next + }) + } + + const selectedList = Array.from(selected) return (
-

patchbay runs

+
+

Runs

+ + {/* Project filter */} + {projects.length > 0 && ( + + )} + + {/* Kind filter */} + {kinds.length > 0 && ( + + )} + + {/* Pagination */} +
+ + {page + 1} / {totalPages} + +
+
+ + {/* Compare selected button */} + {selectedList.length === 2 && ( + + )} + {runs.length === 0 && loaded &&
No runs found.
} - {groups.map((g) => ( -
- {g.manifest ? ( - - ) : ( -
- {g.invocation} - {g.runs.length > 1 && ( - - combined ({g.runs.length} sims) - - )} + {pageRows.map((row) => { + if (row.kind === 'group') { + const g = row.group + const isExpanded = expanded.has(g.group) + const toggleExpand = () => setExpanded(prev => { + const next = new Set(prev) + if (next.has(g.group)) next.delete(g.group) + else next.add(g.group) + return next + }) + return ( +
+
+ + {isExpanded ? '\u25BC' : '\u25B6'} + + { e.stopPropagation(); toggleSelected(g.group) }} + onClick={(e) => e.stopPropagation()} + style={{ cursor: 'pointer' }} + /> + {g.manifest ? ( + + ) : ( + {g.group} + )} + + {g.runs.length} {g.runs.length === 1 ? 'run' : 'runs'} + +
+ {isExpanded && g.runs.map((r) => ( + + ))}
- )} - {g.runs.map((r) => ( - - ))} -
- ))} - {ungrouped.map((r) => ( - - ))} + ) + } + const r = row.run + return + })}
) } -function ManifestGroupHeader({ group }: { group: InvocationGroup }) { +// ── Subcomponents ── + +/** Inline content for a group header with manifest info (rendered inside the collapsible header). */ +function GroupHeaderContent({ group }: { group: RunGroupWithManifest }) { const m = group.manifest! - const outcome = m.test_outcome - const statusIcon = outcome === 'success' ? '\u2705' : outcome === 'failure' ? '\u274c' : null - const date = extractDate(group.invocation) + const outcome = m.test_outcome ?? m.outcome + const statusIcon = outcome === 'pass' || outcome === 'success' ? '\u2705' : outcome === 'fail' || outcome === 'failure' ? '\u274c' : null + const date = m.started_at ?? extractDate(group.group) return ( - - {m.project || group.invocation} -
- {m.branch && {m.branch}} - {m.commit && {m.commit.slice(0, 7)}} - {m.pr != null && m.pr_url ? ( - e.stopPropagation()} - > - PR #{m.pr} - - ) : m.pr != null ? ( - PR #{m.pr} - ) : null} - {m.title && {m.title}} -
-
- {statusIcon && {statusIcon}} - {date && {formatDate(date)}} - -
- + <> + {m.project || group.group} + {m.branch && {m.branch}} + {m.commit && {m.commit.slice(0, 7)}} + {m.kind && {m.kind}} + {statusIcon && {statusIcon}} + {date && ( + + {typeof date === 'string' && date.includes('T') ? relativeTime(date) : formatDate(date as string)} + + )} + {m.pass != null && m.total != null && ( + {m.pass}/{m.total} pass + )} + ) } -function RunEntry({ run, grouped }: { run: RunInfo; grouped?: boolean }) { - const label = grouped && run.invocation && run.name.startsWith(run.invocation + '/') - ? run.label ?? run.name.slice(run.invocation.length + 1) - : run.label ?? run.name +function RunRow({ run, grouped, selected, onToggle }: { run: RunInfo; grouped?: boolean; selected: boolean; onToggle: (name: string) => void }) { + // For grouped (child) runs: show the test/sim name, not the inherited manifest info + const shortName = grouped && run.group && run.name.startsWith(run.group + '/') + ? run.name.slice(run.group.length + 1).replace(/\//g, ' / ') + : null + const displayLabel = shortName ?? run.label ?? run.name + + // Only show manifest details for ungrouped (top-level) runs + const m = grouped ? null : run.manifest + + const branchCommit = m?.branch && m?.commit + ? `${m.branch}@${m.commit.slice(0, 7)}` + : m?.commit ? m.commit.slice(0, 7) + : null + + const dateStr = m?.started_at ?? extractDate(run.group ?? run.name) + const kindBadge = grouped ? null : m?.kind return ( - - {label} - {run.status && {run.status}} - +
+ { e.stopPropagation(); onToggle(run.name) }} + onClick={(e) => e.stopPropagation()} + style={{ cursor: 'pointer' }} + /> + + + {branchCommit ?? displayLabel} + + {kindBadge && ( + {kindBadge} + )} + {!grouped && dateStr && ( + + {typeof dateStr === 'string' && dateStr.includes('T') ? relativeTime(dateStr) : dateStr} + + )} + {!grouped && m?.pass != null && m?.total != null && ( + + {m.pass}/{m.total} pass + + )} + {run.status && {run.status}} + +
) } + +// ── Styles ── + +const filterStyle: React.CSSProperties = { + padding: '4px 8px', + borderRadius: 4, + border: '1px solid var(--border)', + background: 'var(--surface)', + color: 'inherit', + fontSize: 13, +} + +const navBtnStyle: React.CSSProperties = { + padding: '4px 8px', + borderRadius: 4, + border: '1px solid var(--border)', + background: 'var(--surface)', + color: 'inherit', + fontSize: 12, + cursor: 'pointer', +} + +const compareBtnStyle: React.CSSProperties = { + padding: '8px 16px', + borderRadius: 6, + border: 'none', + background: 'var(--accent, #4a9eff)', + color: '#fff', + fontWeight: 'bold', + cursor: 'pointer', + marginBottom: '1rem', +} + +function kindBadgeStyle(kind: string): React.CSSProperties { + return { + fontSize: 10, + padding: '2px 6px', + borderRadius: 4, + fontWeight: 600, + textTransform: 'uppercase', + background: kind === 'test' ? 'rgba(74, 158, 255, 0.15)' : 'rgba(255, 158, 74, 0.15)', + color: kind === 'test' ? 'var(--accent, #4a9eff)' : '#ff9e4a', + } +} diff --git a/ui/src/api.ts b/ui/src/api.ts index af7794a..6fab1e8 100644 --- a/ui/src/api.ts +++ b/ui/src/api.ts @@ -3,17 +3,35 @@ import type { CombinedResults, SimResults } from './types' const API = '/api' -/** Manifest from run.json, included with pushed CI runs. */ +/** Test result entry within a run manifest. */ +export interface TestResult { + name: string + status: string // "pass" | "fail" | "ignored" + duration?: number | null + /** Relative directory path for this test's output, if it exists on disk. */ + dir?: string | null +} + export interface RunManifest { - project: string + kind?: string | null // "test" | "sim" + project?: string | null branch?: string | null commit?: string | null + dirty?: boolean pr?: number | null pr_url?: string | null created_at?: string | null + started_at?: string | null + ended_at?: string | null + runtime?: number | null title?: string | null /** CI test outcome (e.g. "success", "failure"). Not the lab lifecycle status. 
*/ test_outcome?: string | null + outcome?: string | null + pass?: number | null + fail?: number | null + total?: number | null + tests?: TestResult[] } /** Metadata for a single Lab run directory. */ @@ -21,7 +39,8 @@ export interface RunInfo { name: string label: string | null status: string | null - invocation: string | null + /** Group name (first path component for nested runs). */ + group: string | null manifest?: RunManifest | null } @@ -32,11 +51,27 @@ export interface LogEntry { path: string } -export async function fetchRuns(): Promise { +export async function fetchRuns(params?: { + project?: string + kind?: string + limit?: number + offset?: number +}): Promise { try { - const res = await fetch(`${API}/runs`) + const query = new URLSearchParams() + if (params?.project) query.set('project', params.project) + if (params?.kind) query.set('kind', params.kind) + if (params?.limit != null) query.set('limit', String(params.limit)) + if (params?.offset != null) query.set('offset', String(params.offset)) + const qs = query.toString() + const res = await fetch(`${API}/runs${qs ? '?' + qs : ''}`) if (!res.ok) return [] - return (await res.json()) as RunInfo[] + const raw = (await res.json()) as any[] + // Normalize: accept both "group" and legacy "batch" from server + return raw.map((r) => ({ + ...r, + group: r.group ?? r.batch ?? null, + })) as RunInfo[] } catch { return [] } @@ -107,12 +142,24 @@ export function runFilesBase(run: string): string { return `${API}/runs/${encodeURIComponent(run)}/files/` } +/** Fetch run.json manifest for a given run. */ +export async function fetchRunJson(run: string): Promise { + try { + // Use the API endpoint which enriches the manifest (e.g. resolves test dirs). 
+ const res = await fetch(`${API}/runs/${encodeURIComponent(run)}/manifest`) + if (!res.ok) return null + return (await res.json()) as RunManifest + } catch { + return null + } +} + export async function fetchCombinedResults( - invocation: string, + group: string, ): Promise { try { const res = await fetch( - `${API}/invocations/${encodeURIComponent(invocation)}/combined-results`, + `${API}/groups/${encodeURIComponent(group)}/combined-results`, ) if (!res.ok) return null return (await res.json()) as CombinedResults diff --git a/ui/src/components/CompareView.tsx b/ui/src/components/CompareView.tsx new file mode 100644 index 0000000..9381f44 --- /dev/null +++ b/ui/src/components/CompareView.tsx @@ -0,0 +1,381 @@ +import { useCallback, useEffect, useMemo, useState } from 'react' +import { Link, useNavigate } from 'react-router-dom' +import type { LabEvent, LabState } from '../devtools-types' +import type { SimResults } from '../types' +import { fetchRunJson, fetchState, fetchEvents, fetchLogs, fetchResults } from '../api' +import type { RunManifest, RunInfo, LogEntry } from '../api' +import RunView from './RunView' +import type { RunTab } from './RunView' + +// ── Scoring (same as CLI: fixes +3, regressions -5) ── + +const SCORE_FIX = 3 +const SCORE_REGRESS = -5 + +interface TestDelta { + name: string + left?: string + right?: string + delta: 'fixed' | 'REGRESS' | 'new' | 'removed' | '' + /** Relative directory for this test's output, if it exists on disk. */ + dir?: string +} + +function computeDiff(left: RunManifest, right: RunManifest) { + const leftTests = left.tests ?? [] + const rightTests = right.tests ?? 
[] + const leftMap = new Map(leftTests.map(t => [t.name, t.status])) + const rightMap = new Map(rightTests.map(t => [t.name, t.status])) + const dirMap = new Map([...leftTests, ...rightTests].filter((t): t is typeof t & { dir: string } => !!t.dir).map(t => [t.name, t.dir])) + + const allNames = new Set([...leftMap.keys(), ...rightMap.keys()]) + const tests: TestDelta[] = [] + let fixes = 0 + let regressions = 0 + + for (const name of Array.from(allNames).sort()) { + const l = leftMap.get(name) + const r = rightMap.get(name) + let delta: TestDelta['delta'] = '' + + if (l === 'fail' && r === 'pass') { delta = 'fixed'; fixes++ } + else if (l === 'pass' && r === 'fail') { delta = 'REGRESS'; regressions++ } + else if (!l && r) { delta = 'new' } + else if (l && !r) { delta = 'removed' } + + tests.push({ name, left: l, right: r, delta, dir: dirMap.get(name) }) + } + + const score = fixes * SCORE_FIX + regressions * SCORE_REGRESS + return { tests, fixes, regressions, score } +} + +function refLabel(m: RunManifest | null, fallback: string): string { + if (!m) return fallback + if (m.branch && m.commit) return `${m.branch}@${m.commit.slice(0, 7)}` + if (m.commit) return m.commit.slice(0, 7) + return fallback +} + +/** Extract the last path segment as a short display name. */ +function shortName(runPath: string): string { + const parts = runPath.split('/') + return parts[parts.length - 1] || runPath +} + +/** Check if this is a group compare (has tests) vs individual run compare. */ +function isGroupCompare(left: RunManifest | null, right: RunManifest | null): boolean { + const leftTests = left?.tests ?? [] + const rightTests = right?.tests ?? [] + return leftTests.length > 0 || rightTests.length > 0 +} + +/** Extract the group (first path segment) from a run path like "run-20260326_123338/project/test". */ +function extractGroup(runPath: string): string { + return runPath.split('/')[0] || runPath +} + +/** Build the parent group compare URL from two individual run paths. 
*/ +function groupCompareUrl(leftRun: string, rightRun: string): string { + const leftGroup = extractGroup(leftRun) + const rightGroup = extractGroup(rightRun) + return `/compare/${encodeURIComponent(leftGroup)}/${encodeURIComponent(rightGroup)}` +} + +// ── Compare View (route: /compare/:left/:right) ── + +export default function CompareView({ leftRun, rightRun }: { leftRun: string; rightRun: string }) { + const navigate = useNavigate() + const [leftManifest, setLeftManifest] = useState(null) + const [rightManifest, setRightManifest] = useState(null) + const [loading, setLoading] = useState(true) + const [sharedTab, setSharedTab] = useState('topology') + + useEffect(() => { + setLoading(true) + Promise.all([fetchRunJson(leftRun), fetchRunJson(rightRun)]).then(([l, r]) => { + setLeftManifest(l) + setRightManifest(r) + setLoading(false) + }) + }, [leftRun, rightRun]) + + if (loading) { + return
Loading compare data...
+ } + + const leftLabel = refLabel(leftManifest, leftRun) + const rightLabel = refLabel(rightManifest, rightRun) + const isGroup = isGroupCompare(leftManifest, rightManifest) + + // Compute diff from tests arrays + const diff = leftManifest && rightManifest + ? computeDiff(leftManifest, rightManifest) + : { tests: [] as TestDelta[], fixes: 0, regressions: 0, score: 0 } + + const leftPass = leftManifest?.pass ?? (leftManifest?.tests ?? []).filter(t => t.status === 'pass').length + const leftTotal = leftManifest?.total ?? (leftManifest?.tests ?? []).length + const rightPass = rightManifest?.pass ?? (rightManifest?.tests ?? []).filter(t => t.status === 'pass').length + const rightTotal = rightManifest?.total ?? (rightManifest?.tests ?? []).length + const leftOutcome = leftManifest?.test_outcome ?? leftManifest?.outcome ?? null + const rightOutcome = rightManifest?.test_outcome ?? rightManifest?.outcome ?? null + + const handleTestClick = (dir: string) => { + const leftPath = `${leftRun}/${dir}` + const rightPath = `${rightRun}/${dir}` + navigate(`/compare/${encodeURIComponent(leftPath)}/${encodeURIComponent(rightPath)}`) + } + + return ( +
+ {/* Header: simple name for individual runs, summary for groups */} + {!isGroup ? ( +
+

+ Compare: {shortName(leftRun)} (left) vs {shortName(rightRun)} (right) +

+ + ↩ Back to group compare + +
+ ) : ( + <> +

+ Compare: {leftLabel} vs {rightLabel} — {leftPass}/{leftTotal} → {rightPass}/{rightTotal} + {diff.regressions > 0 && ({diff.regressions} regression{diff.regressions > 1 ? 's' : ''})} + {diff.fixes > 0 && ({diff.fixes} fix{diff.fixes > 1 ? 'es' : ''})} +

+ + {/* Summary bar */} +
+
+ Score: = 0 ? 'var(--green)' : 'var(--red)', fontWeight: 'bold' }}> + {diff.score >= 0 ? '+' : ''}{diff.score} + +
+
+ + {/* Per-test table */} + {diff.tests.length > 0 && ( +
+ + + + + + + + + + + {diff.tests.map(({ name, left, right, delta, dir }) => { + let color = '' + if (delta === 'fixed') color = 'var(--green)' + else if (delta === 'REGRESS') color = 'var(--red)' + + return ( + + + + + + + ) + })} + +
Test{leftLabel}{rightLabel}Delta
+ {dir ? ( + handleTestClick(dir)} + title={`Compare ${name} side-by-side`} + > + {name} + + ) : ( + + {name} + + )} + {statusBadge(left)}{statusBadge(right)}{delta}
+
+ )} + + )} + + {/* Split-screen co-navigation */} + +
+ ) +} + +// ── Shared controls state ── + +interface SharedControls { + logFilter: string + logLevels: Set + metricsFilter: string +} + +const ALL_LEVELS = ['ERROR', 'WARN', 'INFO', 'DEBUG', 'TRACE'] as const + +function SharedControlsBar({ controls, onChange, activeTab }: { + controls: SharedControls + onChange: (updates: Partial) => void + activeTab: RunTab +}) { + const toggleLevel = useCallback((level: string) => { + const next = new Set(controls.logLevels) + if (next.has(level)) next.delete(level) + else next.add(level) + onChange({ logLevels: next }) + }, [controls.logLevels, onChange]) + + if (activeTab === 'logs') { + return ( +
+ Shared: + {ALL_LEVELS.map((level) => ( + toggleLevel(level)} + style={{ cursor: 'pointer' }} + > + {level} + + ))} + onChange({ logFilter: e.target.value })} + style={{ marginLeft: 'auto', minWidth: 180 }} + /> +
+ ) + } + + if (activeTab === 'metrics') { + return ( +
+ Shared: + onChange({ metricsFilter: e.target.value })} + style={{ minWidth: 180 }} + /> +
+ ) + } + + return null +} + +// ── Split-screen co-navigation ── + +function SplitRunView({ left, right, sharedTab, onTabChange }: { + left: string + right: string + sharedTab: RunTab + onTabChange: (tab: RunTab) => void +}) { + const [sharedControls, setSharedControls] = useState({ + logFilter: '', + logLevels: new Set(ALL_LEVELS), + metricsFilter: '', + }) + + const handleControlsChange = useCallback((updates: Partial) => { + setSharedControls(prev => ({ ...prev, ...updates })) + }, []) + + return ( +
+ +
+
+
+ {left} +
+
+ +
+
+
+
+ {right} +
+
+ +
+
+
+
+ ) +} + +function SplitRunPanel({ runName, activeTab, onTabChange, sharedControls }: { + runName: string + activeTab: RunTab + onTabChange: (tab: RunTab) => void + sharedControls: SharedControls +}) { + const [state, setState] = useState(null) + const [events, setEvents] = useState([]) + const [logs, setLogs] = useState([]) + const [results, setResults] = useState(null) + + useEffect(() => { + let dead = false + Promise.all([ + fetchState(runName), + fetchEvents(runName), + fetchLogs(runName), + fetchResults(runName), + ]).then(([s, e, l, r]) => { + if (dead) return + setState(s) + setEvents(e ?? []) + setLogs(l) + setResults(r) + }) + return () => { dead = true } + }, [runName]) + + const run: RunInfo = { name: runName, label: null, status: null, group: null } + + const externalControls = useMemo(() => ({ + logFilter: sharedControls.logFilter, + logLevels: sharedControls.logLevels, + metricsFilter: sharedControls.metricsFilter, + }), [sharedControls.logFilter, sharedControls.logLevels, sharedControls.metricsFilter]) + + return ( + + ) +} + +// ── Shared helpers ── + +function statusBadge(status?: string) { + if (!status) return + const colors: Record = { + pass: 'var(--green)', + fail: 'var(--red)', + ignored: 'var(--text-muted)', + } + return {status.toUpperCase()} +} + diff --git a/ui/src/components/LogsTab.tsx b/ui/src/components/LogsTab.tsx index af9e065..4085b8b 100644 --- a/ui/src/components/LogsTab.tsx +++ b/ui/src/components/LogsTab.tsx @@ -1,4 +1,4 @@ -import { useEffect, useMemo, useRef, useState } from 'react' +import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import type { SimLogEntry } from '../types' import KvPairs from './KvPairs' import JsonTree from './JsonTree' @@ -37,6 +37,10 @@ interface Props { base: string logs: SimLogEntry[] jumpTarget?: { node: string; path: string; timeLabel: string; nonce: number } | null + /** When provided, use this filter instead of internal search state. 
*/ + sharedFilter?: string + /** When provided, use these levels instead of internal level state. */ + sharedLevels?: Set } function valueString(v: unknown): string { @@ -208,7 +212,7 @@ function formatBytes(bytes: number): string { return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GiB` } -export default function LogsTab({ base, logs, jumpTarget }: Props) { +export default function LogsTab({ base, logs, jumpTarget, sharedFilter, sharedLevels }: Props) { const [active, setActive] = useState(null) const [text, setText] = useState('') const [loaded, setLoaded] = useState(false) @@ -220,11 +224,16 @@ export default function LogsTab({ base, logs, jumpTarget }: Props) { const [jumpHandledNonce, setJumpHandledNonce] = useState(null) const jumpingRef = useRef(false) - // Level filter (for tracing logs) - const [enabledLevels, setEnabledLevels] = useState>(new Set(ALL_LEVELS)) + // Level filter (for tracing logs) — use shared if provided + const hasSharedLevels = sharedLevels != null + const [localEnabledLevels, setLocalEnabledLevels] = useState>(new Set(ALL_LEVELS)) + const enabledLevels = hasSharedLevels ? sharedLevels : localEnabledLevels + + // Search — use shared filter if provided + const hasSharedFilter = sharedFilter != null + const [localSearchQuery, setLocalSearchQuery] = useState('') + const searchQuery = hasSharedFilter ? 
sharedFilter : localSearchQuery - // Search - const [searchQuery, setSearchQuery] = useState('') const [searchMatches, setSearchMatches] = useState([]) const [searchIdx, setSearchIdx] = useState(0) const contentRef = useRef(null) @@ -233,10 +242,16 @@ export default function LogsTab({ base, logs, jumpTarget }: Props) { const [timeMode, setTimeMode] = useState('absolute') const [qlogNameFilter, setQlogNameFilter] = useState('all') + // Sidebar collapse + const [sidebarCollapsed, setSidebarCollapsed] = useState(false) + const isStructured = active != null && STRUCTURED_KINDS.has(active.kind) const isTracingLog = active?.kind === 'tracing_jsonl' const isQlog = active?.kind === 'qlog' + // Hide internal controls when shared controls are provided + const hideInternalControls = hasSharedLevels || hasSharedFilter + // Auto-select first log useEffect(() => { setActive((prev) => { @@ -257,11 +272,11 @@ export default function LogsTab({ base, logs, jumpTarget }: Props) { setJumpLine(null) } jumpingRef.current = false - setSearchQuery('') + if (!hasSharedFilter) setLocalSearchQuery('') setSearchMatches([]) setSearchIdx(0) setQlogNameFilter('all') - }, [active, base]) + }, [active, base, hasSharedFilter]) // Handle jump target from timeline useEffect(() => { @@ -277,7 +292,7 @@ export default function LogsTab({ base, logs, jumpTarget }: Props) { }, [jumpTarget, logs, jumpHandledNonce]) // Load log content - const loadContent = async () => { + const loadContent = useCallback(async () => { if (!active) return const url = `${base}${active.path}` setLoading(true) @@ -298,19 +313,19 @@ export default function LogsTab({ base, logs, jumpTarget }: Props) { } finally { setLoading(false) } - } + }, [active, base]) // Auto-load when jump is pending useEffect(() => { if (!active || !jumpNeedle || loaded || loading) return loadContent() - }, [active, jumpNeedle, loaded, loading]) + }, [active, jumpNeedle, loaded, loading, loadContent]) // Auto-load structured logs immediately useEffect(() 
=> { if (!active || loaded || loading) return if (AUTO_LOAD_KINDS.has(active.kind)) loadContent() - }, [active, loaded, loading]) + }, [active, loaded, loading, loadContent]) const byNode = useMemo(() => { const m = new Map() @@ -345,23 +360,39 @@ export default function LogsTab({ base, logs, jumpTarget }: Props) { }, [parsed]) const filteredLines = useMemo(() => { - if (!isTracingLog) return parsed.map((line, i) => ({ line, origIdx: i })) - return parsed - .map((line, i) => ({ line, origIdx: i })) - .filter(({ line }) => { + let lines = parsed.map((line, i) => ({ line, origIdx: i })) + + // Level filtering for tracing logs + if (isTracingLog) { + lines = lines.filter(({ line }) => { if (line.type === 'tracing') return enabledLevels.has(line.level) return true }) - }, [parsed, enabledLevels, isTracingLog]) + } - // Search matches + // Text search filtering (from shared or local) + if (searchQuery) { + const q = searchQuery.toLowerCase() + lines = lines.filter(({ line }) => { + const lineText = line.type === 'tracing' + ? `${line.ts} ${line.level} ${line.spans} ${line.target} ${line.msg} ${line.fields}` + : line.type === 'event' ? 
`${line.kind} ${line.raw}` + : line.raw + return lineText.toLowerCase().includes(q) + }) + } + + return lines + }, [parsed, enabledLevels, isTracingLog, searchQuery]) + + // Search matches (for local search navigation only) useEffect(() => { - if (!searchQuery) { + if (!localSearchQuery || hasSharedFilter) { setSearchMatches([]) setSearchIdx(0) return } - const q = searchQuery.toLowerCase() + const q = localSearchQuery.toLowerCase() const matches: number[] = [] filteredLines.forEach(({ line }, i) => { const text = line.type === 'tracing' @@ -372,7 +403,7 @@ export default function LogsTab({ base, logs, jumpTarget }: Props) { }) setSearchMatches(matches) setSearchIdx(0) - }, [searchQuery, filteredLines]) + }, [localSearchQuery, filteredLines, hasSharedFilter]) // Jump needle resolution useEffect(() => { @@ -435,7 +466,7 @@ export default function LogsTab({ base, logs, jumpTarget }: Props) { }, [searchIdx, searchMatches]) const toggleLevel = (level: string) => { - setEnabledLevels((prev) => { + setLocalEnabledLevels((prev) => { const next = new Set(prev) if (next.has(level)) next.delete(level) else next.add(level) @@ -485,24 +516,45 @@ export default function LogsTab({ base, logs, jumpTarget }: Props) { return (
-
- {byNode.map(([node, files]) => ( -
-
{node}
- {files.map((f) => ( -
setActive(f)} - title={f.path} - > - {f.path.split('/').pop()?.replace(/^device\.[^.]+\./, '')} - [{f.kind}] -
- ))} -
- ))} -
+ {/* Sidebar toggle button */} + + + {!sidebarCollapsed && ( +
+ {byNode.map(([node, files]) => ( +
+
{node}
+ {files.map((f) => ( +
setActive(f)} + title={f.path} + > + {f.path.split('/').pop()?.replace(/^device\.[^.]+\./, '')} + [{f.kind}] +
+ ))} +
+ ))} +
+ )}
{error &&
{error}
} @@ -537,8 +589,8 @@ export default function LogsTab({ base, logs, jumpTarget }: Props) { )}
- {/* Tracing log toolbar */} - {isTracingLog && loaded && ( + {/* Tracing log toolbar — hidden when shared controls are active */} + {isTracingLog && loaded && !hideInternalControls && (
)} + {/* Tracing log toolbar (display-only controls) when shared controls are active */} + {isTracingLog && loaded && hideInternalControls && ( +
+ + + +
+ )} + {/* Qlog filter toolbar */} {isQlog && loaded && renderMode === 'rendered' && (
diff --git a/ui/src/components/MetricsTab.tsx b/ui/src/components/MetricsTab.tsx new file mode 100644 index 0000000..dc243c8 --- /dev/null +++ b/ui/src/components/MetricsTab.tsx @@ -0,0 +1,177 @@ +import { Fragment, useEffect, useMemo, useState } from 'react' +import { runFilesBase } from '../api' +import type { LogEntry } from '../api' + +interface MetricPoint { t: number; m: Record } + +interface MetricSeries { + device: string + key: string + values: { t: number; v: number }[] +} + +function Sparkline({ values }: { values: number[] }) { + if (values.length < 2) return null + const w = 80, h = 20 + const min = Math.min(...values) + const max = Math.max(...values) + const range = max - min || 1 + const points = values.map((v, i) => + `${(i / (values.length - 1)) * w},${h - ((v - min) / range) * h}` + ).join(' ') + return ( + + + + ) +} + +interface MetricsTabProps { + run: string + logs: LogEntry[] + /** When provided, use this filter instead of internal state. */ + sharedFilter?: string +} + +export default function MetricsTab({ run, logs, sharedFilter }: MetricsTabProps) { + const [series, setSeries] = useState([]) + const hasSharedFilter = sharedFilter != null + const [localFilter, setLocalFilter] = useState('') + const filterValue = hasSharedFilter ? 
sharedFilter : localFilter + + useEffect(() => { + const metricsLogs = logs.filter(l => l.kind === 'metrics') + if (metricsLogs.length === 0) return + + let dead = false + Promise.all(metricsLogs.map(async (log) => { + const res = await fetch(`${runFilesBase(run)}${log.path}`) + if (!res.ok) return [] + const text = await res.text() + const device = log.path.split('.')[1] || log.node // device..metrics.jsonl + const points: MetricPoint[] = text.trim().split('\n') + .map(line => { try { return JSON.parse(line) } catch { return null } }) + .filter((p): p is MetricPoint => p != null) + + // Group by key + const byKey = new Map() + for (const p of points) { + for (const [k, v] of Object.entries(p.m)) { + if (typeof v !== 'number') continue + let arr = byKey.get(k) + if (!arr) { arr = []; byKey.set(k, arr) } + arr.push({ t: p.t, v }) + } + } + return Array.from(byKey.entries()).map(([key, values]) => ({ + device, key, values + })) + })).then(results => { + if (!dead) setSeries(results.flat()) + }) + + return () => { dead = true } + }, [run, logs]) + + // Derive unique devices and metric keys, then pivot + const devices = useMemo(() => { + const set = new Set() + for (const s of series) set.add(s.device) + return Array.from(set).sort() + }, [series]) + + const metricKeys = useMemo(() => { + const set = new Set() + for (const s of series) set.add(s.key) + return Array.from(set).sort() + }, [series]) + + // Build lookup: key -> device -> series + const lookup = useMemo(() => { + const map = new Map>() + for (const s of series) { + let byDevice = map.get(s.key) + if (!byDevice) { byDevice = new Map(); map.set(s.key, byDevice) } + byDevice.set(s.device, s) + } + return map + }, [series]) + + // Filter metric keys + const filteredKeys = useMemo(() => { + if (!filterValue) return metricKeys + const q = filterValue.toLowerCase() + return metricKeys.filter(k => k.toLowerCase().includes(q)) + }, [metricKeys, filterValue]) + + if (series.length === 0) { + return
No metrics recorded for this run.
+ } + + return ( +
+ {/* Filter input -- hidden when shared filter is provided */} + {!hasSharedFilter && ( +
+ setLocalFilter(e.target.value)} + style={{ width: '100%', maxWidth: 400 }} + /> +
+ )} + +
+ + + + + {devices.map(d => ( + + ))} + + + + {devices.map(d => ( + + + + + ))} + + + + {filteredKeys.map((key) => { + const byDevice = lookup.get(key) + return ( + + + {devices.map(device => { + const s = byDevice?.get(device) + if (!s) { + return ( + + + + + ) + } + const lastVal = s.values[s.values.length - 1]?.v + return ( + + + + + ) + })} + + ) + })} + +
Metric{d}
valuetrend
{key}{lastVal != null ? lastVal.toFixed(2) : '\u2014'} v.v)} />
+
+
+ ) +} diff --git a/ui/src/components/PerfTab.tsx b/ui/src/components/PerfTab.tsx index d348a8a..bfbef99 100644 --- a/ui/src/components/PerfTab.tsx +++ b/ui/src/components/PerfTab.tsx @@ -156,11 +156,11 @@ interface PerfTabProps { } export default function PerfTab({ results, combined, onSimSelect }: PerfTabProps) { - // Combined / invocation view + // Combined / group view if (combined) { const { runs } = combined if (runs.length === 0) { - return
no combined results for this invocation
+ return
no combined results for this group
} const detailRows = runs.flatMap((run) => diff --git a/ui/src/components/RunSelector.tsx b/ui/src/components/RunSelector.tsx new file mode 100644 index 0000000..57fa7c9 --- /dev/null +++ b/ui/src/components/RunSelector.tsx @@ -0,0 +1,65 @@ +import { useMemo } from 'react' +import type { RunInfo } from '../api' +import { groupByGroup, simLabel } from '../utils' + +// ── Selection model ──────────────────────────────────────────────── + +export type Selection = + | { kind: 'run'; name: string } + | { kind: 'group'; name: string } + +export function selectionKey(s: Selection | null): string { + if (!s) return '' + return s.kind === 'group' ? `group:${s.name}` : s.name +} + +export function selectionFromValue(val: string): Selection | null { + if (!val) return null + if (val.startsWith('group:')) return { kind: 'group', name: val.slice(6) } + return { kind: 'run', name: val } +} + +export function selectionPath(s: Selection | null): string { + if (!s) return '/' + return s.kind === 'group' ? 
`/group/${s.name}` : `/run/${s.name}` +} + +// ── Component ────────────────────────────────────────────────────── + +interface RunSelectorProps { + runs: RunInfo[] + value: Selection | null + onChange: (selection: Selection | null) => void +} + +export default function RunSelector({ runs, value, onChange }: RunSelectorProps) { + const { groups, ungrouped } = useMemo(() => groupByGroup(runs), [runs]) + + return ( + + ) +} diff --git a/ui/src/components/RunView.tsx b/ui/src/components/RunView.tsx new file mode 100644 index 0000000..2559498 --- /dev/null +++ b/ui/src/components/RunView.tsx @@ -0,0 +1,130 @@ +import { useState, useCallback, useMemo } from 'react' +import type { LabEvent, LabState } from '../devtools-types' +import type { SimResults } from '../types' +import type { RunInfo, LogEntry } from '../api' +import { runFilesBase } from '../api' +import LogsTab from './LogsTab' +import PerfTab from './PerfTab' +import TimelineTab from './TimelineTab' +import TopologyGraph from './TopologyGraph' +import NodeDetail from './NodeDetail' +import MetricsTab from './MetricsTab' + +export type RunTab = 'topology' | 'logs' | 'timeline' | 'perf' | 'metrics' + +/** External controls passed from CompareView for shared filter state. 
*/ +export interface ExternalControls { + logFilter?: string + logLevels?: Set + metricsFilter?: string +} + +interface RunViewProps { + run: RunInfo + state: LabState | null + events: LabEvent[] + logs: LogEntry[] + results: SimResults | null + activeTab: RunTab + onTabChange: (tab: RunTab) => void + externalControls?: ExternalControls +} + +export default function RunView({ run, state, events, logs, results, activeTab, onTabChange, externalControls }: RunViewProps) { + const [selectedNode, setSelectedNode] = useState(null) + const [selectedKind, setSelectedKind] = useState<'router' | 'device' | 'ix'>('router') + const [logJump, setLogJump] = useState<{ node: string; path: string; timeLabel: string; nonce: number } | null>(null) + + const handleNodeSelect = useCallback((name: string, kind: 'router' | 'device' | 'ix') => { + setSelectedNode(name) + setSelectedKind(kind) + }, []) + + const handleJumpToLog = useCallback((target: { node: string; path: string; timeLabel: string }) => { + onTabChange('logs') + setLogJump({ ...target, nonce: Date.now() }) + }, [onTabChange]) + + const base = runFilesBase(run.name) + const logsForTabs = useMemo( + () => logs.map((l) => ({ node: l.node, kind: l.kind, path: l.path })), + [logs], + ) + + const hasMetricsLogs = useMemo(() => logs.some(l => l.kind === 'metrics'), [logs]) + const availableTabs = useMemo(() => [ + 'topology', + 'logs', + 'timeline', + ...(results ? (['perf'] as RunTab[]) : []), + ...(hasMetricsLogs ? (['metrics'] as RunTab[]) : []), + ], [results, hasMetricsLogs]) + + const tab = availableTabs.includes(activeTab) ? activeTab : availableTabs[0] + + return ( + <> +
+ {availableTabs.map((t) => ( + + ))} +
+ +
+ {tab === 'topology' && state && ( +
+
+ +
+ {selectedNode && ( +
+ +
+ )} +
+ )} + {tab === 'topology' && !state && ( +
Loading lab state...
+ )} + + {tab === 'logs' && ( + + )} + + {tab === 'timeline' && ( + + )} + + {tab === 'perf' && } + + {tab === 'metrics' && ( + + )} +
+ + ) +} diff --git a/ui/src/index.css b/ui/src/index.css index 3f00971..a42c62d 100644 --- a/ui/src/index.css +++ b/ui/src/index.css @@ -277,6 +277,7 @@ tbody td { display: flex; flex: 1; overflow: hidden; + position: relative; } .logs-sidebar { width: 220px; @@ -725,7 +726,7 @@ tbody td { padding: 16px; } -/* ── Sims list (invocation view) ── */ +/* ── Sims list (group view) ── */ .sims-list { padding: 1.5rem; max-width: 700px; diff --git a/ui/src/main.tsx b/ui/src/main.tsx index acb322e..76159de 100644 --- a/ui/src/main.tsx +++ b/ui/src/main.tsx @@ -1,19 +1,30 @@ import React from 'react' import ReactDOM from 'react-dom/client' -import { HashRouter, Routes, Route, Navigate } from 'react-router-dom' -import App from './App' +import { BrowserRouter, Routes, Route, Navigate } from 'react-router-dom' +import RunPage from './RunPage' +import GroupPage from './GroupPage' import RunsIndex from './RunsIndex' +import ComparePage from './ComparePage' import './index.css' ReactDOM.createRoot(document.getElementById('root')!).render( - + } /> - } /> - } /> + } /> + } /> + } /> + {/* Legacy redirect: /inv/:name -> /group/:name */} + } /> } /> - + ) + +/** Redirect legacy /inv/* paths to /group/*. 
*/ +function InvRedirect() { + const rest = window.location.pathname.slice('/inv/'.length) + return +} diff --git a/ui/src/utils.ts b/ui/src/utils.ts new file mode 100644 index 0000000..6b9557a --- /dev/null +++ b/ui/src/utils.ts @@ -0,0 +1,38 @@ +import type { RunInfo } from './api' + +// ── Group helpers ─────────────────────────────────────────────────── + +export interface RunGroup { + group: string + runs: RunInfo[] +} + +export function groupByGroup(runs: RunInfo[]): { groups: RunGroup[]; ungrouped: RunInfo[] } { + const grouped = new Map() + const ungrouped: RunInfo[] = [] + for (const r of runs) { + if (r.group) { + let list = grouped.get(r.group) + if (!list) { + list = [] + grouped.set(r.group, list) + } + list.push(r) + } else { + ungrouped.push(r) + } + } + const groups: RunGroup[] = [] + for (const [group, groupRuns] of grouped) { + groups.push({ group, runs: groupRuns }) + } + return { groups, ungrouped } +} + +/** Short display label for a run within a group. */ +export function simLabel(run: RunInfo): string { + if (run.group && run.name.startsWith(run.group + '/')) { + return run.label ?? run.name.slice(run.group.length + 1) + } + return run.label ?? run.name +}