diff --git a/.gitignore b/.gitignore index b4d222c..ffb7a93 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,5 @@ DerivedData/ **/xcuserdata/ *.xcuserstate *.profraw +*.xcodeproj +*.xcworkspace diff --git a/PPG CLI/PPG CLI/WebSocketManager.swift b/PPG CLI/PPG CLI/WebSocketManager.swift new file mode 100644 index 0000000..32975fa --- /dev/null +++ b/PPG CLI/PPG CLI/WebSocketManager.swift @@ -0,0 +1,383 @@ +import Foundation + +// MARK: - Notifications + +extension Notification.Name { + static let webSocketStateDidChange = Notification.Name("PPGWebSocketStateDidChange") + static let webSocketDidReceiveEvent = Notification.Name("PPGWebSocketDidReceiveEvent") +} + +// MARK: - Connection State + +nonisolated enum WebSocketConnectionState: Equatable, Sendable { + case disconnected + case connecting + case connected + case reconnecting(attempt: Int) + + var isConnected: Bool { self == .connected } + + var isReconnecting: Bool { + if case .reconnecting = self { return true } + return false + } +} + +// MARK: - Server Events + +nonisolated enum WebSocketEvent: Sendable { + case manifestUpdated(ManifestModel) + case agentStatusChanged(agentId: String, status: AgentStatus) + case worktreeStatusChanged(worktreeId: String, status: String) + case pong + case unknown(type: String, payload: String) +} + +// MARK: - Client Commands + +nonisolated enum WebSocketCommand: Sendable { + case subscribe(channel: String) + case unsubscribe(channel: String) + case terminalInput(agentId: String, data: String) + + var jsonString: String { + let dict: [String: String] + switch self { + case .subscribe(let channel): + dict = ["type": "subscribe", "channel": channel] + case .unsubscribe(let channel): + dict = ["type": "unsubscribe", "channel": channel] + case .terminalInput(let agentId, let data): + dict = ["type": "terminal_input", "agentId": agentId, "data": data] + } + guard let data = try? 
JSONSerialization.data(withJSONObject: dict, options: [.sortedKeys]), + let str = String(data: data, encoding: .utf8) else { + return "{}" + } + return str + } +} + +// MARK: - WebSocketManager + +nonisolated class WebSocketManager: NSObject, @unchecked Sendable, URLSessionWebSocketDelegate { + + /// Notification userInfo key for connection state. + static let stateUserInfoKey = "PPGWebSocketState" + /// Notification userInfo key for received event. + static let eventUserInfoKey = "PPGWebSocketEvent" + + // MARK: - Configuration + + private let url: URL + private let maxReconnectDelay: TimeInterval = 30.0 + private let baseReconnectDelay: TimeInterval = 1.0 + private let pingInterval: TimeInterval = 30.0 + + // MARK: - State + + private let queue = DispatchQueue(label: "ppg.websocket-manager", qos: .utility) + + /// Internal state — only read/write on `queue`. + private var _state: WebSocketConnectionState = .disconnected + + /// Thread-safe read of the current connection state. + var state: WebSocketConnectionState { + queue.sync { _state } + } + + private var session: URLSession? + private var task: URLSessionWebSocketTask? + private var pingTimer: DispatchSourceTimer? + private var reconnectWorkItem: DispatchWorkItem? + private var reconnectAttempt = 0 + private var intentionalDisconnect = false + private var isHandlingConnectionLoss = false + + // MARK: - Init + + init(url: URL) { + self.url = url + super.init() + } + + convenience init?(urlString: String) { + guard let url = URL(string: urlString) else { return nil } + self.init(url: url) + } + + deinit { + // Synchronous cleanup — safe because we're the last reference holder. 
+ intentionalDisconnect = true + pingTimer?.cancel() + pingTimer = nil + task?.cancel(with: .goingAway, reason: nil) + task = nil + session?.invalidateAndCancel() + session = nil + } + + // MARK: - Public API + + func connect() { + queue.async { [weak self] in + self?.doConnect() + } + } + + func disconnect() { + queue.async { [weak self] in + self?.doDisconnect() + } + } + + func send(_ command: WebSocketCommand) { + queue.async { [weak self] in + self?.doSend(command.jsonString) + } + } + + // MARK: - Connection Lifecycle + + private func doConnect() { + guard _state == .disconnected || _state.isReconnecting else { return } + + intentionalDisconnect = false + isHandlingConnectionLoss = false + reconnectWorkItem?.cancel() + reconnectWorkItem = nil + + if _state.isReconnecting { + // Already in reconnect flow — keep the attempt counter + } else { + reconnectAttempt = 0 + setState(.connecting) + } + + let config = URLSessionConfiguration.default + config.waitsForConnectivity = true + session = URLSession(configuration: config, delegate: self, delegateQueue: nil) + + let wsTask = session!.webSocketTask(with: url) + task = wsTask + wsTask.resume() + } + + private func doDisconnect() { + intentionalDisconnect = true + isHandlingConnectionLoss = false + reconnectWorkItem?.cancel() + reconnectWorkItem = nil + stopPingTimer() + task?.cancel(with: .goingAway, reason: nil) + task = nil + session?.invalidateAndCancel() + session = nil + reconnectAttempt = 0 + setState(.disconnected) + } + + /// Set state on the queue and post a notification on main. 
+ private func setState(_ newState: WebSocketConnectionState) { + guard _state != newState else { return } + _state = newState + DispatchQueue.main.async { + NotificationCenter.default.post( + name: .webSocketStateDidChange, + object: nil, + userInfo: [WebSocketManager.stateUserInfoKey: newState] + ) + } + } + + // MARK: - Sending + + private func doSend(_ text: String) { + guard _state == .connected, let task = task else { return } + task.send(.string(text)) { error in + if let error = error { + NSLog("[WebSocketManager] send error: \(error.localizedDescription)") + } + } + } + + // MARK: - Receiving + + private func listenForMessages(for expectedTask: URLSessionWebSocketTask) { + expectedTask.receive { [weak self] result in + guard let self = self else { return } + self.queue.async { + guard self.task === expectedTask else { return } + switch result { + case .success(let message): + self.handleMessage(message) + self.listenForMessages(for: expectedTask) + case .failure(let error): + if !self.intentionalDisconnect { + NSLog("[WebSocketManager] receive error: \(error.localizedDescription)") + self.handleConnectionLost() + } + } + } + } + } + + private func handleMessage(_ message: URLSessionWebSocketTask.Message) { + let text: String + switch message { + case .string(let s): + text = s + case .data(let d): + guard let s = String(data: d, encoding: .utf8) else { return } + text = s + @unknown default: + return + } + + guard let event = parseEvent(text) else { return } + + DispatchQueue.main.async { + NotificationCenter.default.post( + name: .webSocketDidReceiveEvent, + object: nil, + userInfo: [WebSocketManager.eventUserInfoKey: event] + ) + } + } + + // MARK: - Event Parsing + + /// Parse a JSON text message into a typed event. Internal for testability. + func parseEvent(_ text: String) -> WebSocketEvent? { + guard let data = text.data(using: .utf8), + let json = try? JSONSerialization.jsonObject(with: data) as? [String: Any], + let type = json["type"] as? 
String else { + return nil + } + + switch type { + case "manifest_updated": + if let payloadData = json["manifest"], + let payloadJSON = try? JSONSerialization.data(withJSONObject: payloadData), + let manifest = try? JSONDecoder().decode(ManifestModel.self, from: payloadJSON) { + return .manifestUpdated(manifest) + } + return .unknown(type: type, payload: text) + + case "agent_status_changed": + if let agentId = json["agentId"] as? String, + let statusRaw = json["status"] as? String, + let status = AgentStatus(rawValue: statusRaw) { + return .agentStatusChanged(agentId: agentId, status: status) + } + return .unknown(type: type, payload: text) + + case "worktree_status_changed": + if let worktreeId = json["worktreeId"] as? String, + let status = json["status"] as? String { + return .worktreeStatusChanged(worktreeId: worktreeId, status: status) + } + return .unknown(type: type, payload: text) + + case "pong": + return .pong + + default: + return .unknown(type: type, payload: text) + } + } + + // MARK: - Keepalive Ping + + private func startPingTimer() { + stopPingTimer() + let timer = DispatchSource.makeTimerSource(queue: queue) + timer.schedule(deadline: .now() + pingInterval, repeating: pingInterval) + timer.setEventHandler { [weak self] in + self?.sendPing() + } + timer.resume() + pingTimer = timer + } + + private func stopPingTimer() { + pingTimer?.cancel() + pingTimer = nil + } + + private func sendPing() { + task?.sendPing { [weak self] error in + if let error = error { + NSLog("[WebSocketManager] ping error: \(error.localizedDescription)") + self?.queue.async { self?.handleConnectionLost() } + } + } + } + + // MARK: - Reconnect + + private func handleConnectionLost() { + guard !intentionalDisconnect else { return } + guard !isHandlingConnectionLoss else { return } + isHandlingConnectionLoss = true + stopPingTimer() + task?.cancel(with: .abnormalClosure, reason: nil) + task = nil + session?.invalidateAndCancel() + session = nil + scheduleReconnect() + } + + 
private func scheduleReconnect() { + reconnectAttempt += 1 + setState(.reconnecting(attempt: reconnectAttempt)) + + let delay = min(baseReconnectDelay * pow(2.0, Double(reconnectAttempt - 1)), maxReconnectDelay) + NSLog("[WebSocketManager] reconnecting in %.1fs (attempt %d)", delay, reconnectAttempt) + + let workItem = DispatchWorkItem { [weak self] in + guard let self = self, !self.intentionalDisconnect else { return } + self.reconnectWorkItem = nil + self.doConnect() + } + reconnectWorkItem?.cancel() + reconnectWorkItem = workItem + queue.asyncAfter(deadline: .now() + delay, execute: workItem) + } + + // MARK: - URLSessionWebSocketDelegate + + func urlSession(_ session: URLSession, webSocketTask: URLSessionWebSocketTask, didOpenWithProtocol protocol: String?) { + queue.async { [weak self] in + guard let self = self else { return } + guard self.task === webSocketTask else { return } + self.reconnectAttempt = 0 + self.isHandlingConnectionLoss = false + self.setState(.connected) + self.startPingTimer() + self.listenForMessages(for: webSocketTask) + } + } + + func urlSession(_ session: URLSession, webSocketTask: URLSessionWebSocketTask, didCloseWith closeCode: URLSessionWebSocketTask.CloseCode, reason: Data?) { + queue.async { [weak self] in + guard let self = self else { return } + guard self.task === webSocketTask else { return } + if self.intentionalDisconnect { + self.setState(.disconnected) + } else { + self.handleConnectionLost() + } + } + } + + func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: (any Error)?) { + guard error != nil else { return } + queue.async { [weak self] in + guard let self = self, !self.intentionalDisconnect else { return } + guard let webSocketTask = task as? 
URLSessionWebSocketTask, + self.task === webSocketTask else { return } + self.handleConnectionLost() + } + } +} diff --git a/PPG CLI/PPG CLITests/WebSocketManagerTests.swift b/PPG CLI/PPG CLITests/WebSocketManagerTests.swift new file mode 100644 index 0000000..70dac9d --- /dev/null +++ b/PPG CLI/PPG CLITests/WebSocketManagerTests.swift @@ -0,0 +1,212 @@ +import XCTest +@testable import PPG_CLI + +final class WebSocketManagerTests: XCTestCase { + + // MARK: - WebSocketConnectionState + + func testIsConnectedReturnsTrueOnlyWhenConnected() { + XCTAssertTrue(WebSocketConnectionState.connected.isConnected) + XCTAssertFalse(WebSocketConnectionState.disconnected.isConnected) + XCTAssertFalse(WebSocketConnectionState.connecting.isConnected) + XCTAssertFalse(WebSocketConnectionState.reconnecting(attempt: 1).isConnected) + } + + func testIsReconnectingReturnsTrueOnlyWhenReconnecting() { + XCTAssertTrue(WebSocketConnectionState.reconnecting(attempt: 1).isReconnecting) + XCTAssertTrue(WebSocketConnectionState.reconnecting(attempt: 5).isReconnecting) + XCTAssertFalse(WebSocketConnectionState.connected.isReconnecting) + XCTAssertFalse(WebSocketConnectionState.disconnected.isReconnecting) + XCTAssertFalse(WebSocketConnectionState.connecting.isReconnecting) + } + + func testReconnectingEquality() { + XCTAssertEqual( + WebSocketConnectionState.reconnecting(attempt: 3), + WebSocketConnectionState.reconnecting(attempt: 3) + ) + XCTAssertNotEqual( + WebSocketConnectionState.reconnecting(attempt: 1), + WebSocketConnectionState.reconnecting(attempt: 2) + ) + } + + // MARK: - WebSocketCommand.jsonString + + func testSubscribeCommandProducesValidJSON() { + let cmd = WebSocketCommand.subscribe(channel: "manifest") + let json = parseJSON(cmd.jsonString) + XCTAssertEqual(json?["type"] as? String, "subscribe") + XCTAssertEqual(json?["channel"] as? 
String, "manifest") + } + + func testUnsubscribeCommandProducesValidJSON() { + let cmd = WebSocketCommand.unsubscribe(channel: "agents") + let json = parseJSON(cmd.jsonString) + XCTAssertEqual(json?["type"] as? String, "unsubscribe") + XCTAssertEqual(json?["channel"] as? String, "agents") + } + + func testTerminalInputCommandProducesValidJSON() { + let cmd = WebSocketCommand.terminalInput(agentId: "ag-12345678", data: "ls -la\n") + let json = parseJSON(cmd.jsonString) + XCTAssertEqual(json?["type"] as? String, "terminal_input") + XCTAssertEqual(json?["agentId"] as? String, "ag-12345678") + XCTAssertEqual(json?["data"] as? String, "ls -la\n") + } + + func testCommandEscapesSpecialCharactersInChannel() { + // A channel name with quotes should not break JSON structure + let cmd = WebSocketCommand.subscribe(channel: #"test"channel"#) + let json = parseJSON(cmd.jsonString) + XCTAssertEqual(json?["channel"] as? String, #"test"channel"#) + } + + func testCommandEscapesSpecialCharactersInAgentId() { + let cmd = WebSocketCommand.terminalInput(agentId: #"id"with"quotes"#, data: "x") + let json = parseJSON(cmd.jsonString) + XCTAssertEqual(json?["agentId"] as? String, #"id"with"quotes"#) + } + + func testTerminalInputPreservesControlCharacters() { + let cmd = WebSocketCommand.terminalInput(agentId: "ag-1", data: "line1\nline2\ttab\r") + let json = parseJSON(cmd.jsonString) + XCTAssertEqual(json?["data"] as? String, "line1\nline2\ttab\r") + } + + // MARK: - parseEvent + + func testParseAgentStatusChangedEvent() { + let manager = WebSocketManager(url: URL(string: "ws://localhost")!) 
+ let json = #"{"type":"agent_status_changed","agentId":"ag-abc","status":"completed"}"# + let event = manager.parseEvent(json) + + if case .agentStatusChanged(let agentId, let status) = event { + XCTAssertEqual(agentId, "ag-abc") + XCTAssertEqual(status, .completed) + } else { + XCTFail("Expected agentStatusChanged, got \(String(describing: event))") + } + } + + func testParseWorktreeStatusChangedEvent() { + let manager = WebSocketManager(url: URL(string: "ws://localhost")!) + let json = #"{"type":"worktree_status_changed","worktreeId":"wt-xyz","status":"active"}"# + let event = manager.parseEvent(json) + + if case .worktreeStatusChanged(let worktreeId, let status) = event { + XCTAssertEqual(worktreeId, "wt-xyz") + XCTAssertEqual(status, "active") + } else { + XCTFail("Expected worktreeStatusChanged, got \(String(describing: event))") + } + } + + func testParsePongEvent() { + let manager = WebSocketManager(url: URL(string: "ws://localhost")!) + let event = manager.parseEvent(#"{"type":"pong"}"#) + + if case .pong = event { + // pass + } else { + XCTFail("Expected pong, got \(String(describing: event))") + } + } + + func testParseUnknownEventType() { + let manager = WebSocketManager(url: URL(string: "ws://localhost")!) + let json = #"{"type":"custom_event","foo":"bar"}"# + let event = manager.parseEvent(json) + + if case .unknown(let type, let payload) = event { + XCTAssertEqual(type, "custom_event") + XCTAssertEqual(payload, json) + } else { + XCTFail("Expected unknown, got \(String(describing: event))") + } + } + + func testParseManifestUpdatedEvent() { + let manager = WebSocketManager(url: URL(string: "ws://localhost")!) 
+ let json = """ + {"type":"manifest_updated","manifest":{"version":1,"projectRoot":"/tmp","sessionName":"s","worktrees":{},"createdAt":"t","updatedAt":"t"}} + """ + let event = manager.parseEvent(json) + + if case .manifestUpdated(let manifest) = event { + XCTAssertEqual(manifest.version, 1) + XCTAssertEqual(manifest.projectRoot, "/tmp") + XCTAssertEqual(manifest.sessionName, "s") + } else { + XCTFail("Expected manifestUpdated, got \(String(describing: event))") + } + } + + func testParseManifestUpdatedWithInvalidManifestFallsBackToUnknown() { + let manager = WebSocketManager(url: URL(string: "ws://localhost")!) + let json = #"{"type":"manifest_updated","manifest":{"bad":"data"}}"# + let event = manager.parseEvent(json) + + if case .unknown(let type, _) = event { + XCTAssertEqual(type, "manifest_updated") + } else { + XCTFail("Expected unknown fallback, got \(String(describing: event))") + } + } + + func testParseReturnsNilForInvalidJSON() { + let manager = WebSocketManager(url: URL(string: "ws://localhost")!) + XCTAssertNil(manager.parseEvent("not json")) + } + + func testParseReturnsNilForMissingType() { + let manager = WebSocketManager(url: URL(string: "ws://localhost")!) + XCTAssertNil(manager.parseEvent(#"{"channel":"test"}"#)) + } + + func testParseAgentStatusWithInvalidStatusFallsBackToUnknown() { + let manager = WebSocketManager(url: URL(string: "ws://localhost")!) + let json = #"{"type":"agent_status_changed","agentId":"ag-1","status":"bogus"}"# + let event = manager.parseEvent(json) + + if case .unknown(let type, _) = event { + XCTAssertEqual(type, "agent_status_changed") + } else { + XCTFail("Expected unknown fallback for invalid status, got \(String(describing: event))") + } + } + + func testParseAgentStatusWithMissingFieldsFallsBackToUnknown() { + let manager = WebSocketManager(url: URL(string: "ws://localhost")!) 
+ let json = #"{"type":"agent_status_changed","agentId":"ag-1"}"# + let event = manager.parseEvent(json) + + if case .unknown(let type, _) = event { + XCTAssertEqual(type, "agent_status_changed") + } else { + XCTFail("Expected unknown fallback for missing status, got \(String(describing: event))") + } + } + + // MARK: - Initial State + + func testInitialStateIsDisconnected() { + let manager = WebSocketManager(url: URL(string: "ws://localhost")!) + XCTAssertEqual(manager.state, .disconnected) + } + + func testConvenienceInitReturnsNilForEmptyString() { + XCTAssertNil(WebSocketManager(urlString: "")) + } + + func testConvenienceInitSucceedsForValidURL() { + XCTAssertNotNil(WebSocketManager(urlString: "ws://localhost:8080")) + } + + // MARK: - Helpers + + private func parseJSON(_ string: String) -> [String: Any]? { + guard let data = string.data(using: .utf8) else { return nil } + return try? JSONSerialization.jsonObject(with: data) as? [String: Any] + } +} diff --git a/ios/PPGMobile/PPGMobile/App/ContentView.swift b/ios/PPGMobile/PPGMobile/App/ContentView.swift new file mode 100644 index 0000000..8dcab23 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/App/ContentView.swift @@ -0,0 +1,17 @@ +import SwiftUI + +struct ContentView: View { + var body: some View { + VStack { + Image(systemName: "terminal") + .imageScale(.large) + .foregroundStyle(.tint) + Text("PPG Mobile") + } + .padding() + } +} + +#Preview { + ContentView() +} diff --git a/ios/PPGMobile/PPGMobile/App/PPGMobileApp.swift b/ios/PPGMobile/PPGMobile/App/PPGMobileApp.swift new file mode 100644 index 0000000..d545c9a --- /dev/null +++ b/ios/PPGMobile/PPGMobile/App/PPGMobileApp.swift @@ -0,0 +1,31 @@ +import SwiftUI + +@main +struct PPGMobileApp: App { + @State private var appState = AppState() + + var body: some Scene { + WindowGroup { + TabView { + DashboardView() + .tabItem { + Label("Dashboard", systemImage: "square.grid.2x2") + } + + SpawnView() + .tabItem { + Label("Spawn", systemImage: "plus.circle") + } + + 
SettingsView() + .tabItem { + Label("Settings", systemImage: "gear") + } + } + .environment(appState) + .task { + await appState.autoConnect() + } + } + } +} diff --git a/ios/PPGMobile/PPGMobile/Assets.xcassets/AppIcon.appiconset/Contents.json b/ios/PPGMobile/PPGMobile/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000..b121e3b --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,13 @@ +{ + "images": [ + { + "idiom": "universal", + "platform": "ios", + "size": "1024x1024" + } + ], + "info": { + "author": "xcode", + "version": 1 + } +} diff --git a/ios/PPGMobile/PPGMobile/Assets.xcassets/Contents.json b/ios/PPGMobile/PPGMobile/Assets.xcassets/Contents.json new file mode 100644 index 0000000..74d6a72 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info": { + "author": "xcode", + "version": 1 + } +} diff --git a/ios/PPGMobile/PPGMobile/Models/AgentVariant.swift b/ios/PPGMobile/PPGMobile/Models/AgentVariant.swift new file mode 100644 index 0000000..b8c8556 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Models/AgentVariant.swift @@ -0,0 +1,73 @@ +import SwiftUI + +/// Known agent types with their display properties. +/// +/// Maps to the `agentType` field on `AgentEntry`. New variants can be added +/// without schema changes since `agentType` is a free-form string — unknown +/// values return `nil` from `AgentVariant.from(_:)` and fall back to defaults +/// in the `AgentEntry` convenience extensions. +enum AgentVariant: String, CaseIterable, Identifiable { + case claude + case codex + case opencode + + var id: String { rawValue } + + /// Human-readable display name. + var displayName: String { + switch self { + case .claude: "Claude" + case .codex: "Codex" + case .opencode: "OpenCode" + } + } + + /// SF Symbol icon for this agent type. 
+ var icon: String { sfSymbol } + + var sfSymbol: String { + switch self { + case .claude: "brain.head.profile" + case .codex: "terminal" + case .opencode: "chevron.left.forwardslash.chevron.right" + } + } + + /// Brand color for this agent type. + var color: Color { + switch self { + case .claude: .orange + case .codex: .cyan + case .opencode: .purple + } + } + + /// Resolve an `agentType` string to a known variant, or `nil` if unknown. + static func from(_ agentType: String) -> AgentVariant? { + AgentVariant(rawValue: agentType.lowercased()) + } +} + +// MARK: - AgentEntry integration + +extension AgentEntry { + /// The known variant for this agent, or `nil` for custom agent types. + var variant: AgentVariant? { + AgentVariant.from(agentType) + } + + /// Display name — uses the variant's name if known, otherwise the raw `agentType`. + var displayName: String { + variant?.displayName ?? agentType + } + + /// Icon — uses the variant's symbol if known, otherwise a generic terminal icon. + var iconName: String { + variant?.sfSymbol ?? "terminal" + } + + /// Color — uses the variant's color if known, otherwise secondary. + var brandColor: Color { + variant?.color ?? .secondary + } +} diff --git a/ios/PPGMobile/PPGMobile/Models/DashboardModels.swift b/ios/PPGMobile/PPGMobile/Models/DashboardModels.swift new file mode 100644 index 0000000..bd0b030 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Models/DashboardModels.swift @@ -0,0 +1,107 @@ +import SwiftUI + +// MARK: - Connection State (UI-only, distinct from WebSocketConnectionState) + +enum ConnectionState { + case disconnected + case connecting + case connected + case error(String) +} + +// MARK: - Diff Stats + +struct DiffStats { + let filesChanged: Int + let insertions: Int + let deletions: Int +} + +struct DiffResponse: Codable { + let diff: String? + let stats: DiffStatsResponse? 
+} + +struct DiffStatsResponse: Codable { + let filesChanged: Int + let insertions: Int + let deletions: Int +} + +// MARK: - API Response Types + +struct SpawnResponse: Codable { + let success: Bool + let worktreeId: String +} + +struct LogsResponse: Codable { + let output: String +} + +struct Config: Codable { + let sessionName: String? +} + +struct TemplatesResponse: Codable { + let templates: [String] +} + +struct PromptsResponse: Codable { + let prompts: [String] +} + +struct SwarmsResponse: Codable { + let swarms: [String] +} + +struct ErrorResponse: Codable { + let error: String +} + +// MARK: - AgentStatus UI Extensions + +extension AgentStatus { + var icon: String { sfSymbol } + + var isActive: Bool { + self == .spawning || self == .running + } + + var isTerminal: Bool { + switch self { + case .completed, .failed, .killed, .lost: true + default: false + } + } +} + +// MARK: - WorktreeStatus UI Extensions + +extension WorktreeStatus { + var icon: String { sfSymbol } + + var isTerminal: Bool { + self == .merged || self == .cleaned + } +} + +// MARK: - AgentEntry UI Extensions + +extension AgentEntry { + var startDate: Date? { + ISO8601DateFormatter().date(from: startedAt) + } +} + +// MARK: - WorktreeEntry UI Extensions + +extension WorktreeEntry { + var createdDate: Date? { + ISO8601DateFormatter().date(from: createdAt) + } + + var mergedDate: Date? { + mergedAt.flatMap { ISO8601DateFormatter().date(from: $0) } + } +} diff --git a/ios/PPGMobile/PPGMobile/Models/Manifest.swift b/ios/PPGMobile/PPGMobile/Models/Manifest.swift new file mode 100644 index 0000000..f43f291 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Models/Manifest.swift @@ -0,0 +1,207 @@ +import SwiftUI + +// MARK: - Agent Status + +/// Lifecycle status for an agent process. 
+/// +/// Matches the ppg agent lifecycle: +/// spawning → running → completed | failed | killed | lost +/// +/// Custom decoding also accepts the current TypeScript status values: +/// `"idle"` → `.running`, `"exited"` → `.completed`, `"gone"` → `.lost` +enum AgentStatus: String, Codable, CaseIterable, Hashable { + case spawning + case running + case waiting + case completed + case failed + case killed + case lost + + /// Maps legacy/TS status strings to lifecycle values. + private static let aliases: [String: AgentStatus] = [ + "idle": .running, + "exited": .completed, + "gone": .lost, + "waiting": .waiting, + ] + + init(from decoder: Decoder) throws { + let raw = try decoder.singleValueContainer().decode(String.self) + if let direct = AgentStatus(rawValue: raw) { + self = direct + } else if let mapped = Self.aliases[raw] { + self = mapped + } else { + throw DecodingError.dataCorrupted( + .init(codingPath: decoder.codingPath, + debugDescription: "Unknown AgentStatus: \(raw)") + ) + } + } + + func encode(to encoder: Encoder) throws { + var container = encoder.singleValueContainer() + try container.encode(rawValue) + } + + var label: String { + rawValue.capitalized + } + + var color: Color { + switch self { + case .spawning: .orange + case .running: .green + case .waiting: .yellow + case .completed: .blue + case .failed: .red + case .killed: .gray + case .lost: .secondary + } + } + + var sfSymbol: String { + switch self { + case .spawning: "arrow.triangle.2.circlepath" + case .running: "play.circle.fill" + case .waiting: "pause.circle" + case .completed: "checkmark.circle.fill" + case .failed: "xmark.circle.fill" + case .killed: "stop.circle.fill" + case .lost: "questionmark.circle" + } + } +} + +// MARK: - Worktree Status + +/// Lifecycle status for a git worktree. 
+/// +/// Matches the ppg worktree lifecycle: +/// active → merging → merged → cleaned +/// → failed +enum WorktreeStatus: String, Codable, CaseIterable, Hashable { + case active + case spawning + case running + case merging + case merged + case failed + case cleaned + + var label: String { + rawValue.capitalized + } + + var color: Color { + switch self { + case .active: .green + case .spawning: .yellow + case .running: .green + case .merging: .orange + case .merged: .blue + case .failed: .red + case .cleaned: .gray + } + } + + var sfSymbol: String { + switch self { + case .active: "arrow.branch" + case .spawning: "hourglass" + case .running: "play.circle.fill" + case .merging: "arrow.triangle.merge" + case .merged: "checkmark.circle" + case .failed: "xmark.circle" + case .cleaned: "trash.circle" + } + } +} + +// MARK: - Agent Entry + +/// A single agent (CLI process) running in a tmux pane. +/// +/// JSON keys use camelCase matching the server schema (e.g. `agentType`, `startedAt`). +struct AgentEntry: Codable, Identifiable, Hashable { + let id: String + let name: String + let agentType: String + var status: AgentStatus + let tmuxTarget: String + let prompt: String + let startedAt: String + var exitCode: Int? + var sessionId: String? + + // MARK: Hashable (identity-based) + + static func == (lhs: AgentEntry, rhs: AgentEntry) -> Bool { + lhs.id == rhs.id + } + + func hash(into hasher: inout Hasher) { + hasher.combine(id) + } +} + +// MARK: - Worktree Entry + +/// An isolated git checkout on branch `ppg/`. +struct WorktreeEntry: Codable, Identifiable, Hashable { + let id: String + let name: String + let path: String + let branch: String + let baseBranch: String + var status: WorktreeStatus + let tmuxWindow: String + var prUrl: String? + var agents: [String: AgentEntry] + let createdAt: String + var mergedAt: String? 
+ + // MARK: Hashable (identity-based) + + static func == (lhs: WorktreeEntry, rhs: WorktreeEntry) -> Bool { + lhs.id == rhs.id + } + + func hash(into hasher: inout Hasher) { + hasher.combine(id) + } +} + +// MARK: - Manifest + +/// Top-level runtime state persisted in `.ppg/manifest.json`. +struct Manifest: Codable { + let version: Int + let projectRoot: String + let sessionName: String + var worktrees: [String: WorktreeEntry] + let createdAt: String + var updatedAt: String +} + +// MARK: - Convenience + +extension Manifest { + /// All agents across all worktrees, flattened. + var allAgents: [AgentEntry] { + worktrees.values.flatMap { $0.agents.values } + } + + /// Worktrees sorted by creation date (newest first). + var sortedWorktrees: [WorktreeEntry] { + worktrees.values.sorted { $0.createdAt > $1.createdAt } + } +} + +extension WorktreeEntry { + /// Agents sorted by start date (newest first). + var sortedAgents: [AgentEntry] { + agents.values.sorted { $0.startedAt > $1.startedAt } + } +} diff --git a/ios/PPGMobile/PPGMobile/Models/ServerConnection.swift b/ios/PPGMobile/PPGMobile/Models/ServerConnection.swift new file mode 100644 index 0000000..9e40bf6 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Models/ServerConnection.swift @@ -0,0 +1,170 @@ +import Foundation + +/// Connection configuration for a ppg server instance. +/// +/// Stores the host, port, TLS CA certificate, and auth token needed to +/// communicate with a ppg server over REST and WebSocket. +struct ServerConnection: Codable, Identifiable, Hashable { + let id: UUID + var name: String + var host: String + var port: Int + var token: String + var caCertificate: String? + var isDefault: Bool + + init(id: UUID = UUID(), name: String = "My Mac", host: String, port: Int = 7700, token: String, caCertificate: String? 
= nil, isDefault: Bool = false) { + self.id = id + self.name = name + self.host = host + self.port = port + self.token = token + self.caCertificate = caCertificate + self.isDefault = isDefault + } + + /// Human-readable label (e.g. "192.168.1.5:7700"). + var displayName: String { + "\(host):\(port)" + } + + // MARK: - URL Builders + + private var usesTLS: Bool { + caCertificate != nil + } + + private var scheme: String { + usesTLS ? "https" : "http" + } + + private var wsScheme: String { + usesTLS ? "wss" : "ws" + } + + /// Base URL for REST API requests (e.g. `http://192.168.1.5:7700`). + /// Returns `nil` if the host is malformed. + var baseURL: URL? { + makeURL(scheme: scheme) + } + + /// URL for the API root. + var apiURL: URL? { + baseURL?.appendingPathComponent("api") + } + + /// URL for a specific REST API endpoint. + /// Returns `nil` if the base URL cannot be constructed. + /// + /// connection.restURL(for: "/api/status") + func restURL(for path: String) -> URL? { + guard let base = baseURL else { return nil } + return base.appending(path: path) + } + + /// WebSocket URL with auth token in query string. + /// Returns `nil` if the host is malformed. + /// + /// connection.webSocketURL // ws://192.168.1.5:7700/ws?token=abc123 + var webSocketURL: URL? { + makeURL( + scheme: wsScheme, + path: "/ws", + queryItems: [URLQueryItem(name: "token", value: token)] + ) + } + + // MARK: - QR Code + + /// Generates the QR code string for this connection. 
+ /// + /// ppg://connect?host=192.168.1.5&port=7700&token=abc123 + /// ppg://connect?host=192.168.1.5&port=7700&ca=BASE64...&token=abc123 + var qrCodeString: String { + var components = URLComponents() + components.scheme = "ppg" + components.host = "connect" + var items = [ + URLQueryItem(name: "host", value: host), + URLQueryItem(name: "port", value: String(port)), + ] + if let ca = caCertificate { + items.append(URLQueryItem(name: "ca", value: ca)) + } + items.append(URLQueryItem(name: "token", value: token)) + components.queryItems = items + return components.string ?? "ppg://connect" + } + + /// Parse a ppg serve QR code payload. + /// Format: ppg://connect?host=&port=&token=[&ca=] + static func fromQRCode(_ payload: String) -> ServerConnection? { + guard let components = URLComponents(string: payload), + components.scheme?.lowercased() == "ppg", + components.host?.lowercased() == "connect" + else { + return nil + } + + let params = Dictionary( + (components.queryItems ?? []).compactMap { item in + item.value.map { (item.name, $0) } + }, + uniquingKeysWith: { _, last in last } + ) + + guard let host = params["host"], isValidHost(host), + let token = params["token"], !token.isEmpty + else { + return nil + } + + let port = params["port"].flatMap(Int.init) ?? 7700 + guard (1...65_535).contains(port) else { return nil } + let ca = params["ca"].flatMap { Data(base64Encoded: $0) != nil ? $0 : nil } + + return ServerConnection( + name: host == "0.0.0.0" ? "Local Mac" : host, + host: host, + port: port, + token: token, + caCertificate: ca + ) + } + + // MARK: - Auth Header + + /// Authorization header value for REST requests. + var authorizationHeader: String { + "Bearer \(token)" + } + + // MARK: - Private Helpers + + private func makeURL( + scheme: String, + path: String = "", + queryItems: [URLQueryItem] = [] + ) -> URL? 
{
+        var components = URLComponents()
+        components.scheme = scheme
+        components.host = host
+        components.port = port
+        components.path = path
+        components.queryItems = queryItems.isEmpty ? nil : queryItems
+        return components.url
+    }
+
+    private static func isValidHost(_ host: String) -> Bool {
+        guard !host.isEmpty,
+              host.rangeOfCharacter(from: .whitespacesAndNewlines) == nil
+        else {
+            return false
+        }
+
+        var components = URLComponents()
+        components.scheme = "http"
+        components.host = host
+        return components.url != nil
+    }
+}
diff --git a/ios/PPGMobile/PPGMobile/Models/ServerConnectionTests.swift b/ios/PPGMobile/PPGMobile/Models/ServerConnectionTests.swift
new file mode 100644
index 0000000..a1ff952
--- /dev/null
+++ b/ios/PPGMobile/PPGMobile/Models/ServerConnectionTests.swift
@@ -0,0 +1,151 @@
+import XCTest
+@testable import PPGMobile
+
+final class ServerConnectionTests: XCTestCase {
+
+    // MARK: - fromQRCode
+
+    func testValidQRCodeParsesCorrectly() {
+        let qr = "ppg://connect?host=192.168.1.10&port=7700&token=abc123"
+        let conn = ServerConnection.fromQRCode(qr)
+
+        XCTAssertNotNil(conn)
+        XCTAssertEqual(conn?.host, "192.168.1.10")
+        XCTAssertEqual(conn?.port, 7700)
+        XCTAssertEqual(conn?.token, "abc123")
+        XCTAssertNil(conn?.caCertificate)
+    }
+
+    func testValidQRCodeWithCAParsesCorrectly() {
+        // "dGVzdA==" is base64 for "test"
+        let qr = "ppg://connect?host=myhost&port=8080&token=secret&ca=dGVzdA=="
+        let conn = ServerConnection.fromQRCode(qr)
+
+        XCTAssertNotNil(conn)
+        XCTAssertEqual(conn?.host, "myhost")
+        XCTAssertEqual(conn?.port, 8080)
+        XCTAssertEqual(conn?.token, "secret")
+        XCTAssertEqual(conn?.caCertificate, "dGVzdA==")
+    }
+
+    func testMissingHostReturnsNil() {
+        let qr = "ppg://connect?port=7700&token=abc123"
+        XCTAssertNil(ServerConnection.fromQRCode(qr))
+    }
+
+    func testEmptyHostReturnsNil() {
+        let qr = "ppg://connect?host=&port=7700&token=abc123"
+        XCTAssertNil(ServerConnection.fromQRCode(qr))
+    }
+
+    func testMissingTokenReturnsNil() {
+        let qr = 
"ppg://connect?host=myhost&port=7700" + XCTAssertNil(ServerConnection.fromQRCode(qr)) + } + + func testEmptyTokenReturnsNil() { + let qr = "ppg://connect?host=myhost&port=7700&token=" + XCTAssertNil(ServerConnection.fromQRCode(qr)) + } + + func testMissingPortDefaultsTo7700() { + let qr = "ppg://connect?host=myhost&token=abc123" + let conn = ServerConnection.fromQRCode(qr) + + XCTAssertNotNil(conn) + XCTAssertEqual(conn?.port, 7700) + } + + func testInvalidPortReturnsNil() { + XCTAssertNil(ServerConnection.fromQRCode("ppg://connect?host=myhost&port=0&token=abc123")) + XCTAssertNil(ServerConnection.fromQRCode("ppg://connect?host=myhost&port=70000&token=abc123")) + } + + func testInvalidHostReturnsNil() { + let qr = "ppg://connect?host=my%20host&port=7700&token=abc123" + XCTAssertNil(ServerConnection.fromQRCode(qr)) + } + + func testWrongSchemeReturnsNil() { + let qr = "http://connect?host=myhost&port=7700&token=abc123" + XCTAssertNil(ServerConnection.fromQRCode(qr)) + } + + func testWrongHostReturnsNil() { + let qr = "ppg://pair?host=myhost&port=7700&token=abc123" + XCTAssertNil(ServerConnection.fromQRCode(qr)) + } + + func testNonPPGStringReturnsNil() { + XCTAssertNil(ServerConnection.fromQRCode("https://example.com")) + XCTAssertNil(ServerConnection.fromQRCode("just some text")) + XCTAssertNil(ServerConnection.fromQRCode("")) + } + + func testDuplicateQueryParamsDoNotCrash() { + let qr = "ppg://connect?host=myhost&token=first&token=second&port=7700" + let conn = ServerConnection.fromQRCode(qr) + + XCTAssertNotNil(conn) + // Last value wins per uniquingKeysWith + XCTAssertEqual(conn?.token, "second") + } + + func testInvalidBase64CAIsDiscarded() { + let qr = "ppg://connect?host=myhost&port=7700&token=abc&ca=not-valid-base64!!!" 
+ let conn = ServerConnection.fromQRCode(qr)
+
+        XCTAssertNotNil(conn)
+        XCTAssertNil(conn?.caCertificate)
+    }
+
+    func testLocalhostNameMapping() {
+        let qr = "ppg://connect?host=0.0.0.0&port=7700&token=abc123"
+        let conn = ServerConnection.fromQRCode(qr)
+
+        XCTAssertEqual(conn?.name, "Local Mac")
+    }
+
+    func testNonLocalhostUsesHostAsName() {
+        let qr = "ppg://connect?host=workstation.local&port=7700&token=abc123"
+        let conn = ServerConnection.fromQRCode(qr)
+
+        XCTAssertEqual(conn?.name, "workstation.local")
+    }
+
+    // MARK: - URL construction
+
+    func testBaseURLUsesHTTPWithoutCA() {
+        let conn = ServerConnection(host: "myhost", port: 7700, token: "abc")
+        XCTAssertEqual(conn.baseURL?.absoluteString, "http://myhost:7700")
+    }
+
+    func testBaseURLUsesHTTPSWithCA() {
+        let conn = ServerConnection(host: "myhost", port: 7700, token: "abc", caCertificate: "dGVzdA==")
+        XCTAssertEqual(conn.baseURL?.absoluteString, "https://myhost:7700")
+    }
+
+    func testWsURLUsesWSSWithCA() {
+        let conn = ServerConnection(host: "myhost", port: 7700, token: "abc", caCertificate: "dGVzdA==")
+        XCTAssertEqual(conn.webSocketURL?.scheme, "wss")
+    }
+
+    func testWsURLPercentEncodesToken() {
+        let conn = ServerConnection(host: "myhost", port: 7700, token: "abc+def&ghi=jkl")
+        guard let url = conn.webSocketURL else {
+            XCTFail("Expected webSocketURL to be generated")
+            return
+        }
+
+        let components = URLComponents(url: url, resolvingAgainstBaseURL: false)
+        let tokenValue = components?.queryItems?.first(where: { $0.name == "token" })?.value
+        XCTAssertEqual(tokenValue, "abc+def&ghi=jkl")
+        XCTAssertEqual(components?.queryItems?.count, 1)
+    }
+
+    func testInvalidHostDoesNotCrashURLBuilding() {
+        let conn = ServerConnection(host: "bad host", port: 7700, token: "abc")
+        XCTAssertNil(conn.baseURL)
+        XCTAssertNil(conn.webSocketURL)
+    }
+}
diff --git a/ios/PPGMobile/PPGMobile/Networking/PPGClient.swift b/ios/PPGMobile/PPGMobile/Networking/PPGClient.swift
new file mode 100644
index 0000000..612d65e
--- /dev/null
+++ 
b/ios/PPGMobile/PPGMobile/Networking/PPGClient.swift @@ -0,0 +1,292 @@ +import Foundation + +// MARK: - Error Types + +enum PPGClientError: LocalizedError { + case notConfigured + case invalidURL(String) + case network(URLError) + case unauthorized + case notFound(String) + case conflict(String) + case serverError(Int, String) + case decodingError(DecodingError) + case invalidResponse + + var errorDescription: String? { + switch self { + case .notConfigured: + return "No server connection configured" + case .invalidURL(let path): + return "Invalid URL: \(path)" + case .network(let error): + return "Network error: \(error.localizedDescription)" + case .unauthorized: + return "Authentication failed — check your token" + case .notFound(let msg): + return "Not found: \(msg)" + case .conflict(let msg): + return "Conflict: \(msg)" + case .serverError(let code, let msg): + return "Server error (\(code)): \(msg)" + case .decodingError(let error): + return "Failed to decode response: \(error.localizedDescription)" + case .invalidResponse: + return "Invalid server response" + } + } +} + +// MARK: - TLS Delegate + +/// Allows connections to servers using a self-signed certificate +/// by trusting a pinned CA certificate bundled with the app. +private final class PinnedCertDelegate: NSObject, URLSessionDelegate, Sendable { + private let pinnedCert: SecCertificate? + + init(pinnedCertificateNamed name: String = "ppg-ca") { + if let url = Bundle.main.url(forResource: name, withExtension: "der"), + let data = try? Data(contentsOf: url) { + pinnedCert = SecCertificateCreateWithData(nil, data as CFData) + } else { + pinnedCert = nil + } + } + + func urlSession( + _ session: URLSession, + didReceive challenge: URLAuthenticationChallenge, + completionHandler: @escaping (URLSession.AuthChallengeDisposition, URLCredential?) 
-> Void + ) { + guard challenge.protectionSpace.authenticationMethod == NSURLAuthenticationMethodServerTrust, + let serverTrust = challenge.protectionSpace.serverTrust, + let pinned = pinnedCert else { + completionHandler(.performDefaultHandling, nil) + return + } + + // Set the pinned CA as the sole anchor for evaluation + SecTrustSetAnchorCertificates(serverTrust, [pinned] as CFArray) + SecTrustSetAnchorCertificatesOnly(serverTrust, true) + + var error: CFError? + if SecTrustEvaluateWithError(serverTrust, &error) { + completionHandler(.useCredential, URLCredential(trust: serverTrust)) + } else { + completionHandler(.cancelAuthenticationChallenge, nil) + } + } +} + +// MARK: - REST Client + +/// Thread-safe REST client for the ppg serve API. +/// +/// Covers all 13 endpoints (7 read + 6 write) with async/await, +/// bearer token auth, and optional pinned-CA TLS trust. +actor PPGClient { + private let session: URLSession + private var connection: ServerConnection? + + init() { + let config = URLSessionConfiguration.default + config.timeoutIntervalForRequest = 15 + config.timeoutIntervalForResource = 30 + let delegate = PinnedCertDelegate() + self.session = URLSession(configuration: config, delegate: delegate, delegateQueue: nil) + } + + func configure(connection: ServerConnection) { + self.connection = connection + } + + // MARK: - Connection Test + + /// Verifies reachability and auth by hitting the status endpoint. + /// Returns `true` on success, throws on failure. 
+ @discardableResult + func testConnection() async throws -> Bool { + let _: Manifest = try await get("/api/status") + return true + } + + // MARK: - Read API + + func fetchStatus() async throws -> Manifest { + return try await get("/api/status") + } + + func fetchWorktree(id: String) async throws -> WorktreeEntry { + return try await get("/api/worktrees/\(id)") + } + + func fetchDiff(worktreeId: String) async throws -> DiffResponse { + return try await get("/api/worktrees/\(worktreeId)/diff") + } + + func fetchAgentLogs(agentId: String, lines: Int = 200) async throws -> LogsResponse { + return try await get("/api/agents/\(agentId)/logs?lines=\(lines)") + } + + func fetchConfig() async throws -> Config { + return try await get("/api/config") + } + + func fetchTemplates() async throws -> TemplatesResponse { + return try await get("/api/templates") + } + + func fetchPrompts() async throws -> PromptsResponse { + return try await get("/api/prompts") + } + + func fetchSwarms() async throws -> SwarmsResponse { + return try await get("/api/swarms") + } + + // MARK: - Write API + + func spawn( + name: String?, + agent: String?, + prompt: String, + template: String? = nil, + base: String? 
= nil, + count: Int = 1 + ) async throws -> SpawnResponse { + var body: [String: Any] = ["prompt": prompt, "count": count] + if let name { body["name"] = name } + if let agent { body["agent"] = agent } + if let template { body["template"] = template } + if let base { body["base"] = base } + return try await post("/api/spawn", body: body) + } + + func sendToAgent(agentId: String, text: String, keys: Bool = false, enter: Bool = true) async throws { + var body: [String: Any] = ["text": text, "keys": keys] + if !enter { body["enter"] = false } + let _: SuccessResponse = try await post("/api/agents/\(agentId)/send", body: body) + } + + func killAgent(agentId: String) async throws { + let body: [String: Any] = [:] + let _: SuccessResponse = try await post("/api/agents/\(agentId)/kill", body: body) + } + + func restartAgent(agentId: String, prompt: String? = nil) async throws { + var body: [String: Any] = [:] + if let prompt { body["prompt"] = prompt } + let _: SuccessResponse = try await post("/api/agents/\(agentId)/restart", body: body) + } + + func mergeWorktree(worktreeId: String, strategy: String = "squash", force: Bool = false) async throws { + let body: [String: Any] = ["strategy": strategy, "force": force] + let _: SuccessResponse = try await post("/api/worktrees/\(worktreeId)/merge", body: body) + } + + func killWorktree(worktreeId: String) async throws { + let body: [String: Any] = [:] + let _: SuccessResponse = try await post("/api/worktrees/\(worktreeId)/kill", body: body) + } + + func createPR(worktreeId: String, title: String? = nil, body prBody: String? 
= nil, draft: Bool = false) async throws -> PRResponse { + var body: [String: Any] = ["draft": draft] + if let title { body["title"] = title } + if let prBody { body["body"] = prBody } + return try await post("/api/worktrees/\(worktreeId)/pr", body: body) + } + + // MARK: - Private Helpers + + private func get(_ path: String) async throws -> T { + let request = try makeRequest(path: path, method: "GET") + let (data, response) = try await performRequest(request) + try validateResponse(response, data: data) + return try decode(data) + } + + private func post(_ path: String, body: [String: Any]) async throws -> T { + var request = try makeRequest(path: path, method: "POST") + request.httpBody = try JSONSerialization.data(withJSONObject: body) + request.setValue("application/json", forHTTPHeaderField: "Content-Type") + let (data, response) = try await performRequest(request) + try validateResponse(response, data: data) + return try decode(data) + } + + private func makeRequest(path: String, method: String) throws -> URLRequest { + guard let conn = connection else { + throw PPGClientError.notConfigured + } + guard let url = URL(string: path, relativeTo: conn.baseURL) else { + throw PPGClientError.invalidURL(path) + } + var request = URLRequest(url: url) + request.httpMethod = method + request.setValue("Bearer \(conn.token)", forHTTPHeaderField: "Authorization") + return request + } + + private func performRequest(_ request: URLRequest) async throws -> (Data, URLResponse) { + do { + return try await session.data(for: request) + } catch let urlError as URLError { + throw PPGClientError.network(urlError) + } catch { + throw error + } + } + + private func decode(_ data: Data) throws -> T { + do { + return try JSONDecoder().decode(T.self, from: data) + } catch let decodingError as DecodingError { + throw PPGClientError.decodingError(decodingError) + } catch { + throw error + } + } + + private func validateResponse(_ response: URLResponse, data: Data) throws { + guard let 
http = response as? HTTPURLResponse else { + throw PPGClientError.invalidResponse + } + guard (200...299).contains(http.statusCode) else { + let msg = (try? JSONDecoder().decode(ErrorResponse.self, from: data))?.error + ?? String(data: data, encoding: .utf8) + ?? "Unknown error" + + switch http.statusCode { + case 401: + throw PPGClientError.unauthorized + case 404: + throw PPGClientError.notFound(msg) + case 409: + throw PPGClientError.conflict(msg) + default: + throw PPGClientError.serverError(http.statusCode, msg) + } + } + } +} + +// MARK: - Response Types (used only by PPGClient) + +private struct SuccessResponse: Decodable { + let success: Bool? + + init(from decoder: Decoder) throws { + let container = try? decoder.container(keyedBy: CodingKeys.self) + success = try container?.decodeIfPresent(Bool.self, forKey: .success) + } + + private enum CodingKeys: String, CodingKey { + case success + } +} + +struct PRResponse: Codable { + let success: Bool + let worktreeId: String + let prUrl: String +} diff --git a/ios/PPGMobile/PPGMobile/Networking/TokenStorage.swift b/ios/PPGMobile/PPGMobile/Networking/TokenStorage.swift new file mode 100644 index 0000000..1b29555 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Networking/TokenStorage.swift @@ -0,0 +1,107 @@ +import Foundation +import Security + +// MARK: - Error Types + +enum KeychainError: LocalizedError { + case itemNotFound + case unexpectedStatus(OSStatus) + case invalidData + + var errorDescription: String? 
{ + switch self { + case .itemNotFound: + return "Token not found in keychain" + case .unexpectedStatus(let status): + return "Keychain operation failed with status \(status)" + case .invalidData: + return "Token data could not be encoded or decoded" + } + } +} + +// MARK: - Protocol + +protocol TokenStoring { + func save(token: String, for connectionId: UUID) throws + func load(for connectionId: UUID) throws -> String + func delete(for connectionId: UUID) throws +} + +// MARK: - Implementation + +struct TokenStorage: TokenStoring { + private let serviceName = "com.ppg.mobile" + + func save(token: String, for connectionId: UUID) throws { + guard let data = token.data(using: .utf8) else { + throw KeychainError.invalidData + } + + var query = baseQuery(for: connectionId) + query[kSecValueData as String] = data + query[kSecAttrAccessible as String] = kSecAttrAccessibleWhenUnlocked + + let status = SecItemAdd(query as CFDictionary, nil) + + switch status { + case errSecSuccess: + return + case errSecDuplicateItem: + let updateAttributes: [String: Any] = [ + kSecValueData as String: data, + kSecAttrAccessible as String: kSecAttrAccessibleWhenUnlocked + ] + let updateStatus = SecItemUpdate( + baseQuery(for: connectionId) as CFDictionary, + updateAttributes as CFDictionary + ) + guard updateStatus == errSecSuccess else { + throw KeychainError.unexpectedStatus(updateStatus) + } + default: + throw KeychainError.unexpectedStatus(status) + } + } + + func load(for connectionId: UUID) throws -> String { + var query = baseQuery(for: connectionId) + query[kSecReturnData as String] = true + query[kSecMatchLimit as String] = kSecMatchLimitOne + + var result: AnyObject? + let status = SecItemCopyMatching(query as CFDictionary, &result) + + guard status == errSecSuccess else { + if status == errSecItemNotFound { + throw KeychainError.itemNotFound + } + throw KeychainError.unexpectedStatus(status) + } + + guard let data = result as? 
Data, + let token = String(data: data, encoding: .utf8) else { + throw KeychainError.invalidData + } + + return token + } + + func delete(for connectionId: UUID) throws { + let status = SecItemDelete(baseQuery(for: connectionId) as CFDictionary) + + guard status == errSecSuccess || status == errSecItemNotFound else { + throw KeychainError.unexpectedStatus(status) + } + } + + // MARK: - Private + + private func baseQuery(for connectionId: UUID) -> [String: Any] { + [ + kSecClass as String: kSecClassGenericPassword, + kSecAttrService as String: serviceName, + kSecAttrAccount as String: connectionId.uuidString + ] + } +} diff --git a/ios/PPGMobile/PPGMobile/Networking/WebSocketManager.swift b/ios/PPGMobile/PPGMobile/Networking/WebSocketManager.swift new file mode 100644 index 0000000..af13821 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Networking/WebSocketManager.swift @@ -0,0 +1,391 @@ +import Foundation + +// MARK: - Connection State + +enum WebSocketConnectionState: Equatable, Sendable { + case disconnected + case connecting + case connected + case reconnecting(attempt: Int) + + var isConnected: Bool { self == .connected } + + var isReconnecting: Bool { + if case .reconnecting = self { return true } + return false + } +} + +// MARK: - Server Events + +enum WebSocketEvent: Sendable { + case manifestUpdated(Manifest) + case agentStatusChanged(agentId: String, status: AgentStatus) + case worktreeStatusChanged(worktreeId: String, status: String) + case pong + case unknown(type: String, payload: String) +} + +// MARK: - Server Message (for terminal streaming) + +struct ServerMessage { + let type: String + let agentId: String? + let data: String? +} + +// MARK: - WebSocketManager + +final class WebSocketManager: NSObject, @unchecked Sendable, URLSessionWebSocketDelegate { + + // MARK: - Callbacks + + var onStateChange: ((WebSocketConnectionState) -> Void)? + var onEvent: ((WebSocketEvent) -> Void)? + var onMessage: ((ServerMessage) -> Void)? 
+ + // MARK: - Configuration + + private let url: URL + private let maxReconnectDelay: TimeInterval = 30.0 + private let baseReconnectDelay: TimeInterval = 1.0 + private let pingInterval: TimeInterval = 30.0 + + // MARK: - State + + private let queue = DispatchQueue(label: "ppg.websocket-manager", qos: .utility) + private var _state: WebSocketConnectionState = .disconnected + + var state: WebSocketConnectionState { + queue.sync { _state } + } + + private var session: URLSession? + private var task: URLSessionWebSocketTask? + private var pingTimer: DispatchSourceTimer? + private var reconnectWorkItem: DispatchWorkItem? + private var reconnectAttempt = 0 + private var intentionalDisconnect = false + private var isHandlingConnectionLoss = false + + // MARK: - Init + + init(url: URL) { + self.url = url + super.init() + } + + convenience init?(urlString: String) { + guard let url = URL(string: urlString) else { return nil } + self.init(url: url) + } + + deinit { + intentionalDisconnect = true + pingTimer?.cancel() + pingTimer = nil + task?.cancel(with: .goingAway, reason: nil) + task = nil + session?.invalidateAndCancel() + session = nil + } + + // MARK: - Public API + + func connect() { + queue.async { [weak self] in + self?.doConnect() + } + } + + func disconnect() { + queue.async { [weak self] in + self?.doDisconnect() + } + } + + func sendTerminalInput(agentId: String, text: String) { + let dict: [String: String] = ["type": "terminal_input", "agentId": agentId, "data": text] + guard let data = try? JSONSerialization.data(withJSONObject: dict, options: [.sortedKeys]), + let str = String(data: data, encoding: .utf8) else { return } + queue.async { [weak self] in + self?.doSend(str) + } + } + + func subscribeTerminal(agentId: String) { + let dict: [String: String] = ["type": "subscribe", "channel": "terminal:\(agentId)"] + guard let data = try? 
JSONSerialization.data(withJSONObject: dict, options: [.sortedKeys]), + let str = String(data: data, encoding: .utf8) else { return } + queue.async { [weak self] in + self?.doSend(str) + } + } + + func unsubscribeTerminal(agentId: String) { + let dict: [String: String] = ["type": "unsubscribe", "channel": "terminal:\(agentId)"] + guard let data = try? JSONSerialization.data(withJSONObject: dict, options: [.sortedKeys]), + let str = String(data: data, encoding: .utf8) else { return } + queue.async { [weak self] in + self?.doSend(str) + } + } + + // MARK: - Connection Lifecycle + + private func doConnect() { + guard _state == .disconnected || _state.isReconnecting else { return } + + intentionalDisconnect = false + isHandlingConnectionLoss = false + reconnectWorkItem?.cancel() + reconnectWorkItem = nil + + if _state.isReconnecting { + // Keep attempt counter + } else { + reconnectAttempt = 0 + setState(.connecting) + } + + let config = URLSessionConfiguration.default + config.waitsForConnectivity = true + session = URLSession(configuration: config, delegate: self, delegateQueue: nil) + + let wsTask = session!.webSocketTask(with: url) + task = wsTask + wsTask.resume() + } + + private func doDisconnect() { + intentionalDisconnect = true + isHandlingConnectionLoss = false + reconnectWorkItem?.cancel() + reconnectWorkItem = nil + stopPingTimer() + task?.cancel(with: .goingAway, reason: nil) + task = nil + session?.invalidateAndCancel() + session = nil + reconnectAttempt = 0 + setState(.disconnected) + } + + private func setState(_ newState: WebSocketConnectionState) { + guard _state != newState else { return } + _state = newState + let callback = onStateChange + DispatchQueue.main.async { + callback?(newState) + } + } + + // MARK: - Sending + + private func doSend(_ text: String) { + guard _state == .connected, let task = task else { return } + task.send(.string(text)) { error in + if let error = error { + NSLog("[WebSocketManager] send error: 
\(error.localizedDescription)") + } + } + } + + // MARK: - Receiving + + private func listenForMessages(for expectedTask: URLSessionWebSocketTask) { + expectedTask.receive { [weak self] result in + guard let self = self else { return } + self.queue.async { + guard self.task === expectedTask else { return } + switch result { + case .success(let message): + self.handleMessage(message) + self.listenForMessages(for: expectedTask) + case .failure(let error): + if !self.intentionalDisconnect { + NSLog("[WebSocketManager] receive error: \(error.localizedDescription)") + self.handleConnectionLost() + } + } + } + } + } + + private func handleMessage(_ message: URLSessionWebSocketTask.Message) { + let text: String + switch message { + case .string(let s): + text = s + case .data(let d): + guard let s = String(data: d, encoding: .utf8) else { return } + text = s + @unknown default: + return + } + + // Parse as generic ServerMessage for terminal streaming + if let serverMsg = parseServerMessage(text) { + let callback = onMessage + DispatchQueue.main.async { + callback?(serverMsg) + } + } + + // Parse as typed event + if let event = parseEvent(text) { + let callback = onEvent + DispatchQueue.main.async { + callback?(event) + } + } + } + + // MARK: - Event Parsing + + private func parseServerMessage(_ text: String) -> ServerMessage? { + guard let data = text.data(using: .utf8), + let json = try? JSONSerialization.jsonObject(with: data) as? [String: Any], + let type = json["type"] as? String else { + return nil + } + return ServerMessage( + type: type, + agentId: json["agentId"] as? String, + data: json["data"] as? String + ) + } + + func parseEvent(_ text: String) -> WebSocketEvent? { + guard let data = text.data(using: .utf8), + let json = try? JSONSerialization.jsonObject(with: data) as? [String: Any], + let type = json["type"] as? String else { + return nil + } + + switch type { + case "manifest_updated": + if let payloadData = json["manifest"], + let payloadJSON = try? 
JSONSerialization.data(withJSONObject: payloadData), + let manifest = try? JSONDecoder().decode(Manifest.self, from: payloadJSON) { + return .manifestUpdated(manifest) + } + return .unknown(type: type, payload: text) + + case "agent_status_changed": + if let agentId = json["agentId"] as? String, + let statusRaw = json["status"] as? String, + let status = AgentStatus(rawValue: statusRaw) { + return .agentStatusChanged(agentId: agentId, status: status) + } + return .unknown(type: type, payload: text) + + case "worktree_status_changed": + if let worktreeId = json["worktreeId"] as? String, + let status = json["status"] as? String { + return .worktreeStatusChanged(worktreeId: worktreeId, status: status) + } + return .unknown(type: type, payload: text) + + case "pong": + return .pong + + default: + return .unknown(type: type, payload: text) + } + } + + // MARK: - Keepalive Ping + + private func startPingTimer() { + stopPingTimer() + let timer = DispatchSource.makeTimerSource(queue: queue) + timer.schedule(deadline: .now() + pingInterval, repeating: pingInterval) + timer.setEventHandler { [weak self] in + self?.sendPing() + } + timer.resume() + pingTimer = timer + } + + private func stopPingTimer() { + pingTimer?.cancel() + pingTimer = nil + } + + private func sendPing() { + task?.sendPing { [weak self] error in + if let error = error { + NSLog("[WebSocketManager] ping error: \(error.localizedDescription)") + self?.queue.async { self?.handleConnectionLost() } + } + } + } + + // MARK: - Reconnect + + private func handleConnectionLost() { + guard !intentionalDisconnect else { return } + guard !isHandlingConnectionLoss else { return } + isHandlingConnectionLoss = true + stopPingTimer() + task?.cancel(with: .abnormalClosure, reason: nil) + task = nil + session?.invalidateAndCancel() + session = nil + scheduleReconnect() + } + + private func scheduleReconnect() { + reconnectAttempt += 1 + setState(.reconnecting(attempt: reconnectAttempt)) + + let delay = min(baseReconnectDelay 
* pow(2.0, Double(reconnectAttempt - 1)), maxReconnectDelay) + NSLog("[WebSocketManager] reconnecting in %.1fs (attempt %d)", delay, reconnectAttempt) + + let workItem = DispatchWorkItem { [weak self] in + guard let self = self, !self.intentionalDisconnect else { return } + self.reconnectWorkItem = nil + self.doConnect() + } + reconnectWorkItem?.cancel() + reconnectWorkItem = workItem + queue.asyncAfter(deadline: .now() + delay, execute: workItem) + } + + // MARK: - URLSessionWebSocketDelegate + + func urlSession(_ session: URLSession, webSocketTask: URLSessionWebSocketTask, didOpenWithProtocol protocol: String?) { + queue.async { [weak self] in + guard let self = self else { return } + guard self.task === webSocketTask else { return } + self.reconnectAttempt = 0 + self.isHandlingConnectionLoss = false + self.setState(.connected) + self.startPingTimer() + self.listenForMessages(for: webSocketTask) + } + } + + func urlSession(_ session: URLSession, webSocketTask: URLSessionWebSocketTask, didCloseWith closeCode: URLSessionWebSocketTask.CloseCode, reason: Data?) { + queue.async { [weak self] in + guard let self = self else { return } + guard self.task === webSocketTask else { return } + if self.intentionalDisconnect { + self.setState(.disconnected) + } else { + self.handleConnectionLost() + } + } + } + + func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: (any Error)?) { + guard error != nil else { return } + queue.async { [weak self] in + guard let self = self, !self.intentionalDisconnect else { return } + guard let webSocketTask = task as? 
URLSessionWebSocketTask, + self.task === webSocketTask else { return } + self.handleConnectionLost() + } + } +} diff --git a/ios/PPGMobile/PPGMobile/State/AppState.swift b/ios/PPGMobile/PPGMobile/State/AppState.swift new file mode 100644 index 0000000..c50dd7b --- /dev/null +++ b/ios/PPGMobile/PPGMobile/State/AppState.swift @@ -0,0 +1,314 @@ +import Foundation + +// MARK: - UserDefaults Keys + +private enum DefaultsKey { + static let savedConnections = "ppg_saved_connections" + static let lastConnectionId = "ppg_last_connection_id" +} + +/// Codable projection of ServerConnection without the token. +/// Tokens are stored separately in Keychain via TokenStorage. +private struct PersistedConnection: Codable { + let id: UUID + var name: String + var host: String + var port: Int + var caCertificate: String? + + init(from connection: ServerConnection) { + self.id = connection.id + self.name = connection.name + self.host = connection.host + self.port = connection.port + self.caCertificate = connection.caCertificate + } + + func toServerConnection(token: String) -> ServerConnection { + ServerConnection( + id: id, + name: name, + host: host, + port: port, + token: token, + caCertificate: caCertificate + ) + } +} + +// MARK: - AppState + +/// Root application state managing server connections and the REST/WS lifecycle. +/// +/// `AppState` is the single entry point for connection management. It persists +/// connection metadata to `UserDefaults` and tokens to Keychain via `TokenStorage`. +/// Auto-connects to the last-used server on launch and coordinates `PPGClient` +/// (REST) and `WebSocketManager` (WS) through `ManifestStore`. +@MainActor +@Observable +final class AppState { + + // MARK: - Connection State + + /// All saved server connections. + private(set) var connections: [ServerConnection] = [] + + /// The currently active connection, or `nil` if disconnected. + private(set) var activeConnection: ServerConnection? + + /// Whether a connection attempt is in progress. 
+ private(set) var isConnecting = false + + /// User-facing error message, cleared on next connect attempt. + private(set) var errorMessage: String? + + // MARK: - WebSocket State + + /// Current WebSocket connection state. + private(set) var webSocketState: WebSocketConnectionState = .disconnected + + // MARK: - Connection Status (for Settings UI) + + var connectionStatus: ConnectionState { + if isConnecting { return .connecting } + if let error = errorMessage { return .error(error) } + if activeConnection != nil { return .connected } + return .disconnected + } + + // MARK: - Dependencies + + let client = PPGClient() + let manifestStore: ManifestStore + private(set) var wsManager: WebSocketManager? + + private let tokenStorage = TokenStorage() + + // MARK: - Computed + + var manifest: Manifest? { manifestStore.manifest } + var templates: [String] { [] } + + // MARK: - Init + + init() { + self.manifestStore = ManifestStore(client: client) + loadConnections() + } + + // MARK: - Auto-Connect + + /// Connects to the last-used server if one exists. 
+ func autoConnect() async { + guard let lastId = UserDefaults.standard.string(forKey: DefaultsKey.lastConnectionId), + let uuid = UUID(uuidString: lastId), + let connection = connections.first(where: { $0.id == uuid }) else { + return + } + await connect(to: connection) + } + + // MARK: - Connect / Disconnect + + func connect(to connection: ServerConnection) async { + guard !isConnecting else { return } + + if activeConnection != nil { + disconnect() + } + + isConnecting = true + errorMessage = nil + + await client.configure(connection: connection) + + do { + try await client.testConnection() + } catch { + isConnecting = false + errorMessage = "Cannot reach server: \(error.localizedDescription)" + return + } + + activeConnection = connection + UserDefaults.standard.set(connection.id.uuidString, forKey: DefaultsKey.lastConnectionId) + + startWebSocket(for: connection) + await manifestStore.refresh() + + isConnecting = false + } + + func disconnect() { + stopWebSocket() + activeConnection = nil + manifestStore.clear() + webSocketState = .disconnected + } + + // MARK: - Agent Actions + + func killAgent(_ agentId: String) async { + do { + try await client.killAgent(agentId: agentId) + await manifestStore.refresh() + } catch { + errorMessage = "Failed to kill agent: \(error.localizedDescription)" + } + } + + // MARK: - Connection CRUD + + func addConnection(_ connection: ServerConnection) { + // Remove duplicate host:port + if let existing = connections.first(where: { $0.host == connection.host && $0.port == connection.port }), + existing.id != connection.id { + try? 
tokenStorage.delete(for: existing.id) + } + + if let index = connections.firstIndex(where: { $0.host == connection.host && $0.port == connection.port }) { + connections[index] = connection + } else { + connections.append(connection) + } + saveConnections() + } + + func addConnectionAndConnect(_ connection: ServerConnection) async { + addConnection(connection) + await connect(to: connection) + } + + func removeConnection(_ connection: ServerConnection) { + if activeConnection?.id == connection.id { + disconnect() + } + connections.removeAll { $0.id == connection.id } + try? tokenStorage.delete(for: connection.id) + saveConnections() + + if let lastId = UserDefaults.standard.string(forKey: DefaultsKey.lastConnectionId), + lastId == connection.id.uuidString { + UserDefaults.standard.removeObject(forKey: DefaultsKey.lastConnectionId) + } + } + + func updateConnection(_ connection: ServerConnection) async { + guard let index = connections.firstIndex(where: { $0.id == connection.id }) else { return } + connections[index] = connection + saveConnections() + + if activeConnection?.id == connection.id { + await connect(to: connection) + } + } + + // MARK: - Error Handling + + func clearError() { + errorMessage = nil + } + + // MARK: - WebSocket Lifecycle + + private func startWebSocket(for connection: ServerConnection) { + stopWebSocket() + + guard let wsURL = connection.webSocketURL else { return } + let ws = WebSocketManager(url: wsURL) + ws.onStateChange = { [weak self] state in + Task { @MainActor in + self?.webSocketState = state + } + } + ws.onEvent = { [weak self] event in + Task { @MainActor in + self?.handleWebSocketEvent(event) + } + } + wsManager = ws + ws.connect() + } + + private func stopWebSocket() { + wsManager?.disconnect() + wsManager = nil + } + + private func handleWebSocketEvent(_ event: WebSocketEvent) { + switch event { + case .manifestUpdated(let manifest): + manifestStore.applyManifest(manifest) + + case .agentStatusChanged(let agentId, let status): 
+ manifestStore.updateAgentStatus(agentId: agentId, status: status) + + case .worktreeStatusChanged(let worktreeId, let statusRaw): + if let status = WorktreeStatus(rawValue: statusRaw) { + manifestStore.updateWorktreeStatus(worktreeId: worktreeId, status: status) + } + + case .pong: + break + + case .unknown: + break + } + } + + // MARK: - Persistence + + private func loadConnections() { + guard let data = UserDefaults.standard.data(forKey: DefaultsKey.savedConnections) else { + return + } + + let persisted: [PersistedConnection] + do { + persisted = try JSONDecoder().decode([PersistedConnection].self, from: data) + } catch { + errorMessage = "Failed to load saved connections." + return + } + + var loaded: [ServerConnection] = [] + var failedTokenLoad = false + for entry in persisted { + do { + let token = try tokenStorage.load(for: entry.id) + loaded.append(entry.toServerConnection(token: token)) + } catch { + failedTokenLoad = true + } + } + connections = loaded + + if failedTokenLoad { + errorMessage = "Some saved connection tokens could not be loaded." + } + } + + private func saveConnections() { + let persisted = connections.map { PersistedConnection(from: $0) } + do { + let data = try JSONEncoder().encode(persisted) + UserDefaults.standard.set(data, forKey: DefaultsKey.savedConnections) + } catch { + errorMessage = "Failed to save connections." + return + } + + var failedTokenSave = false + for connection in connections { + do { + try tokenStorage.save(token: connection.token, for: connection.id) + } catch { + failedTokenSave = true + } + } + + if failedTokenSave { + errorMessage = "Some connection tokens could not be saved." 
+ } + } +} diff --git a/ios/PPGMobile/PPGMobile/State/ManifestStore.swift b/ios/PPGMobile/PPGMobile/State/ManifestStore.swift new file mode 100644 index 0000000..1c065a7 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/State/ManifestStore.swift @@ -0,0 +1,121 @@ +import Foundation + +// MARK: - ManifestStore + +/// Caches the ppg manifest and applies incremental WebSocket updates. +/// +/// `ManifestStore` owns the manifest data and provides read access to views. +/// It is updated either by a full REST fetch or by individual WebSocket events +/// (agent/worktree status changes) to keep the UI responsive without polling. +@MainActor +@Observable +final class ManifestStore { + + // MARK: - Published State + + /// The cached manifest, or `nil` if not yet loaded. + private(set) var manifest: Manifest? + + /// Whether a fetch is currently in progress. + private(set) var isLoading = false + + /// Last error from a fetch or WebSocket update. + private(set) var error: String? + + /// Timestamp of the last successful refresh. + private(set) var lastRefreshed: Date? + + // MARK: - Dependencies + + private let client: PPGClient + + // MARK: - Init + + init(client: PPGClient) { + self.client = client + } + + // MARK: - Full Refresh + + /// Fetches the full manifest from the REST API and replaces the cache. + func refresh() async { + isLoading = true + error = nil + defer { isLoading = false } + + do { + let fetched = try await client.fetchStatus() + manifest = fetched + lastRefreshed = Date() + } catch { + self.error = error.localizedDescription + } + } + + // MARK: - Incremental Updates + + /// Applies a full manifest snapshot received from WebSocket. + func applyManifest(_ updated: Manifest) { + manifest = updated + lastRefreshed = Date() + error = nil + } + + /// Updates a single agent's status in the cached manifest. 
+ func updateAgentStatus(agentId: String, status: AgentStatus) { + guard var m = manifest else { return } + for (wtId, var worktree) in m.worktrees { + if var agent = worktree.agents[agentId] { + agent.status = status + worktree.agents[agentId] = agent + m.worktrees[wtId] = worktree + manifest = m + lastRefreshed = Date() + error = nil + return + } + } + } + + /// Updates a single worktree's status in the cached manifest. + func updateWorktreeStatus(worktreeId: String, status: WorktreeStatus) { + guard var m = manifest, + var worktree = m.worktrees[worktreeId] else { return } + worktree.status = status + m.worktrees[worktreeId] = worktree + manifest = m + lastRefreshed = Date() + error = nil + } + + // MARK: - Clear + + /// Resets the store to its initial empty state. + func clear() { + manifest = nil + isLoading = false + error = nil + lastRefreshed = nil + } + + // MARK: - Convenience + + /// All worktrees sorted by creation date (newest first). + var sortedWorktrees: [WorktreeEntry] { + manifest?.sortedWorktrees ?? [] + } + + /// All agents across all worktrees. + var allAgents: [AgentEntry] { + manifest?.allAgents ?? [] + } + + /// Counts of agents by status. + var agentCounts: [AgentStatus: Int] { + var counts: [AgentStatus: Int] = [:] + for agent in allAgents { + counts[agent.status, default: 0] += 1 + } + return counts + } +} diff --git a/ios/PPGMobile/PPGMobile/Views/Dashboard/AgentRow.swift b/ios/PPGMobile/PPGMobile/Views/Dashboard/AgentRow.swift new file mode 100644 index 0000000..cdebad2 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Views/Dashboard/AgentRow.swift @@ -0,0 +1,103 @@ +import SwiftUI + +struct AgentRow: View { + let agent: AgentEntry + var onKill: (() -> Void)? + var onRestart: (() -> Void)? 
+ + @State private var confirmingKill = false + + var body: some View { + VStack(alignment: .leading, spacing: 6) { + HStack { + Image(systemName: agent.status.icon) + .foregroundStyle(agent.status.color) + .font(.body) + + VStack(alignment: .leading, spacing: 1) { + Text(agent.name) + .font(.subheadline) + .fontWeight(.medium) + + Text(agent.agentType) + .font(.caption) + .foregroundStyle(.secondary) + } + + Spacer() + + statusLabel + } + + Text(agent.prompt) + .font(.caption) + .foregroundStyle(.secondary) + .lineLimit(2) + + HStack { + if let date = agent.startDate { + Text(date, style: .relative) + .font(.caption2) + .foregroundStyle(.tertiary) + } + + Spacer() + + actionButtons + } + } + .padding(.vertical, 4) + .confirmationDialog("Kill Agent", isPresented: $confirmingKill) { + if let onKill { + Button("Kill", role: .destructive) { + onKill() + } + } + Button("Cancel", role: .cancel) {} + } message: { + Text("Kill agent \"\(agent.name)\"? This cannot be undone.") + } + } + + // MARK: - Status Label + + private var statusLabel: some View { + Text(agent.status.label) + .font(.caption) + .fontWeight(.medium) + .padding(.horizontal, 8) + .padding(.vertical, 3) + .background(agent.status.color.opacity(0.12)) + .foregroundStyle(agent.status.color) + .clipShape(Capsule()) + } + + // MARK: - Action Buttons + + @ViewBuilder + private var actionButtons: some View { + HStack(spacing: 12) { + if agent.status.isActive, onKill != nil { + Button { + confirmingKill = true + } label: { + Image(systemName: "stop.fill") + .font(.caption) + .foregroundStyle(.red) + } + .buttonStyle(.borderless) + } + + if (agent.status == .failed || agent.status == .killed), let onRestart { + Button { + onRestart() + } label: { + Image(systemName: "arrow.counterclockwise") + .font(.caption) + .foregroundStyle(.blue) + } + .buttonStyle(.borderless) + } + } + } +} diff --git a/ios/PPGMobile/PPGMobile/Views/Dashboard/DashboardView.swift b/ios/PPGMobile/PPGMobile/Views/Dashboard/DashboardView.swift 
new file mode 100644 index 0000000..806a934 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Views/Dashboard/DashboardView.swift @@ -0,0 +1,128 @@ +import SwiftUI + +struct DashboardView: View { + @Environment(AppState.self) private var appState + + var body: some View { + NavigationStack { + Group { + switch appState.connectionStatus { + case .disconnected: + disconnectedView + case .connecting: + ProgressView("Connecting...") + case .connected: + if appState.manifestStore.sortedWorktrees.isEmpty { + emptyStateView + } else { + worktreeList + } + case .error(let message): + errorView(message) + } + } + .navigationTitle(appState.manifest?.sessionName ?? "PPG") + .toolbar { + ToolbarItem(placement: .topBarTrailing) { + Button { + Task { await appState.manifestStore.refresh() } + } label: { + Image(systemName: "arrow.clockwise") + } + .disabled(appState.activeConnection == nil) + } + } + } + } + + // MARK: - Worktree List + + private var worktreeList: some View { + let worktrees = appState.manifestStore.sortedWorktrees + + return List { + let active = worktrees.filter { !$0.status.isTerminal } + let completed = worktrees.filter { $0.status.isTerminal } + + if !active.isEmpty { + Section("Active") { + ForEach(active) { worktree in + NavigationLink(value: worktree.id) { + WorktreeCard(worktree: worktree) + } + } + } + } + + if !completed.isEmpty { + Section("Completed") { + ForEach(completed) { worktree in + NavigationLink(value: worktree.id) { + WorktreeCard(worktree: worktree) + } + } + } + } + } + .listStyle(.insetGrouped) + .refreshable { + await appState.manifestStore.refresh() + } + .navigationDestination(for: String.self) { worktreeId in + if appState.manifest?.worktrees[worktreeId] != nil { + WorktreeDetailView(worktreeId: worktreeId) + } else { + ContentUnavailableView( + "Worktree Not Found", + systemImage: "questionmark.folder", + description: Text("This worktree may have been removed.") + ) + } + } + } + + // MARK: - Empty State + + private var emptyStateView: 
some View { + ContentUnavailableView { + Label("No Worktrees", systemImage: "arrow.triangle.branch") + } description: { + Text("Spawn agents from the CLI to see them here.") + } actions: { + Button("Refresh") { + Task { await appState.manifestStore.refresh() } + } + } + } + + // MARK: - Disconnected State + + private var disconnectedView: some View { + ContentUnavailableView { + Label("Disconnected", systemImage: "wifi.slash") + } description: { + Text("Unable to reach the ppg service. Check that the CLI is running and the server is started.") + } actions: { + Button("Retry") { + Task { await appState.autoConnect() } + } + .buttonStyle(.borderedProminent) + } + } + + // MARK: - Error State + + private func errorView(_ message: String) -> some View { + ContentUnavailableView { + Label("Connection Error", systemImage: "exclamationmark.triangle") + } description: { + Text(message) + } actions: { + Button("Retry") { + appState.clearError() + Task { await appState.autoConnect() } + } + .buttonStyle(.borderedProminent) + } + } +} diff --git a/ios/PPGMobile/PPGMobile/Views/Dashboard/WorktreeCard.swift b/ios/PPGMobile/PPGMobile/Views/Dashboard/WorktreeCard.swift new file mode 100644 index 0000000..e6d819b --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Views/Dashboard/WorktreeCard.swift @@ -0,0 +1,79 @@ +import SwiftUI + +struct WorktreeCard: View { + let worktree: WorktreeEntry + + var body: some View { + VStack(alignment: .leading, spacing: 8) { + HStack { + VStack(alignment: .leading, spacing: 2) { + Text(worktree.name) + .font(.headline) + + Text(worktree.branch) + .font(.caption) + .foregroundStyle(.secondary) + .lineLimit(1) + } + + Spacer() + + statusBadge + } + + HStack(spacing: 12) { + Label("\(worktree.agents.count)", systemImage: "person.2") + .font(.subheadline) + .foregroundStyle(.secondary) + + if !activeAgents.isEmpty { + Label("\(activeAgents.count) active", systemImage: "bolt.fill") + .font(.caption) + .foregroundStyle(.green) + } + + if !failedAgents.isEmpty 
{ + Label("\(failedAgents.count) failed", systemImage: "exclamationmark.triangle.fill") + .font(.caption) + .foregroundStyle(.red) + } + + Spacer() + + if let date = worktree.createdDate { + Text(date, style: .relative) + .font(.caption2) + .foregroundStyle(.tertiary) + } + } + } + .padding(.vertical, 4) + } + + // MARK: - Status Badge + + private var statusBadge: some View { + HStack(spacing: 4) { + Image(systemName: worktree.status.icon) + .font(.caption2) + Text(worktree.status.label) + .font(.caption) + .fontWeight(.medium) + } + .padding(.horizontal, 8) + .padding(.vertical, 4) + .background(worktree.status.color.opacity(0.15)) + .foregroundStyle(worktree.status.color) + .clipShape(Capsule()) + } + + // MARK: - Helpers + + private var activeAgents: [AgentEntry] { + worktree.sortedAgents.filter { $0.status.isActive } + } + + private var failedAgents: [AgentEntry] { + worktree.sortedAgents.filter { $0.status == .failed } + } +} diff --git a/ios/PPGMobile/PPGMobile/Views/Dashboard/WorktreeDetailView.swift b/ios/PPGMobile/PPGMobile/Views/Dashboard/WorktreeDetailView.swift new file mode 100644 index 0000000..cf64176 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Views/Dashboard/WorktreeDetailView.swift @@ -0,0 +1,173 @@ +import SwiftUI + +struct WorktreeDetailView: View { + let worktreeId: String + @Environment(AppState.self) private var appState + + @State private var confirmingMerge = false + @State private var confirmingKill = false + + private var worktree: WorktreeEntry? { + appState.manifest?.worktrees[worktreeId] + } + + var body: some View { + Group { + if let worktree { + List { + infoSection(worktree) + agentsSection(worktree) + actionsSection(worktree) + } + .listStyle(.insetGrouped) + .navigationTitle(worktree.name) + .navigationBarTitleDisplayMode(.large) + .confirmationDialog("Merge Worktree", isPresented: $confirmingMerge) { + Button("Squash Merge") { + Task { + try? 
await appState.client.mergeWorktree(worktreeId: worktreeId) + await appState.manifestStore.refresh() + } + } + Button("Cancel", role: .cancel) {} + } message: { + Text("Merge \"\(worktree.name)\" back to the base branch?") + } + .confirmationDialog("Kill Worktree", isPresented: $confirmingKill) { + Button("Kill All Agents", role: .destructive) { + Task { + try? await appState.client.killWorktree(worktreeId: worktreeId) + await appState.manifestStore.refresh() + } + } + Button("Cancel", role: .cancel) {} + } message: { + Text("Kill all agents in \"\(worktree.name)\"? This cannot be undone.") + } + } else { + ContentUnavailableView( + "Worktree Not Found", + systemImage: "questionmark.folder", + description: Text("This worktree may have been removed.") + ) + } + } + } + + // MARK: - Info Section + + private func infoSection(_ worktree: WorktreeEntry) -> some View { + Section { + LabeledContent("Status") { + HStack(spacing: 4) { + Image(systemName: worktree.status.icon) + .font(.caption2) + Text(worktree.status.label) + .fontWeight(.medium) + } + .foregroundStyle(worktree.status.color) + } + + LabeledContent("Branch") { + Text(worktree.branch) + .font(.footnote.monospaced()) + .foregroundStyle(.secondary) + } + + LabeledContent("Agents") { + Text("\(worktree.agents.count)") + } + + if let date = worktree.createdDate { + LabeledContent("Created") { + Text(date, style: .relative) + } + } + + if let mergedDate = worktree.mergedDate { + LabeledContent("Merged") { + Text(mergedDate, style: .relative) + } + } + } header: { + Text("Details") + } + } + + // MARK: - Agents Section + + private func agentsSection(_ worktree: WorktreeEntry) -> some View { + Section { + if worktree.agents.isEmpty { + Text("No agents") + .foregroundStyle(.secondary) + } else { + ForEach(worktree.sortedAgents) { agent in + AgentRow( + agent: agent, + onKill: { + Task { await appState.killAgent(agent.id) } + }, + onRestart: { + Task { + try? 
await appState.client.restartAgent(agentId: agent.id) + await appState.manifestStore.refresh() + } + } + ) + } + } + } header: { + HStack { + Text("Agents") + Spacer() + Text(agentSummary(worktree)) + .font(.caption) + .foregroundStyle(.secondary) + } + } + } + + // MARK: - Actions Section + + private func actionsSection(_ worktree: WorktreeEntry) -> some View { + Section { + if worktree.status == .active || worktree.status == .running { + Button { + confirmingMerge = true + } label: { + Label("Merge Worktree", systemImage: "arrow.triangle.merge") + } + + Button(role: .destructive) { + confirmingKill = true + } label: { + Label("Kill All Agents", systemImage: "xmark.octagon") + } + } + + Button { + Task { + try? await appState.client.createPR(worktreeId: worktreeId) + await appState.manifestStore.refresh() + } + } label: { + Label("Create Pull Request", systemImage: "arrow.triangle.pull") + } + .disabled(worktree.status != .active && worktree.status != .running && worktree.status != .merged) + } header: { + Text("Actions") + } + } + + // MARK: - Helpers + + private func agentSummary(_ worktree: WorktreeEntry) -> String { + let active = worktree.sortedAgents.filter { $0.status.isActive }.count + let total = worktree.agents.count + if active > 0 { + return "\(active)/\(total) active" + } + return "\(total) total" + } +} diff --git a/ios/PPGMobile/PPGMobile/Views/Settings/AddServerView.swift b/ios/PPGMobile/PPGMobile/Views/Settings/AddServerView.swift new file mode 100644 index 0000000..6bca076 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Views/Settings/AddServerView.swift @@ -0,0 +1,115 @@ +import SwiftUI + +struct AddServerView: View { + @Environment(AppState.self) private var appState + @Environment(\.dismiss) private var dismiss + + @State private var name = "My Mac" + @State private var host = "" + @State private var port = "7700" + @State private var token = "" + @State private var showToken = false + + var body: some View { + NavigationStack { + Form { + 
Section("Server Details") { + TextField("Name", text: $name) + + TextField("Host (e.g., 192.168.1.100)", text: $host) + .textInputAutocapitalization(.never) + .autocorrectionDisabled() + .keyboardType(.URL) + + TextField("Port", text: $port) + .keyboardType(.numberPad) + } + + Section("Authentication") { + HStack { + Group { + if showToken { + TextField("Token", text: $token) + .fontDesign(.monospaced) + } else { + SecureField("Token", text: $token) + } + } + .textInputAutocapitalization(.never) + .autocorrectionDisabled() + + Button { + showToken.toggle() + } label: { + Image(systemName: showToken ? "eye.slash" : "eye") + .foregroundStyle(.secondary) + } + .buttonStyle(.plain) + } + } + + Section { + Button { + addServer() + } label: { + HStack { + Spacer() + Text("Add Server") + .fontWeight(.semibold) + Spacer() + } + } + .disabled(!isValid) + } + } + .navigationTitle("Add Server") + .navigationBarTitleDisplayMode(.inline) + .toolbar { + ToolbarItem(placement: .cancellationAction) { + Button("Cancel") { dismiss() } + } + } + } + } + + private var isValid: Bool { + !trimmedHost.isEmpty && parsedPort != nil + } + + private var trimmedName: String { + name.trimmingCharacters(in: .whitespacesAndNewlines) + } + + private var trimmedHost: String { + host.trimmingCharacters(in: .whitespacesAndNewlines) + } + + private var trimmedToken: String { + token.trimmingCharacters(in: .whitespacesAndNewlines) + } + + private var parsedPort: Int? { + guard + let value = Int(port.trimmingCharacters(in: .whitespacesAndNewlines)), + (1...65_535).contains(value) + else { + return nil + } + return value + } + + private func addServer() { + guard let validatedPort = parsedPort else { return } + let connection = ServerConnection( + name: trimmedName.isEmpty ? 
"My Mac" : trimmedName, + host: trimmedHost, + port: validatedPort, + token: trimmedToken + ) + appState.addConnection(connection) + Task { + await appState.connect(to: connection) + } + dismiss() + } +} diff --git a/ios/PPGMobile/PPGMobile/Views/Settings/QRScannerView.swift b/ios/PPGMobile/PPGMobile/Views/Settings/QRScannerView.swift new file mode 100644 index 0000000..4c69ed1 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Views/Settings/QRScannerView.swift @@ -0,0 +1,220 @@ +import SwiftUI +import AVFoundation + +/// QR code scanner for pairing with ppg serve. +/// Scans for ppg://connect URLs and creates a ServerConnection. +struct QRScannerView: View { + let onScan: (ServerConnection) -> Void + @Environment(\.dismiss) private var dismiss + @State private var scannedCode: String? + @State private var scannerResetToken = UUID() + @State private var showError = false + @State private var errorMessage = "" + @State private var permissionDenied = false + + var body: some View { + NavigationStack { + ZStack { + if permissionDenied { + cameraPermissionView + } else { + QRCameraView(onCodeScanned: handleScan) + .id(scannerResetToken) + .ignoresSafeArea() + + scanOverlay + } + } + .navigationTitle("Scan QR Code") + .navigationBarTitleDisplayMode(.inline) + .toolbar { + ToolbarItem(placement: .cancellationAction) { + Button("Cancel") { dismiss() } + } + } + .alert("Invalid QR Code", isPresented: $showError) { + Button("OK") { restartScanner() } + } message: { + Text(errorMessage) + } + .task { + await checkCameraPermission() + } + } + } + + private var scanOverlay: some View { + VStack { + Spacer() + + VStack(spacing: 12) { + Image(systemName: "qrcode.viewfinder") + .font(.system(size: 48)) + .foregroundStyle(.white) + + Text("Point camera at the QR code shown by `ppg serve`") + .font(.subheadline) + .foregroundStyle(.white) + .multilineTextAlignment(.center) + .padding(.horizontal) + } + .padding() + .background(.ultraThinMaterial) + 
.clipShape(RoundedRectangle(cornerRadius: 16)) + .padding() + } + } + + private var cameraPermissionView: some View { + ContentUnavailableView { + Label("Camera Access Required", systemImage: "camera.fill") + } description: { + Text("PPG Mobile needs camera access to scan QR codes for server pairing.") + } actions: { + Button("Open Settings") { + if let url = URL(string: UIApplication.openSettingsURLString) { + UIApplication.shared.open(url) + } + } + .buttonStyle(.borderedProminent) + } + } + + @MainActor + private func checkCameraPermission() async { + switch AVCaptureDevice.authorizationStatus(for: .video) { + case .authorized: + permissionDenied = false + case .notDetermined: + let granted = await AVCaptureDevice.requestAccess(for: .video) + permissionDenied = !granted + case .denied, .restricted: + permissionDenied = true + @unknown default: + permissionDenied = true + } + } + + private func handleScan(_ code: String) { + guard scannedCode == nil else { return } + scannedCode = code + + if let connection = ServerConnection.fromQRCode(code) { + onScan(connection) + } else { + errorMessage = "This QR code doesn't contain a valid ppg server connection.\n\nExpected format: ppg://connect?host=...&port=...&token=..." + showError = true + } + } + + private func restartScanner() { + scannedCode = nil + scannerResetToken = UUID() + } +} + +// MARK: - Camera UIViewRepresentable + +/// UIViewRepresentable wrapper for AVCaptureSession QR code scanning. +/// Manages session lifecycle on appear/disappear and handles preview bounds correctly. +struct QRCameraView: UIViewRepresentable { + let onCodeScanned: (String) -> Void + + func makeUIView(context: Context) -> CameraPreviewView { + let view = CameraPreviewView() + let coordinator = context.coordinator + + let session = AVCaptureSession() + coordinator.session = session + + guard let device = AVCaptureDevice.default(for: .video), + let input = try? 
AVCaptureDeviceInput(device: device) + else { return view } + + if session.canAddInput(input) { + session.addInput(input) + } + + let output = AVCaptureMetadataOutput() + if session.canAddOutput(output) { + session.addOutput(output) + output.setMetadataObjectsDelegate(coordinator, queue: .main) + output.metadataObjectTypes = [.qr] + } + + let previewLayer = AVCaptureVideoPreviewLayer(session: session) + previewLayer.videoGravity = .resizeAspectFill + view.previewLayer = previewLayer + view.layer.addSublayer(previewLayer) + + coordinator.startSession() + + return view + } + + func updateUIView(_ uiView: CameraPreviewView, context: Context) { + uiView.previewLayer?.frame = uiView.bounds + } + + static func dismantleUIView(_ uiView: CameraPreviewView, coordinator: Coordinator) { + coordinator.stopSession() + } + + func makeCoordinator() -> Coordinator { + Coordinator(onCodeScanned: onCodeScanned) + } + + // MARK: - Preview UIView + + /// Custom UIView that keeps the preview layer sized to its bounds. + class CameraPreviewView: UIView { + var previewLayer: AVCaptureVideoPreviewLayer? + + override func layoutSubviews() { + super.layoutSubviews() + previewLayer?.frame = bounds + } + } + + // MARK: - Coordinator + + class Coordinator: NSObject, AVCaptureMetadataOutputObjectsDelegate { + let onCodeScanned: (String) -> Void + var session: AVCaptureSession? 
+ private var hasScanned = false + + init(onCodeScanned: @escaping (String) -> Void) { + self.onCodeScanned = onCodeScanned + } + + func startSession() { + guard let session, !session.isRunning else { return } + DispatchQueue.global(qos: .userInitiated).async { + session.startRunning() + } + } + + func stopSession() { + guard let session, session.isRunning else { return } + DispatchQueue.global(qos: .userInitiated).async { + session.stopRunning() + } + } + + func metadataOutput( + _ output: AVCaptureMetadataOutput, + didOutput metadataObjects: [AVMetadataObject], + from connection: AVCaptureConnection + ) { + guard !hasScanned, + let object = metadataObjects.first as? AVMetadataMachineReadableCodeObject, + object.type == .qr, + let value = object.stringValue + else { return } + + hasScanned = true + stopSession() + onCodeScanned(value) + } + } +} diff --git a/ios/PPGMobile/PPGMobile/Views/Settings/SettingsView.swift b/ios/PPGMobile/PPGMobile/Views/Settings/SettingsView.swift new file mode 100644 index 0000000..05ff5ec --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Views/Settings/SettingsView.swift @@ -0,0 +1,236 @@ +import SwiftUI + +struct SettingsView: View { + @Environment(AppState.self) private var appState + + @State private var showAddManual = false + @State private var showQRScanner = false + @State private var deleteTarget: ServerConnection? + @State private var testResult: TestResult? 
+ @State private var showQRError = false + + private let repositoryURL = URL(string: "https://github.com/2witstudios/ppg-cli") + + private enum TestResult: Equatable { + case testing + case success + case failure(String) + } + + var body: some View { + NavigationStack { + List { + currentConnectionSection + savedServersSection + addServerSection + aboutSection + } + .navigationTitle("Settings") + .sheet(isPresented: $showQRScanner) { + QRScannerView { connection in + handleQRScan(connection) + } + } + .sheet(isPresented: $showAddManual) { + AddServerView() + } + .confirmationDialog( + "Delete Server", + isPresented: .init( + get: { deleteTarget != nil }, + set: { if !$0 { deleteTarget = nil } } + ), + presenting: deleteTarget + ) { server in + Button("Delete \"\(server.name)\"", role: .destructive) { + appState.removeConnection(server) + deleteTarget = nil + } + } message: { server in + Text("Remove \(server.name) (\(server.host):\(server.port))? This cannot be undone.") + } + .alert("Invalid QR Code", isPresented: $showQRError) { + Button("OK", role: .cancel) {} + } message: { + Text("The scanned code is not a valid ppg server. 
Expected format: ppg://connect?host=...&port=...&token=...") + } + } + } + + // MARK: - Sections + + @ViewBuilder + private var currentConnectionSection: some View { + Section("Current Connection") { + if let conn = appState.activeConnection { + HStack { + VStack(alignment: .leading) { + Text(conn.name) + .font(.headline) + Text("\(conn.host):\(conn.port)") + .font(.caption) + .foregroundStyle(.secondary) + } + Spacer() + connectionStatusBadge + } + + testConnectionRow + + Button("Disconnect", role: .destructive) { + appState.disconnect() + } + } else { + Text("Not connected") + .foregroundStyle(.secondary) + } + } + } + + @ViewBuilder + private var savedServersSection: some View { + Section("Saved Servers") { + ForEach(appState.connections) { conn in + Button { + Task { await appState.connect(to: conn) } + } label: { + HStack { + VStack(alignment: .leading) { + Text(conn.name) + Text("\(conn.host):\(conn.port)") + .font(.caption) + .foregroundStyle(.secondary) + } + Spacer() + if appState.activeConnection?.id == conn.id { + Image(systemName: "checkmark.circle.fill") + .foregroundStyle(.green) + } + } + } + .foregroundStyle(.primary) + .swipeActions(edge: .trailing, allowsFullSwipe: false) { + Button("Delete", role: .destructive) { + deleteTarget = conn + } + } + } + + if appState.connections.isEmpty { + Text("No saved servers") + .foregroundStyle(.secondary) + } + } + } + + @ViewBuilder + private var addServerSection: some View { + Section("Add Server") { + Button { + showQRScanner = true + } label: { + Label("Scan QR Code", systemImage: "qrcode.viewfinder") + } + + Button { + showAddManual = true + } label: { + Label("Enter Manually", systemImage: "keyboard") + } + } + } + + @ViewBuilder + private var aboutSection: some View { + Section("About") { + LabeledContent("PPG Mobile", value: appVersion) + LabeledContent("Server Protocol", value: "v1") + + if let repositoryURL { + Link(destination: repositoryURL) { + Label("GitHub Repository", systemImage: "link") + } + 
} + } + } + + // MARK: - Subviews + + @ViewBuilder + private var connectionStatusBadge: some View { + switch appState.connectionStatus { + case .connected: + Label("Connected", systemImage: "circle.fill") + .font(.caption) + .foregroundStyle(.green) + case .connecting: + ProgressView() + .controlSize(.small) + case .error(let msg): + Label(msg, systemImage: "exclamationmark.triangle.fill") + .font(.caption) + .foregroundStyle(.orange) + .lineLimit(1) + case .disconnected: + Label("Disconnected", systemImage: "circle") + .font(.caption) + .foregroundStyle(.secondary) + } + } + + @ViewBuilder + private var testConnectionRow: some View { + Button { + testConnection() + } label: { + HStack { + Label("Test Connection", systemImage: "antenna.radiowaves.left.and.right") + Spacer() + switch testResult { + case .testing: + ProgressView() + .controlSize(.small) + case .success: + Image(systemName: "checkmark.circle.fill") + .foregroundStyle(.green) + case .failure(let message): + Label(message, systemImage: "xmark.circle.fill") + .font(.caption) + .foregroundStyle(.red) + .lineLimit(1) + case nil: + EmptyView() + } + } + } + .disabled(testResult == .testing) + } + + // MARK: - Actions + + private func handleQRScan(_ connection: ServerConnection) { + showQRScanner = false + appState.addConnection(connection) + Task { await appState.connect(to: connection) } + } + + private func testConnection() { + testResult = .testing + Task { @MainActor in + do { + _ = try await appState.client.fetchStatus() + testResult = .success + } catch { + testResult = .failure(error.localizedDescription) + } + try? await Task.sleep(for: .seconds(3)) + if !Task.isCancelled { + testResult = nil + } + } + } + + private var appVersion: String { + Bundle.main.infoDictionary?["CFBundleShortVersionString"] as? String ?? 
"1.0.0" + } +} diff --git a/ios/PPGMobile/PPGMobile/Views/Spawn/SpawnView.swift b/ios/PPGMobile/PPGMobile/Views/Spawn/SpawnView.swift new file mode 100644 index 0000000..a8f3f27 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Views/Spawn/SpawnView.swift @@ -0,0 +1,185 @@ +import SwiftUI + +struct SpawnView: View { + @Environment(AppState.self) private var appState + + // Form fields + @State private var name = "" + @State private var prompt = "" + @State private var selectedVariant: AgentVariant = .claude + @State private var count = 1 + @State private var baseBranch = "" + + // UI state + @State private var isSpawning = false + @State private var errorMessage: String? + @State private var spawnedWorktreeId: String? + + private static let namePattern = /^[a-zA-Z0-9][a-zA-Z0-9\-]*$/ + + private var sanitizedName: String { + name.trimmingCharacters(in: .whitespaces) + } + + private var isFormValid: Bool { + let hasName = !sanitizedName.isEmpty && sanitizedName.wholeMatch(of: Self.namePattern) != nil + let hasPrompt = !prompt.trimmingCharacters(in: .whitespaces).isEmpty + return hasName && hasPrompt + } + + private var spawnableVariants: [AgentVariant] { + [.claude, .codex, .opencode] + } + + private var availableBranches: [String] { + var branches = Set() + branches.insert("main") + if let manifest = appState.manifestStore.manifest { + for wt in manifest.worktrees.values { + branches.insert(wt.baseBranch) + } + } + return branches.sorted() + } + + var body: some View { + NavigationStack { + Form { + nameSection + agentSection + promptSection + baseBranchSection + errorSection + } + .scrollDismissesKeyboard(.interactively) + .disabled(isSpawning) + .navigationTitle("Spawn") + .toolbar { + ToolbarItem(placement: .topBarTrailing) { + spawnButton + } + } + .navigationDestination(for: String.self) { worktreeId in + WorktreeDetailView(worktreeId: worktreeId) + } + } + } + + // MARK: - Sections + + private var nameSection: some View { + Section { + TextField("Worktree name", 
text: $name) + .textInputAutocapitalization(.never) + .autocorrectionDisabled() + } header: { + Text("Name") + } footer: { + if !sanitizedName.isEmpty && sanitizedName.wholeMatch(of: Self.namePattern) == nil { + Text("Only letters, numbers, and hyphens allowed") + .foregroundStyle(.red) + } else { + Text("Required. Letters, numbers, and hyphens (ppg/)") + } + } + } + + private var agentSection: some View { + Section("Agent") { + Picker("Type", selection: $selectedVariant) { + ForEach(spawnableVariants, id: \.self) { variant in + Label(variant.displayName, systemImage: variant.icon) + .tag(variant) + } + } + + Stepper("Count: \(count)", value: $count, in: 1...10) + } + } + + private var promptSection: some View { + Section { + TextEditor(text: $prompt) + .frame(minHeight: 120) + .font(.body) + } header: { + Text("Prompt") + } footer: { + Text("Required — describe the task for the agent") + } + } + + private var baseBranchSection: some View { + Section { + Picker("Base branch", selection: $baseBranch) { + Text("Default (current)").tag("") + ForEach(availableBranches, id: \.self) { branch in + Text(branch).tag(branch) + } + } + } footer: { + Text("Branch to create the worktree from") + } + } + + @ViewBuilder + private var errorSection: some View { + if let errorMessage { + Section { + Label(errorMessage, systemImage: "exclamationmark.triangle") + .foregroundStyle(.red) + } + } + } + + private var spawnButton: some View { + Button { + Task { await spawnWorktree() } + } label: { + if isSpawning { + ProgressView() + } else { + Text("Spawn") + .bold() + } + } + .disabled(!isFormValid || isSpawning) + } + + // MARK: - Actions + + @MainActor + private func spawnWorktree() async { + isSpawning = true + errorMessage = nil + + let trimmedPrompt = prompt.trimmingCharacters(in: .whitespaces) + + do { + let response = try await appState.client.spawn( + name: sanitizedName, + agent: selectedVariant.rawValue, + prompt: trimmedPrompt, + base: baseBranch.isEmpty ? 
nil : baseBranch, + count: count + ) + + await appState.manifestStore.refresh() + clearForm() + spawnedWorktreeId = response.worktreeId + } catch { + errorMessage = error.localizedDescription + } + + isSpawning = false + } + + private func clearForm() { + name = "" + prompt = "" + selectedVariant = .claude + count = 1 + baseBranch = "" + errorMessage = nil + } +} diff --git a/ios/PPGMobile/PPGMobile/Views/Terminal/TerminalInputBar.swift b/ios/PPGMobile/PPGMobile/Views/Terminal/TerminalInputBar.swift new file mode 100644 index 0000000..87cabee --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Views/Terminal/TerminalInputBar.swift @@ -0,0 +1,27 @@ +import SwiftUI + +/// Bottom input bar for sending text to a terminal pane via WebSocket. +struct TerminalInputBar: View { + @Binding var text: String + let onSend: () -> Void + + var body: some View { + HStack(spacing: 8) { + TextField("Send to terminal...", text: $text) + .font(.system(.body, design: .monospaced)) + .textInputAutocapitalization(.never) + .autocorrectionDisabled() + .textFieldStyle(.roundedBorder) + .onSubmit(onSend) + + Button(action: onSend) { + Image(systemName: "arrow.up.circle.fill") + .font(.title2) + } + .disabled(text.isEmpty) + } + .padding(.horizontal, 12) + .padding(.vertical, 8) + .background(.bar) + } +} diff --git a/ios/PPGMobile/PPGMobile/Views/Terminal/TerminalView.swift b/ios/PPGMobile/PPGMobile/Views/Terminal/TerminalView.swift new file mode 100644 index 0000000..ff7e827 --- /dev/null +++ b/ios/PPGMobile/PPGMobile/Views/Terminal/TerminalView.swift @@ -0,0 +1,236 @@ +import Foundation +import SwiftUI + +/// Terminal output view that subscribes to WebSocket terminal streaming. +/// Displays raw text output from tmux capture-pane with ANSI stripped server-side. 
+struct TerminalView: View { + let agentId: String + let agentName: String + + @Environment(AppState.self) private var appState + @State private var viewModel = TerminalViewModel() + @State private var inputText = "" + @State private var showKillConfirm = false + + var body: some View { + VStack(spacing: 0) { + terminalContent + + TerminalInputBar(text: $inputText) { + guard !inputText.isEmpty, let ws = appState.wsManager else { return } + ws.sendTerminalInput(agentId: agentId, text: inputText) + inputText = "" + } + } + .navigationTitle(agentName) + .navigationBarTitleDisplayMode(.inline) + .toolbar { + ToolbarItem(placement: .topBarTrailing) { + Button("Kill", systemImage: "xmark.circle") { + showKillConfirm = true + } + .tint(.red) + .disabled(agentIsTerminal) + } + } + .confirmationDialog("Kill Agent", isPresented: $showKillConfirm) { + Button("Kill Agent", role: .destructive) { + Task { await appState.killAgent(agentId) } + } + Button("Cancel", role: .cancel) {} + } + .task { await viewModel.subscribe(agentId: agentId, appState: appState) } + .onDisappear { + if let ws = appState.wsManager { + viewModel.unsubscribe(agentId: agentId, wsManager: ws) + } + } + } + + @ViewBuilder + private var terminalContent: some View { + ScrollViewReader { proxy in + ScrollView { + VStack(spacing: 0) { + if viewModel.output.isEmpty { + Text(statusMessage) + .font(.system(.footnote, design: .monospaced)) + .foregroundStyle(.secondary) + .frame(maxWidth: .infinity, alignment: .leading) + .padding(8) + } else { + Text(viewModel.output) + .font(.system(.footnote, design: .monospaced)) + .frame(maxWidth: .infinity, alignment: .leading) + .padding(8) + .textSelection(.enabled) + } + Color.clear + .frame(height: 1) + .id("terminal-bottom") + } + } + .defaultScrollAnchor(.bottom) + .background(Color.black) + .foregroundStyle(.green) + .onChange(of: viewModel.output) { _, _ in + withAnimation { + proxy.scrollTo("terminal-bottom", anchor: .bottom) + } + } + } + } + + private var 
statusMessage: String { + if appState.activeConnection == nil { + return "Not connected to server" + } + if viewModel.isSubscribed { + return "Waiting for output..." + } + return "Loading terminal output..." + } + + private var agentIsTerminal: Bool { + guard let manifest = appState.manifest else { return true } + for worktree in manifest.worktrees.values { + if let agent = worktree.agents[agentId] { + return agent.status.isTerminal + } + } + return true + } +} + +// MARK: - View Model + +@Observable +@MainActor +final class TerminalViewModel { + var output = "" + var hasError = false + private(set) var isSubscribed = false + + private static let maxOutputLength = 50_000 + private var subscriptionID: UUID? + + func subscribe(agentId: String, appState: AppState) async { + guard !isSubscribed else { return } + isSubscribed = true + + // Fetch initial log content via REST + do { + let logs = try await appState.client.fetchAgentLogs(agentId: agentId, lines: 200) + output = logs.output + trimOutput() + } catch { + output = "Failed to load logs: \(error.localizedDescription)" + hasError = true + } + + // Subscribe to live WebSocket updates + guard let ws = appState.wsManager else { return } + subscriptionID = TerminalMessageRouter.shared.addSubscriber(wsManager: ws) { [weak self] message in + guard message.type == "terminal:output", message.agentId == agentId, let data = message.data else { + return + } + Task { @MainActor [weak self] in + guard let self else { return } + self.output += data + self.trimOutput() + } + } + ws.subscribeTerminal(agentId: agentId) + } + + func unsubscribe(agentId: String, wsManager: WebSocketManager) { + guard isSubscribed else { return } + isSubscribed = false + wsManager.unsubscribeTerminal(agentId: agentId) + if let subscriptionID { + TerminalMessageRouter.shared.removeSubscriber(wsManager: wsManager, subscriberID: subscriptionID) + self.subscriptionID = nil + } + } + + private func trimOutput() { + guard output.count > 
Self.maxOutputLength else { return } + let startIndex = output.index(output.endIndex, offsetBy: -Self.maxOutputLength) + if let newlineIndex = output[startIndex...].firstIndex(of: "\n") { + output = String(output[output.index(after: newlineIndex)...]) + } else { + output = String(output[startIndex...]) + } + } +} + +// MARK: - Terminal Message Router + +/// Multiplexes WebSocket messages so multiple terminal views can subscribe safely. +private final class TerminalMessageRouter { + static let shared = TerminalMessageRouter() + + private struct State { + var previousOnMessage: ((ServerMessage) -> Void)? + var subscribers: [UUID: (ServerMessage) -> Void] + } + + private let lock = NSLock() + private var states: [ObjectIdentifier: State] = [:] + + private init() {} + + func addSubscriber( + wsManager: WebSocketManager, + subscriber: @escaping (ServerMessage) -> Void + ) -> UUID { + let managerID = ObjectIdentifier(wsManager) + let subscriberID = UUID() + + lock.lock() + if states[managerID] == nil { + let previousOnMessage = wsManager.onMessage + states[managerID] = State(previousOnMessage: previousOnMessage, subscribers: [:]) + wsManager.onMessage = { [weak self] message in + self?.dispatch(message: message, managerID: managerID) + } + } + states[managerID]?.subscribers[subscriberID] = subscriber + lock.unlock() + + return subscriberID + } + + func removeSubscriber(wsManager: WebSocketManager, subscriberID: UUID) { + let managerID = ObjectIdentifier(wsManager) + + lock.lock() + guard var state = states[managerID] else { + lock.unlock() + return + } + + state.subscribers.removeValue(forKey: subscriberID) + if state.subscribers.isEmpty { + states.removeValue(forKey: managerID) + lock.unlock() + wsManager.onMessage = state.previousOnMessage + return + } + + states[managerID] = state + lock.unlock() + } + + private func dispatch(message: ServerMessage, managerID: ObjectIdentifier) { + lock.lock() + let state = states[managerID] + let subscribers = 
state?.subscribers.values.map { $0 } ?? [] + lock.unlock() + + state?.previousOnMessage?(message) + for subscriber in subscribers { + subscriber(message) + } + } +} diff --git a/ios/PPGMobile/PPGMobileTests/AgentVariantTests.swift b/ios/PPGMobile/PPGMobileTests/AgentVariantTests.swift new file mode 100644 index 0000000..616a3e5 --- /dev/null +++ b/ios/PPGMobile/PPGMobileTests/AgentVariantTests.swift @@ -0,0 +1,30 @@ +import Testing +import SwiftUI +@testable import PPGMobile + +@Suite("AgentVariant") +struct AgentVariantTests { + @Test("resolves known agent types case-insensitively") + func resolvesKnownTypes() { + #expect(AgentVariant.from("claude") == .claude) + #expect(AgentVariant.from("codex") == .codex) + #expect(AgentVariant.from("opencode") == .opencode) + #expect(AgentVariant.from("Claude") == .claude) + #expect(AgentVariant.from("CODEX") == .codex) + } + + @Test("returns nil for unknown agent types") + func returnsNilForUnknown() { + #expect(AgentVariant.from("gpt4") == nil) + #expect(AgentVariant.from("") == nil) + #expect(AgentVariant.from("custom-agent") == nil) + } + + @Test("every variant has a non-empty displayName and sfSymbol") + func displayProperties() { + for variant in AgentVariant.allCases { + #expect(!variant.displayName.isEmpty) + #expect(!variant.sfSymbol.isEmpty) + } + } +} diff --git a/ios/PPGMobile/PPGMobileTests/ManifestTests.swift b/ios/PPGMobile/PPGMobileTests/ManifestTests.swift new file mode 100644 index 0000000..a56dcb0 --- /dev/null +++ b/ios/PPGMobile/PPGMobileTests/ManifestTests.swift @@ -0,0 +1,208 @@ +import Testing +import Foundation +@testable import PPGMobile + +@Suite("AgentStatus") +struct AgentStatusTests { + @Test("decodes canonical lifecycle values") + func decodesCanonicalValues() throws { + let cases = ["spawning", "running", "completed", "failed", "killed", "lost"] + for value in cases { + let json = Data("\"\(value)\"".utf8) + let status = try JSONDecoder().decode(AgentStatus.self, from: json) + 
#expect(status.rawValue == value) + } + } + + @Test("decodes TypeScript alias 'idle' as .running") + func decodesIdleAlias() throws { + let json = Data("\"idle\"".utf8) + let status = try JSONDecoder().decode(AgentStatus.self, from: json) + #expect(status == .running) + } + + @Test("decodes TypeScript alias 'exited' as .completed") + func decodesExitedAlias() throws { + let json = Data("\"exited\"".utf8) + let status = try JSONDecoder().decode(AgentStatus.self, from: json) + #expect(status == .completed) + } + + @Test("decodes TypeScript alias 'gone' as .lost") + func decodesGoneAlias() throws { + let json = Data("\"gone\"".utf8) + let status = try JSONDecoder().decode(AgentStatus.self, from: json) + #expect(status == .lost) + } + + @Test("rejects unknown status values") + func rejectsUnknown() { + let json = Data("\"banana\"".utf8) + #expect(throws: DecodingError.self) { + try JSONDecoder().decode(AgentStatus.self, from: json) + } + } + + @Test("encodes using lifecycle rawValue, not alias") + func encodesToCanonicalValue() throws { + let json = Data("\"idle\"".utf8) + let status = try JSONDecoder().decode(AgentStatus.self, from: json) + let encoded = try JSONEncoder().encode(status) + let raw = String(data: encoded, encoding: .utf8) + #expect(raw == "\"running\"") + } + + @Test("every case has a non-empty label, color, and sfSymbol") + func displayProperties() { + for status in AgentStatus.allCases { + #expect(!status.label.isEmpty) + #expect(!status.sfSymbol.isEmpty) + } + } +} + +@Suite("WorktreeStatus") +struct WorktreeStatusTests { + @Test("decodes all worktree status values") + func decodesAllValues() throws { + let cases = ["active", "merging", "merged", "failed", "cleaned"] + for value in cases { + let json = Data("\"\(value)\"".utf8) + let status = try JSONDecoder().decode(WorktreeStatus.self, from: json) + #expect(status.rawValue == value) + } + } + + @Test("every case has a non-empty label and sfSymbol") + func displayProperties() { + for status in 
WorktreeStatus.allCases { + #expect(!status.label.isEmpty) + #expect(!status.sfSymbol.isEmpty) + } + } +} + +@Suite("Manifest decoding") +struct ManifestDecodingTests { + static let sampleJSON = """ + { + "version": 1, + "projectRoot": "/Users/test/project", + "sessionName": "ppg", + "worktrees": { + "wt-abc123": { + "id": "wt-abc123", + "name": "feature-auth", + "path": "/Users/test/project/.worktrees/wt-abc123", + "branch": "ppg/feature-auth", + "baseBranch": "main", + "status": "active", + "tmuxWindow": "ppg:1", + "agents": { + "ag-test1234": { + "id": "ag-test1234", + "name": "claude", + "agentType": "claude", + "status": "running", + "tmuxTarget": "ppg:1.0", + "prompt": "Implement auth", + "startedAt": "2025-01-15T10:30:00.000Z" + } + }, + "createdAt": "2025-01-15T10:30:00.000Z" + } + }, + "createdAt": "2025-01-15T10:00:00.000Z", + "updatedAt": "2025-01-15T10:30:00.000Z" + } + """ + + @Test("decodes a full manifest from server JSON") + func decodesFullManifest() throws { + let data = Data(Self.sampleJSON.utf8) + let manifest = try JSONDecoder().decode(Manifest.self, from: data) + + #expect(manifest.version == 1) + #expect(manifest.sessionName == "ppg") + #expect(manifest.worktrees.count == 1) + + let worktree = manifest.worktrees["wt-abc123"] + #expect(worktree?.name == "feature-auth") + #expect(worktree?.status == .active) + #expect(worktree?.agents.count == 1) + + let agent = worktree?.agents["ag-test1234"] + #expect(agent?.agentType == "claude") + #expect(agent?.status == .running) + } + + @Test("decodes manifest with TypeScript status aliases") + func decodesWithAliases() throws { + let json = """ + { + "version": 1, + "projectRoot": "/test", + "sessionName": "ppg", + "worktrees": { + "wt-xyz789": { + "id": "wt-xyz789", + "name": "review", + "path": "/test/.worktrees/wt-xyz789", + "branch": "ppg/review", + "baseBranch": "main", + "status": "active", + "tmuxWindow": "ppg:2", + "agents": { + "ag-alias001": { + "id": "ag-alias001", + "name": "codex", + 
"agentType": "codex", + "status": "idle", + "tmuxTarget": "ppg:2.0", + "prompt": "Review code", + "startedAt": "2025-01-15T11:00:00.000Z" + }, + "ag-alias002": { + "id": "ag-alias002", + "name": "claude", + "agentType": "claude", + "status": "exited", + "tmuxTarget": "ppg:2.1", + "prompt": "Fix bug", + "startedAt": "2025-01-15T11:00:00.000Z", + "exitCode": 0 + }, + "ag-alias003": { + "id": "ag-alias003", + "name": "opencode", + "agentType": "opencode", + "status": "gone", + "tmuxTarget": "ppg:2.2", + "prompt": "Test", + "startedAt": "2025-01-15T11:00:00.000Z" + } + }, + "createdAt": "2025-01-15T11:00:00.000Z" + } + }, + "createdAt": "2025-01-15T10:00:00.000Z", + "updatedAt": "2025-01-15T11:00:00.000Z" + } + """ + let data = Data(json.utf8) + let manifest = try JSONDecoder().decode(Manifest.self, from: data) + let agents = manifest.worktrees["wt-xyz789"]!.agents + + #expect(agents["ag-alias001"]?.status == .running) // idle → running + #expect(agents["ag-alias002"]?.status == .completed) // exited → completed + #expect(agents["ag-alias003"]?.status == .lost) // gone → lost + } + + @Test("allAgents flattens agents across worktrees") + func allAgentsFlattens() throws { + let data = Data(Self.sampleJSON.utf8) + let manifest = try JSONDecoder().decode(Manifest.self, from: data) + #expect(manifest.allAgents.count == 1) + #expect(manifest.allAgents.first?.id == "ag-test1234") + } +} diff --git a/ios/PPGMobile/PPGMobileTests/ServerConnectionTests.swift b/ios/PPGMobile/PPGMobileTests/ServerConnectionTests.swift new file mode 100644 index 0000000..beaabd3 --- /dev/null +++ b/ios/PPGMobile/PPGMobileTests/ServerConnectionTests.swift @@ -0,0 +1,139 @@ +import Testing +import Foundation +@testable import PPGMobile + +@Suite("ServerConnection") +struct ServerConnectionTests { + + static func make( + host: String = "192.168.1.5", + port: Int = 7700, + ca: String? 
= nil, + token: String = "abc123" + ) -> ServerConnection { + ServerConnection(id: UUID(), host: host, port: port, caCertificate: ca, token: token) + } + + // MARK: - URL Builders + + @Test("baseURL uses http when no CA certificate") + func baseURLWithoutCA() { + let conn = Self.make() + #expect(conn.baseURL?.absoluteString == "http://192.168.1.5:7700") + } + + @Test("baseURL uses https when CA certificate is present") + func baseURLWithCA() { + let conn = Self.make(ca: "FAKECERT") + #expect(conn.baseURL?.absoluteString == "https://192.168.1.5:7700") + } + + @Test("restURL appends path to base URL") + func restURLAppendsPath() { + let conn = Self.make() + let url = conn.restURL(for: "/api/status") + #expect(url?.absoluteString == "http://192.168.1.5:7700/api/status") + } + + @Test("webSocketURL uses ws scheme without CA") + func webSocketWithoutCA() { + let conn = Self.make() + let url = conn.webSocketURL + #expect(url?.scheme == "ws") + #expect(url?.host == "192.168.1.5") + #expect(url?.port == 7700) + #expect(url?.path == "/ws") + #expect(url?.absoluteString.contains("token=abc123") == true) + } + + @Test("webSocketURL uses wss scheme with CA") + func webSocketWithCA() { + let conn = Self.make(ca: "FAKECERT") + #expect(conn.webSocketURL?.scheme == "wss") + } + + // MARK: - QR Code Round-trip + + @Test("qrCodeString produces parseable ppg:// URL") + func qrCodeStringFormat() { + let conn = Self.make() + let qr = conn.qrCodeString + #expect(qr.hasPrefix("ppg://connect?")) + #expect(qr.contains("host=192.168.1.5")) + #expect(qr.contains("port=7700")) + #expect(qr.contains("token=abc123")) + } + + @Test("fromQRCode round-trips with qrCodeString") + func qrRoundTrip() { + let original = Self.make() + let qr = original.qrCodeString + let parsed = ServerConnection.fromQRCode(qr) + + #expect(parsed?.host == original.host) + #expect(parsed?.port == original.port) + #expect(parsed?.token == original.token) + #expect(parsed?.caCertificate == original.caCertificate) + } + + 
@Test("fromQRCode round-trips with CA certificate") + func qrRoundTripWithCA() { + let original = Self.make(ca: "BASE64CERTDATA+/=") + let qr = original.qrCodeString + let parsed = ServerConnection.fromQRCode(qr) + + #expect(parsed?.host == original.host) + #expect(parsed?.caCertificate == original.caCertificate) + } + + @Test("fromQRCode round-trips with special characters in token") + func qrRoundTripSpecialChars() { + let original = Self.make(token: "tok+en/with=special&chars") + let qr = original.qrCodeString + let parsed = ServerConnection.fromQRCode(qr) + + #expect(parsed?.token == original.token) + } + + // MARK: - QR Parsing Edge Cases + + @Test("fromQRCode returns nil for non-ppg scheme") + func rejectsWrongScheme() { + #expect(ServerConnection.fromQRCode("https://connect?host=x&port=1&token=t") == nil) + } + + @Test("fromQRCode returns nil for wrong host") + func rejectsWrongHost() { + #expect(ServerConnection.fromQRCode("ppg://wrong?host=x&port=1&token=t") == nil) + } + + @Test("fromQRCode returns nil when required fields are missing") + func rejectsMissingFields() { + #expect(ServerConnection.fromQRCode("ppg://connect?host=x&port=1") == nil) // no token + #expect(ServerConnection.fromQRCode("ppg://connect?host=x&token=t") == nil) // no port + #expect(ServerConnection.fromQRCode("ppg://connect?port=1&token=t") == nil) // no host + } + + @Test("fromQRCode returns nil for non-numeric port") + func rejectsNonNumericPort() { + #expect(ServerConnection.fromQRCode("ppg://connect?host=x&port=abc&token=t") == nil) + } + + @Test("fromQRCode returns nil for empty string") + func rejectsEmptyString() { + #expect(ServerConnection.fromQRCode("") == nil) + } + + @Test("fromQRCode returns nil for garbage input") + func rejectsGarbage() { + #expect(ServerConnection.fromQRCode("not a url at all") == nil) + } + + // MARK: - Auth Header + + @Test("authorizationHeader has Bearer prefix") + func authHeader() { + let conn = Self.make(token: "my-secret-token") + 
#expect(conn.authorizationHeader == "Bearer my-secret-token") + } +} diff --git a/ios/PPGMobile/project.yml b/ios/PPGMobile/project.yml new file mode 100644 index 0000000..79a3013 --- /dev/null +++ b/ios/PPGMobile/project.yml @@ -0,0 +1,38 @@ +name: PPGMobile +options: + bundleIdPrefix: com.2witstudios + deploymentTarget: + iOS: "17.0" + xcodeVersion: "16.0" + generateEmptyDirectories: true + +settings: + base: + SWIFT_VERSION: "5.9" + +targets: + PPGMobile: + type: application + platform: iOS + sources: + - path: PPGMobile + excludes: + - "**/*Tests.swift" + settings: + base: + PRODUCT_BUNDLE_IDENTIFIER: com.2witstudios.ppg-mobile + INFOPLIST_GENERATION_MODE: GeneratedFile + MARKETING_VERSION: "1.0.0" + CURRENT_PROJECT_VERSION: "1" + GENERATE_INFOPLIST_FILE: true + INFOPLIST_KEY_UIApplicationSceneManifest_Generation: true + INFOPLIST_KEY_UILaunchScreen_Generation: true + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad: "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight" + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone: "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight" + SWIFT_EMIT_LOC_STRINGS: true + +schemes: + PPGMobile: + build: + targets: + PPGMobile: all diff --git a/linux/Cargo.lock b/linux/Cargo.lock new file mode 100644 index 0000000..067d581 --- /dev/null +++ b/linux/Cargo.lock @@ -0,0 +1,2893 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" + +[[package]] +name = "anstyle-parse" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.61.2", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "async-channel" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cairo-rs" +version = "0.20.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e3bd0f4e25afa9cabc157908d14eeef9067d6448c49414d17b3fb55f0eadd0" +dependencies = [ + "bitflags", + "cairo-sys-rs", + "glib", + "libc", +] + +[[package]] +name = "cairo-sys-rs" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "059cc746549898cbfd9a47754288e5a958756650ef4652bbb6c5f71a6bda4f8b" +dependencies = [ + "glib-sys", + "libc", + "system-deps", +] + +[[package]] +name = "cc" +version = "1.2.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2" +dependencies = [ + "find-msvc-tools", + "shlex", +] + +[[package]] +name = "cfg-expr" +version = "0.20.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78cef5b5a1a6827c7322ae2a636368a573006b27cfa76c7ebd53e834daeaab6a" +dependencies = [ + "smallvec", + "target-lexicon", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "chrono" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "colorchoice" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "data-encoding" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + 
+[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "env_filter" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a1c3cc8e57274ec99de65301228b537f1e4eedc1b8e0f9411c6caac8ae7308f" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "env_logger" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2daee4ea451f429a58296525ddf28b45a3b64f1acf6587e2067437bb11e218d" +dependencies = [ + "anstream", + "anstyle", + "env_filter", + "jiff", + "log", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +dependencies = [ + "event-listener", 
+ "pin-project-lite", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "field-offset" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38e2275cc4e4fc009b0669731a1e5ab7ebf11f469eaede2bab9309a5b4d6057f" +dependencies = [ + "memoffset", + "rustc_version", +] + +[[package]] +name = "find-msvc-tools" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" 
+dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-core", + "futures-macro", + "futures-sink", + "futures-task", + "pin-project-lite", + "slab", +] + +[[package]] +name = "gdk-pixbuf" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fd242894c084f4beed508a56952750bce3e96e85eb68fdc153637daa163e10c" +dependencies = [ + "gdk-pixbuf-sys", + "gio", + "glib", + "libc", +] + 
+[[package]] +name = "gdk-pixbuf-sys" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b34f3b580c988bd217e9543a2de59823fafae369d1a055555e5f95a8b130b96" +dependencies = [ + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "gdk4" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4850c9d9c1aecd1a3eb14fadc1cdb0ac0a2298037e116264c7473e1740a32d60" +dependencies = [ + "cairo-rs", + "gdk-pixbuf", + "gdk4-sys", + "gio", + "glib", + "libc", + "pango", +] + +[[package]] +name = "gdk4-sys" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f6eb95798e2b46f279cf59005daf297d5b69555428f185650d71974a910473a" +dependencies = [ + "cairo-sys-rs", + "gdk-pixbuf-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "pango-sys", + "pkg-config", + "system-deps", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "getrandom" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", + "wasip3", +] + +[[package]] +name = "gio" +version = "0.20.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e27e276e7b6b8d50f6376ee7769a71133e80d093bdc363bd0af71664228b831" +dependencies = [ + "futures-channel", + "futures-core", + 
"futures-io", + "futures-util", + "gio-sys", + "glib", + "libc", + "pin-project-lite", + "smallvec", +] + +[[package]] +name = "gio-sys" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "521e93a7e56fc89e84aea9a52cfc9436816a4b363b030260b699950ff1336c83" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", + "windows-sys 0.59.0", +] + +[[package]] +name = "glib" +version = "0.20.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffc4b6e352d4716d84d7dde562dd9aee2a7d48beb872dd9ece7f2d1515b2d683" +dependencies = [ + "bitflags", + "futures-channel", + "futures-core", + "futures-executor", + "futures-task", + "futures-util", + "gio-sys", + "glib-macros", + "glib-sys", + "gobject-sys", + "libc", + "memchr", + "smallvec", +] + +[[package]] +name = "glib-macros" +version = "0.20.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8084af62f09475a3f529b1629c10c429d7600ee1398ae12dd3bf175d74e7145" +dependencies = [ + "heck", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "glib-sys" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ab79e1ed126803a8fb827e3de0e2ff95191912b8db65cee467edb56fc4cc215" +dependencies = [ + "libc", + "system-deps", +] + +[[package]] +name = "gobject-sys" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec9aca94bb73989e3cfdbf8f2e0f1f6da04db4d291c431f444838925c4c63eda" +dependencies = [ + "glib-sys", + "libc", + "system-deps", +] + +[[package]] +name = "graphene-rs" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b86dfad7d14251c9acaf1de63bc8754b7e3b4e5b16777b6f5a748208fe9519b" +dependencies = [ + "glib", + "graphene-sys", + "libc", +] + +[[package]] +name = "graphene-sys" +version = "0.20.10" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "df583a85ba2d5e15e1797e40d666057b28bc2f60a67c9c24145e6db2cc3861ea" +dependencies = [ + "glib-sys", + "libc", + "pkg-config", + "system-deps", +] + +[[package]] +name = "gsk4" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61f5e72f931c8c9f65fbfc89fe0ddc7746f147f822f127a53a9854666ac1f855" +dependencies = [ + "cairo-rs", + "gdk4", + "glib", + "graphene-rs", + "gsk4-sys", + "libc", + "pango", +] + +[[package]] +name = "gsk4-sys" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "755059de55fa6f85a46bde8caf03e2184c96bfda1f6206163c72fb0ea12436dc" +dependencies = [ + "cairo-sys-rs", + "gdk4-sys", + "glib-sys", + "gobject-sys", + "graphene-sys", + "libc", + "pango-sys", + "system-deps", +] + +[[package]] +name = "gtk4" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f274dd0102c21c47bbfa8ebcb92d0464fab794a22fad6c3f3d5f165139a326d6" +dependencies = [ + "cairo-rs", + "field-offset", + "futures-channel", + "gdk-pixbuf", + "gdk4", + "gio", + "glib", + "graphene-rs", + "gsk4", + "gtk4-macros", + "gtk4-sys", + "libc", + "pango", +] + +[[package]] +name = "gtk4-macros" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ed1786c4703dd196baf7e103525ce0cf579b3a63a0570fe653b7ee6bac33999" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "gtk4-sys" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41e03b01e54d77c310e1d98647d73f996d04b2f29b9121fe493ea525a7ec03d6" +dependencies = [ + "cairo-sys-rs", + "gdk-pixbuf-sys", + "gdk4-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "graphene-sys", + "gsk4-sys", + "libc", + "pango-sys", + "system-deps", +] + +[[package]] +name = "h2" +version = "0.4.13" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "hyper" +version = "1.8.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2", + "http", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" +dependencies = [ + "base64", + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2", + "system-configuration", + "tokio", + "tower-service", + "tracing", + "windows-registry", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "jiff" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3e3d65f018c6ae946ab16e80944b97096ed73c35b221d1c478a6c81d8f57940" +dependencies = [ + "jiff-static", + "log", + "portable-atomic", + "portable-atomic-util", + "serde_core", +] + +[[package]] +name = "jiff-static" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17c2b211d863c7fde02cbea8a3c1a439b98e109286554f2860bdded7ff83818" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "js-sys" +version = "0.3.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "libadwaita" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "500135d29c16aabf67baafd3e7741d48e8b8978ca98bac39e589165c8dc78191" +dependencies = [ + "gdk4", + "gio", + "glib", + "gtk4", + "libadwaita-sys", + "libc", + "pango", +] + +[[package]] +name = "libadwaita-sys" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6680988058c2558baf3f548a370e4e78da3bf7f08469daa822ac414842c912db" +dependencies = [ + "gdk4-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "gtk4-sys", + "libc", + "pango-sys", + "system-deps", +] + +[[package]] +name = "libc" +version = "0.2.182" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" + +[[package]] +name = "linux-raw-sys" +version = "0.12.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "native-tls" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "465500e14ea162429d264d44189adc38b199b62b1c21eea9f69e4b73cb03bbf2" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + 
"tempfile", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "openssl" +version = "0.10.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" + +[[package]] +name = "openssl-sys" +version = "0.9.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "pango" +version = "0.20.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6576b311f6df659397043a5fa8a021da8f72e34af180b44f7d57348de691ab5c" +dependencies = [ + "gio", + "glib", + "libc", + "pango-sys", +] + +[[package]] 
+name = "pango-sys" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "186909673fc09be354555c302c0b3dcf753cd9fa08dcb8077fa663c80fb243fa" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-link", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "portable-atomic" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" + +[[package]] +name = "portable-atomic-util" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9db96d7fa8782dd8c15ce32ffe8680bbd1e978a43bf51a34d39483540495f5" +dependencies = [ + "portable-atomic", +] + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "ppg-desktop" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-channel", + "cairo-rs", + "chrono", + "env_logger", + "futures-util", + "gio", + "glib", + "gtk4", + "libadwaita", + "log", + "once_cell", + "pango", + "reqwest", + "serde", + "serde_json", + "tokio", + "tokio-tungstenite", + "toml 0.8.23", + "url", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro-crate" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" +dependencies = [ + "toml_edit 0.23.10+spec-1.0.0", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.44" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.17", +] + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex" +version = "1.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.10" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" + +[[package]] +name = "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-core", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "js-sys", + "log", + "mime", + "native-tls", + "percent-encoding", + "pin-project-lite", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-native-tls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.17", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls" +version = "0.23.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + 
+[[package]] +name = "rustls-pki-types" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +dependencies = [ + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" + +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "security-framework" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" +dependencies = [ + "bitflags", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = 
"1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_spanned" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" +dependencies = [ + "serde_core", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.6" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = 
"sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "system-configuration" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a13f3d0daba03132c0aa9767f98351b3488edc2c100cda2d2ec2b04f3d8d3c8b" +dependencies = [ + "bitflags", + "core-foundation 0.9.4", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "system-deps" +version = "7.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c8f33736f986f16d69b6cb8b03f55ddcad5c41acc4ccc39dd88e84aa805e7f" +dependencies = [ + "cfg-expr", + "heck", + "pkg-config", + "toml 0.9.12+spec-1.1.0", + "version-compare", +] + +[[package]] +name = "target-lexicon" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df7f62577c25e07834649fc3b39fafdc597c0a3527dc1c60129201ccfcbaa50c" + +[[package]] +name = "tempfile" +version = "3.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82a72c767771b47409d2345987fda8628641887d5466101319899796367354a0" +dependencies = [ + "fastrand", + "getrandom 0.4.1", + "once_cell", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tokio" +version = "1.49.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" +dependencies = [ + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edc5f74e248dc973e0dbb7b74c7e0d6fcc301c694ff50049504004ef4d0cdcd9" +dependencies = [ + 
"futures-util", + "log", + "native-tls", + "tokio", + "tokio-native-tls", + "tungstenite", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_edit 0.22.27", +] + +[[package]] +name = "toml" +version = "0.9.12+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf92845e79fc2e2def6a5d828f0801e29a2f8acc037becc5ab08595c7d5e9863" +dependencies = [ + "indexmap", + "serde_core", + "serde_spanned 1.0.4", + "toml_datetime 0.7.5+spec-1.1.0", + "toml_parser", + "toml_writer", + "winnow", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.7.5+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap", + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_edit" +version = "0.23.10+spec-1.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" +dependencies = [ + "indexmap", + "toml_datetime 0.7.5+spec-1.1.0", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_parser" +version = "1.0.9+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4" +dependencies = [ + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "toml_writer" +version = "1.0.6+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" + +[[package]] +name = "tower" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" +dependencies = [ + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "tungstenite" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18e5b8366ee7a95b16d32197d0b2604b43a0be89dc5fac9f8e96ccafbaedda8a" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http", + "httparse", + "log", + "native-tls", + "rand", + "sha1", + "thiserror", + "utf-8", +] + +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version-compare" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c2856837ef78f57382f06b2b8563a2f512f7185d732608fd9176cb3b8edf0e" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9c5522b3a28661442748e09d40924dfb9ca614b21c00d3fd135720e48b67db8" +dependencies = [ + "cfg-if", + "futures-util", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + +[[package]] +name = "web-sys" +version = "0.3.91" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "854ba17bb104abfb26ba36da9729addc7ce7f06f5c0f90f3c391f8461cca21f9" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" +dependencies = [ + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "winnow" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +dependencies = [ + "memchr", +] + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + 
"serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a789c6e490b576db9f7e6b6d661bcc9799f7c0ac8352f56ea20193b2681532e5" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f65c489a7071a749c849713807783f70672b28094011623e200cb86dcb835953" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = 
"zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/linux/Cargo.toml b/linux/Cargo.toml new file mode 100644 index 0000000..a2730e8 --- /dev/null +++ b/linux/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "ppg-desktop" +version = "0.1.0" +edition = "2021" +description = "PPG Desktop — Native Linux GUI for PPG agent orchestration" +license = "MIT" +rust-version = "1.78" + +[dependencies] +gtk4 = { version = "0.9", features = ["v4_12"] } +libadwaita = { version = "0.7", features = ["v1_5"] } +glib = "0.20" +gio = "0.20" +cairo-rs = { version = "0.20", features = ["v1_16"] } +pango = "0.20" +tokio = 
{ version = "1", features = ["full"] } +reqwest = { version = "0.12", features = ["json"] } +tokio-tungstenite = { version = "0.24", features = ["native-tls"] } +futures-util = "0.3" +serde = { version = "1", features = ["derive"] } +serde_json = "1" +toml = "0.8" +url = "2" +chrono = { version = "0.4", features = ["serde"] } +once_cell = "1" +log = "0.4" +env_logger = "0.11" +anyhow = "1" +async-channel = "2" + +[profile.release] +opt-level = 3 +lto = true +strip = true diff --git a/linux/README.md b/linux/README.md new file mode 100644 index 0000000..958c6c1 --- /dev/null +++ b/linux/README.md @@ -0,0 +1,148 @@ +# PPG Desktop — Native Linux App + +Native Linux desktop application for PPG (Pure Point Guard), built with Rust, GTK4, and libadwaita. + +## Features + +- **Sidebar navigation**: Projects > Worktrees > Agents hierarchy with status badges +- **Terminal panes**: VTE terminals attached to tmux sessions (up to 2×3 grid) +- **Command palette**: Agent variant picker + prompt input (Ctrl+Shift+P) +- **Home dashboard**: Agent stats, git commit heatmap, recent commits +- **Settings**: Terminal font/size, appearance (dark/light/system), server connection +- **Real-time updates**: WebSocket integration with auto-reconnect +- **REST API client**: Connects to `ppg serve` HTTP endpoints + +## Prerequisites + +### System Dependencies + +```bash +# Ubuntu/Debian +sudo apt install \ + build-essential \ + libgtk-4-dev \ + libadwaita-1-dev \ + libvte-2.91-gtk4-dev \ + libcairo2-dev \ + libpango1.0-dev \ + pkg-config + +# Fedora +sudo dnf install \ + gtk4-devel \ + libadwaita-devel \ + vte291-gtk4-devel \ + cairo-devel \ + pango-devel + +# Arch +sudo pacman -S gtk4 libadwaita vte4 cairo pango +``` + +### Runtime Dependencies + +- **ppg**: `npm install -g ppg-cli` +- **tmux**: `sudo apt install tmux` (or equivalent) +- **Rust 1.78+**: [rustup.rs](https://rustup.rs) + +## Build + +```bash +cd linux/ +cargo build --release +``` + +The binary will be at 
`target/release/ppg-desktop`. + +## Run + +```bash +# Start the PPG server first +ppg serve start --port 3000 + +# Launch the desktop app +./target/release/ppg-desktop + +# Or with options +./target/release/ppg-desktop --url http://localhost:3000 --token mysecret +``` + +### CLI Options + +| Option | Description | Default | +|--------|-------------|---------| +| `--url`, `-u` | PPG server URL | `http://localhost:3000` | +| `--token`, `-t` | Bearer token | None | +| `--help`, `-h` | Show help | — | +| `--version`, `-V` | Show version | — | + +## Development + +```bash +# Run in debug mode +cargo run -- --url http://localhost:3000 + +# Check compilation +cargo check + +# Run tests +cargo test + +# Type checking +cargo clippy +``` + +## Architecture + +``` +src/ +├── main.rs # CLI arg parsing, GTK app launch +├── app.rs # PpgApplication (adw::Application), CSS loading +├── state.rs # AppState (Arc), Services bundle +├── models/ +│ ├── manifest.rs # Manifest, WorktreeEntry, AgentEntry (serde) +│ ├── agent_variant.rs # Claude, Codex, OpenCode, Terminal variants +│ └── settings.rs # AppSettings (TOML config) +├── api/ +│ ├── client.rs # PpgClient (reqwest HTTP wrapper) +│ └── websocket.rs # WsManager (tokio-tungstenite, glib dispatch) +├── ui/ +│ ├── window.rs # MainWindow (NavigationSplitView) +│ ├── sidebar.rs # SidebarView (ListBox tree) +│ ├── terminal_pane.rs # VTE terminal widget +│ ├── pane_grid.rs # Terminal grid layout (2×3) +│ ├── home_dashboard.rs # Stats + heatmap + commits +│ ├── command_palette.rs # Agent spawn dialog (Ctrl+Shift+P) +│ ├── worktree_detail.rs # Worktree info panel +│ ├── settings_dialog.rs # PreferencesWindow +│ └── setup_view.rs # Prerequisites check +└── util/ + └── shell.rs # Shell escape, tmux commands +``` + +## VTE Terminal Integration + +The terminal pane is designed to use VTE (Virtual Terminal Emulator), the same library used by GNOME Terminal. Each pane attaches to a tmux session to show live agent output. 
+ +If the `vte4` crate is not available, the app shows a placeholder with instructions. To enable VTE: + +1. Install `libvte-2.91-gtk4-dev` +2. Add the `vte4` crate to `Cargo.toml` when bindings are available +3. Or use the C FFI approach documented in `terminal_pane.rs` + +## Settings Storage + +Settings are stored in `~/.config/ppg-desktop/settings.toml`: + +```toml +server_url = "http://localhost:3000" +font_family = "Monospace" +font_size = 12 +appearance = "system" +``` + +## Keyboard Shortcuts + +| Shortcut | Action | +|----------|--------| +| Ctrl+Shift+P | Open command palette | diff --git a/linux/src/api/client.rs b/linux/src/api/client.rs new file mode 100644 index 0000000..20c0372 --- /dev/null +++ b/linux/src/api/client.rs @@ -0,0 +1,247 @@ +use anyhow::{Context, Result}; +use reqwest::Client; +use serde::{Deserialize, Serialize}; + +use crate::models::manifest::Manifest; + +/// REST client for the ppg serve HTTP API. +#[derive(Clone)] +pub struct PpgClient { + client: Client, + base_url: String, + token: Option, +} + +// -- Request/Response types -- + +#[derive(Debug, Serialize)] +pub struct SpawnRequest { + pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub agent: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub prompt: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub count: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SpawnResponse { + pub worktree_id: String, + pub name: String, + pub branch: String, + pub agents: Vec, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SpawnedAgent { + pub id: String, + pub tmux_target: String, + pub session_id: Option, +} + +#[derive(Debug, Serialize)] +pub struct SendKeysRequest { + pub text: String, + pub mode: SendMode, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "kebab-case")] +pub enum SendMode { + Raw, + Literal, + WithEnter, +} + +#[derive(Debug, Serialize)] 
+pub struct RestartRequest { + #[serde(skip_serializing_if = "Option::is_none")] + pub prompt: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub agent: Option, +} + +#[derive(Debug, Serialize)] +pub struct MasterRequest { + #[serde(skip_serializing_if = "Option::is_none")] + pub name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub agent: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub prompt: Option, +} + +#[derive(Debug, Serialize)] +pub struct MergeRequest { + #[serde(skip_serializing_if = "Option::is_none")] + pub strategy: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub cleanup: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub force: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct LogsResponse { + pub agent_id: String, + pub lines: Vec, +} + +#[derive(Debug, Deserialize)] +pub struct HealthResponse { + pub status: String, +} + +impl PpgClient { + pub fn new(base_url: &str, token: Option) -> Self { + Self { + client: Client::new(), + base_url: base_url.trim_end_matches('/').to_string(), + token, + } + } + + pub fn update_connection(&mut self, base_url: &str, token: Option) { + self.base_url = base_url.trim_end_matches('/').to_string(); + self.token = token; + } + + fn url(&self, path: &str) -> String { + format!("{}{}", self.base_url, path) + } + + fn auth_header(&self) -> Option { + self.token.as_ref().map(|t| format!("Bearer {}", t)) + } + + async fn get(&self, path: &str) -> Result { + let mut req = self.client.get(self.url(path)); + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + let resp = req.send().await.context("HTTP GET failed")?; + let status = resp.status(); + if !status.is_success() { + let body = resp.text().await.unwrap_or_default(); + anyhow::bail!("HTTP {} — {}", status, body); + } + resp.json().await.context("Failed to parse JSON response") + } + + async fn post( + &self, + 
path: &str, + body: &B, + ) -> Result { + let mut req = self.client.post(self.url(path)).json(body); + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + let resp = req.send().await.context("HTTP POST failed")?; + let status = resp.status(); + if !status.is_success() { + let body = resp.text().await.unwrap_or_default(); + anyhow::bail!("HTTP {} — {}", status, body); + } + resp.json().await.context("Failed to parse JSON response") + } + + async fn post_no_body(&self, path: &str) -> Result { + let mut req = self.client.post(self.url(path)); + if let Some(auth) = self.auth_header() { + req = req.header("Authorization", auth); + } + let resp = req.send().await.context("HTTP POST failed")?; + let status = resp.status(); + if !status.is_success() { + let body = resp.text().await.unwrap_or_default(); + anyhow::bail!("HTTP {} — {}", status, body); + } + resp.json().await.context("Failed to parse JSON response") + } + + // -- Health -- + + pub async fn health(&self) -> Result { + self.get("/health").await + } + + pub async fn test_connection(&self) -> Result { + match self.health().await { + Ok(h) => Ok(h.status == "ok"), + Err(_) => Ok(false), + } + } + + // -- Status -- + + pub async fn status(&self) -> Result { + self.get("/api/status").await + } + + pub async fn worktrees(&self) -> Result { + self.get("/api/worktrees").await + } + + // -- Spawn -- + + pub async fn spawn(&self, req: &SpawnRequest) -> Result { + self.post("/api/spawn", req).await + } + + pub async fn spawn_master(&self, req: &MasterRequest) -> Result { + self.post("/api/agents/master", req).await + } + + // -- Agent operations -- + + pub async fn agent_logs(&self, agent_id: &str, lines: Option) -> Result { + let path = match lines { + Some(n) => format!("/api/agents/{}/logs?lines={}", agent_id, n), + None => format!("/api/agents/{}/logs", agent_id), + }; + self.get(&path).await + } + + pub async fn send_keys(&self, agent_id: &str, req: &SendKeysRequest) -> Result { + 
self.post(&format!("/api/agents/{}/send", agent_id), req).await + } + + pub async fn kill_agent(&self, agent_id: &str) -> Result { + self.post_no_body(&format!("/api/agents/{}/kill", agent_id)).await + } + + pub async fn restart_agent(&self, agent_id: &str, req: &RestartRequest) -> Result { + self.post(&format!("/api/agents/{}/restart", agent_id), req).await + } + + // -- Worktree operations -- + + pub async fn merge_worktree(&self, worktree_id: &str, req: &MergeRequest) -> Result { + self.post(&format!("/api/worktrees/{}/merge", worktree_id), req).await + } + + pub async fn kill_worktree(&self, worktree_id: &str) -> Result { + self.post_no_body(&format!("/api/worktrees/{}/kill", worktree_id)).await + } + + // -- Reset -- + + pub async fn reset(&self) -> Result { + self.post_no_body("/api/reset").await + } + + // -- Config -- + + pub async fn config(&self) -> Result { + self.get("/api/config").await + } + + pub async fn templates(&self) -> Result { + self.get("/api/templates").await + } +} diff --git a/linux/src/api/mod.rs b/linux/src/api/mod.rs new file mode 100644 index 0000000..de55293 --- /dev/null +++ b/linux/src/api/mod.rs @@ -0,0 +1,2 @@ +pub mod client; +pub mod websocket; diff --git a/linux/src/api/websocket.rs b/linux/src/api/websocket.rs new file mode 100644 index 0000000..70e646c --- /dev/null +++ b/linux/src/api/websocket.rs @@ -0,0 +1,226 @@ +use anyhow::Result; +use async_channel::Sender; +use futures_util::{SinkExt, StreamExt}; +use log::{error, info, warn}; +use serde::{Deserialize, Serialize}; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::Arc; +use tokio::sync::Mutex; +use tokio_tungstenite::{connect_async, tungstenite::Message}; + +use crate::models::manifest::{AgentStatus, Manifest, WorktreeStatus}; + +/// Events dispatched from the WebSocket to the GTK main thread. 
+#[derive(Debug, Clone)] +pub enum WsEvent { + Connected, + Disconnected, + ManifestUpdated(Manifest), + AgentStatusChanged { + worktree_id: String, + agent_id: String, + status: AgentStatus, + worktree_status: WorktreeStatus, + }, + TerminalOutput { + agent_id: String, + data: String, + }, + Error(String), +} + +/// Inbound server events (JSON). +#[derive(Debug, Deserialize)] +#[serde(tag = "type")] +enum ServerEvent { + #[serde(rename = "pong")] + Pong, + #[serde(rename = "manifest:updated", rename_all = "camelCase")] + ManifestUpdated { manifest: Manifest }, + #[serde(rename = "agent:status", rename_all = "camelCase")] + AgentStatus { + worktree_id: String, + agent_id: String, + status: AgentStatus, + worktree_status: WorktreeStatus, + }, + #[serde(rename = "terminal:output", rename_all = "camelCase")] + TerminalOutput { agent_id: String, data: String }, + #[serde(rename = "error")] + Error { code: String, message: String }, +} + +/// Outbound client commands (JSON). +#[derive(Debug, Serialize)] +#[serde(tag = "type")] +enum ClientCommand { + #[serde(rename = "ping")] + Ping, + #[serde(rename = "terminal:subscribe", rename_all = "camelCase")] + TerminalSubscribe { agent_id: String }, + #[serde(rename = "terminal:unsubscribe", rename_all = "camelCase")] + TerminalUnsubscribe { agent_id: String }, + #[serde(rename = "terminal:input", rename_all = "camelCase")] + TerminalInput { agent_id: String, data: String }, +} + +/// Manages WebSocket connection lifecycle with auto-reconnect. +pub struct WsManager { + running: Arc, +} + +impl WsManager { + pub fn new() -> Self { + Self { + running: Arc::new(AtomicBool::new(false)), + } + } + + /// Start the WebSocket connection loop on the tokio runtime. + /// Events are dispatched to the GTK main thread via an async_channel Sender. 
+ pub fn connect( + &self, + base_url: &str, + token: Option, + tx: Sender, + runtime: &tokio::runtime::Handle, + ) { + self.running.store(true, Ordering::SeqCst); + + let ws_url = base_url + .replace("http://", "ws://") + .replace("https://", "wss://"); + let ws_url = format!("{}/api/events", ws_url.trim_end_matches('/')); + let running = self.running.clone(); + + runtime.spawn(async move { + let mut backoff_ms: u64 = 1000; + let max_backoff_ms: u64 = 30_000; + + while running.load(Ordering::SeqCst) { + info!("WebSocket connecting to {}", ws_url); + + let url = if let Some(ref t) = token { + format!("{}?token={}", ws_url, t) + } else { + ws_url.clone() + }; + + match connect_async(&url).await { + Ok((ws_stream, _)) => { + backoff_ms = 1000; + let _ = tx.send(WsEvent::Connected).await; + info!("WebSocket connected"); + + let (write, mut read) = ws_stream.split(); + let write = Arc::new(Mutex::new(write)); + + // Ping keepalive every 30s — actually sends a JSON ping command + let running_ping = running.clone(); + let write_ping = write.clone(); + let ping_handle = tokio::spawn(async move { + let mut interval = + tokio::time::interval(std::time::Duration::from_secs(30)); + loop { + interval.tick().await; + if !running_ping.load(Ordering::SeqCst) { + break; + } + let ping_msg = serde_json::to_string(&ClientCommand::Ping) + .unwrap_or_default(); + let mut w = write_ping.lock().await; + if w.send(Message::Text(ping_msg.into())).await.is_err() { + break; + } + } + }); + + while let Some(msg) = read.next().await { + if !running.load(Ordering::SeqCst) { + break; + } + match msg { + Ok(Message::Text(text)) => { + if let Err(e) = handle_message(&text, &tx).await { + warn!("Failed to handle WS message: {}", e); + } + } + Ok(Message::Ping(data)) => { + let mut w = write.lock().await; + let _ = w.send(Message::Pong(data)).await; + } + Ok(Message::Close(_)) => { + info!("WebSocket closed by server"); + break; + } + Err(e) => { + error!("WebSocket error: {}", e); + break; + } + _ 
=> {} + } + } + + ping_handle.abort(); + let _ = tx.send(WsEvent::Disconnected).await; + } + Err(e) => { + error!("WebSocket connection failed: {}", e); + let _ = tx + .send(WsEvent::Error(format!("Connection failed: {}", e))) + .await; + } + } + + if !running.load(Ordering::SeqCst) { + break; + } + + // Exponential backoff + info!("Reconnecting in {}ms...", backoff_ms); + tokio::time::sleep(std::time::Duration::from_millis(backoff_ms)).await; + backoff_ms = (backoff_ms * 2).min(max_backoff_ms); + } + + info!("WebSocket connection loop ended"); + }); + } + + pub fn disconnect(&self) { + self.running.store(false, Ordering::SeqCst); + } +} + +async fn handle_message(text: &str, tx: &Sender) -> Result<()> { + let event: ServerEvent = serde_json::from_str(text)?; + match event { + ServerEvent::Pong => { /* Keepalive ACK */ } + ServerEvent::ManifestUpdated { manifest } => { + let _ = tx.send(WsEvent::ManifestUpdated(manifest)).await; + } + ServerEvent::AgentStatus { + worktree_id, + agent_id, + status, + worktree_status, + } => { + let _ = tx + .send(WsEvent::AgentStatusChanged { + worktree_id, + agent_id, + status, + worktree_status, + }) + .await; + } + ServerEvent::TerminalOutput { agent_id, data } => { + let _ = tx.send(WsEvent::TerminalOutput { agent_id, data }).await; + } + ServerEvent::Error { code, message } => { + let _ = tx + .send(WsEvent::Error(format!("{}: {}", code, message))) + .await; + } + } + Ok(()) +} diff --git a/linux/src/app.rs b/linux/src/app.rs new file mode 100644 index 0000000..34d0558 --- /dev/null +++ b/linux/src/app.rs @@ -0,0 +1,99 @@ +use gtk4::prelude::*; +use gtk4::{self as gtk, gio}; +use libadwaita as adw; +use libadwaita::prelude::*; + +use crate::models::settings::{AppSettings, Appearance}; +use crate::state::Services; +use crate::ui::window::MainWindow; + +/// PPG Desktop Application. 
+pub struct PpgApplication { + app: adw::Application, + services: Services, +} + +impl PpgApplication { + pub fn new(server_url: Option, token: Option) -> Self { + let app = adw::Application::builder() + .application_id("dev.ppg.desktop") + .flags(gio::ApplicationFlags::default()) + .build(); + + // Load settings, applying CLI overrides + let mut settings = AppSettings::load(); + if let Some(url) = server_url { + settings.server_url = url; + } + if let Some(t) = token { + settings.bearer_token = Some(t); + } + + let services = Services::new(settings); + + Self { app, services } + } + + pub fn run(&self) -> i32 { + let services = self.services.clone(); + + self.app.connect_startup(move |app| { + // Apply saved appearance + let settings = services.state.settings(); + let style_manager = adw::StyleManager::default(); + match settings.appearance { + Appearance::Dark => { + style_manager.set_color_scheme(adw::ColorScheme::ForceDark); + } + Appearance::Light => { + style_manager.set_color_scheme(adw::ColorScheme::ForceLight); + } + Appearance::System => { + style_manager.set_color_scheme(adw::ColorScheme::Default); + } + } + + // Load CSS + load_css(); + + // Register global actions + let about_action = gio::SimpleAction::new("about", None); + let app_about = app.clone(); + about_action.connect_activate(move |_, _| { + let about = adw::AboutDialog::builder() + .application_name("PPG Desktop") + .application_icon("utilities-terminal-symbolic") + .developer_name("2wit Studios") + .version("0.1.0") + .comments("Native Linux GUI for PPG agent orchestration") + .website("https://github.com/2witstudios/ppg-cli") + .license_type(gtk::License::MitX11) + .build(); + if let Some(win) = app_about.active_window() { + about.present(Some(&win)); + } + }); + app.add_action(&about_action); + }); + + let services_activate = self.services.clone(); + self.app.connect_activate(move |app| { + let main_window = MainWindow::new(app, services_activate.clone()); + main_window.present(); + 
main_window.start(); + }); + + self.app.run().into() + } +} + +fn load_css() { + let provider = gtk::CssProvider::new(); + provider.load_from_string(include_str!("style.css")); + + gtk::style_context_add_provider_for_display( + >k4::gdk::Display::default().expect("Could not get default display"), + &provider, + gtk::STYLE_PROVIDER_PRIORITY_APPLICATION, + ); +} diff --git a/linux/src/main.rs b/linux/src/main.rs new file mode 100644 index 0000000..0f08469 --- /dev/null +++ b/linux/src/main.rs @@ -0,0 +1,62 @@ +mod api; +mod app; +mod models; +mod state; +mod ui; +mod util; + +use app::PpgApplication; + +fn main() { + // Initialize logging + env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")) + .format_timestamp_millis() + .init(); + + // Parse CLI arguments + let args: Vec = std::env::args().collect(); + let mut server_url: Option = None; + let mut token: Option = None; + + let mut i = 1; + while i < args.len() { + match args[i].as_str() { + "--url" | "-u" => { + if i + 1 < args.len() { + server_url = Some(args[i + 1].clone()); + i += 1; + } + } + "--token" | "-t" => { + if i + 1 < args.len() { + token = Some(args[i + 1].clone()); + i += 1; + } + } + "--help" | "-h" => { + println!("PPG Desktop — Native Linux GUI for PPG agent orchestration"); + println!(); + println!("USAGE:"); + println!(" ppg-desktop [OPTIONS]"); + println!(); + println!("OPTIONS:"); + println!(" -u, --url PPG server URL (default: http://localhost:3000)"); + println!(" -t, --token Bearer token for authentication"); + println!(" -h, --help Print help information"); + println!(" -V, --version Print version information"); + std::process::exit(0); + } + "--version" | "-V" => { + println!("ppg-desktop {}", env!("CARGO_PKG_VERSION")); + std::process::exit(0); + } + _ => {} + } + i += 1; + } + + log::info!("Starting PPG Desktop"); + + let app = PpgApplication::new(server_url, token); + std::process::exit(app.run()); +} diff --git a/linux/src/models/agent_variant.rs 
b/linux/src/models/agent_variant.rs new file mode 100644 index 0000000..614db68 --- /dev/null +++ b/linux/src/models/agent_variant.rs @@ -0,0 +1,95 @@ +/// Agent variant definitions matching the macOS app's AgentVariant.swift. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PromptDelivery { + /// Append prompt as a positional argument. + PositionalArg, + /// Send prompt as tmux keystrokes after launch. + SendKeys, + /// Not applicable (e.g., plain worktree). + None, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum VariantKind { + Agent, + Terminal, + Worktree, +} + +#[derive(Debug, Clone)] +pub struct AgentVariant { + pub id: &'static str, + pub display_name: &'static str, + pub icon_name: &'static str, + pub subtitle: &'static str, + pub default_command: &'static str, + pub prompt_delivery: PromptDelivery, + pub prompt_placeholder: &'static str, + pub kind: VariantKind, +} + +pub const CLAUDE: AgentVariant = AgentVariant { + id: "claude", + display_name: "Claude", + icon_name: "user-available-symbolic", + subtitle: "AI coding agent", + default_command: "claude --dangerously-skip-permissions", + prompt_delivery: PromptDelivery::PositionalArg, + prompt_placeholder: "Enter prompt...", + kind: VariantKind::Agent, +}; + +pub const CODEX: AgentVariant = AgentVariant { + id: "codex", + display_name: "Codex", + icon_name: "applications-engineering-symbolic", + subtitle: "OpenAI coding CLI", + default_command: "codex --full-auto", + prompt_delivery: PromptDelivery::PositionalArg, + prompt_placeholder: "Enter prompt...", + kind: VariantKind::Agent, +}; + +pub const OPENCODE: AgentVariant = AgentVariant { + id: "opencode", + display_name: "OpenCode", + icon_name: "applications-science-symbolic", + subtitle: "Open-source agent", + default_command: "opencode", + prompt_delivery: PromptDelivery::SendKeys, + prompt_placeholder: "Enter prompt...", + kind: VariantKind::Agent, +}; + +pub const TERMINAL: AgentVariant = AgentVariant { + id: "terminal", + display_name: 
"Terminal", + icon_name: "utilities-terminal-symbolic", + subtitle: "Shell session", + default_command: "", + prompt_delivery: PromptDelivery::SendKeys, + prompt_placeholder: "Enter initial command (optional)...", + kind: VariantKind::Terminal, +}; + +pub const WORKTREE: AgentVariant = AgentVariant { + id: "worktree", + display_name: "Worktree", + icon_name: "folder-symbolic", + subtitle: "Git worktree", + default_command: "", + prompt_delivery: PromptDelivery::None, + prompt_placeholder: "Enter worktree name...", + kind: VariantKind::Worktree, +}; + +pub fn all_variants() -> Vec<&'static AgentVariant> { + vec![&CLAUDE, &CODEX, &OPENCODE, &TERMINAL, &WORKTREE] +} + +pub fn pane_variants() -> Vec<&'static AgentVariant> { + all_variants() + .into_iter() + .filter(|v| v.kind != VariantKind::Worktree) + .collect() +} diff --git a/linux/src/models/manifest.rs b/linux/src/models/manifest.rs new file mode 100644 index 0000000..087ae86 --- /dev/null +++ b/linux/src/models/manifest.rs @@ -0,0 +1,123 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum AgentStatus { + Running, + Idle, + Exited, + Gone, +} + +impl AgentStatus { + pub fn css_class(&self) -> &'static str { + match self { + Self::Running => "status-running", + Self::Idle => "status-idle", + Self::Exited => "status-exited", + Self::Gone => "status-gone", + } + } + + pub fn label(&self) -> &'static str { + match self { + Self::Running => "Running", + Self::Idle => "Idle", + Self::Exited => "Exited", + Self::Gone => "Gone", + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum WorktreeStatus { + Active, + Merging, + Merged, + Failed, + Cleaned, +} + +impl WorktreeStatus { + pub fn css_class(&self) -> &'static str { + match self { + Self::Active => "status-running", + Self::Merging => "status-idle", + 
Self::Merged => "status-exited", + Self::Failed => "status-failed", + Self::Cleaned => "status-gone", + } + } + + pub fn label(&self) -> &'static str { + match self { + Self::Active => "Active", + Self::Merging => "Merging", + Self::Merged => "Merged", + Self::Failed => "Failed", + Self::Cleaned => "Cleaned", + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct AgentEntry { + pub id: String, + pub name: String, + pub agent_type: String, + pub status: AgentStatus, + pub tmux_target: String, + pub prompt: String, + pub started_at: String, + pub exit_code: Option, + pub session_id: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WorktreeEntry { + pub id: String, + pub name: String, + pub path: String, + pub branch: String, + pub base_branch: String, + pub status: WorktreeStatus, + pub tmux_window: String, + pub pr_url: Option, + pub agents: HashMap, + pub created_at: String, + pub merged_at: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Manifest { + pub version: u32, + pub project_root: String, + pub session_name: String, + pub worktrees: HashMap, + pub created_at: String, + pub updated_at: String, +} + +impl Manifest { + /// Count agents across all worktrees matching a given status. + pub fn count_agents_by_status(&self, status: AgentStatus) -> usize { + self.worktrees + .values() + .flat_map(|wt| wt.agents.values()) + .filter(|a| a.status == status) + .count() + } + + /// Get all agents as (worktree_id, agent) pairs. 
+ pub fn all_agents(&self) -> Vec<(&str, &AgentEntry)> { + self.worktrees + .iter() + .flat_map(|(wt_id, wt)| wt.agents.values().map(move |a| (wt_id.as_str(), a))) + .collect() + } +} diff --git a/linux/src/models/mod.rs b/linux/src/models/mod.rs new file mode 100644 index 0000000..bd6a745 --- /dev/null +++ b/linux/src/models/mod.rs @@ -0,0 +1,3 @@ +pub mod agent_variant; +pub mod manifest; +pub mod settings; diff --git a/linux/src/models/settings.rs b/linux/src/models/settings.rs new file mode 100644 index 0000000..967175f --- /dev/null +++ b/linux/src/models/settings.rs @@ -0,0 +1,92 @@ +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AppSettings { + #[serde(default = "default_server_url")] + pub server_url: String, + #[serde(default)] + pub bearer_token: Option, + #[serde(default = "default_font_family")] + pub font_family: String, + #[serde(default = "default_font_size")] + pub font_size: u32, + #[serde(default)] + pub appearance: Appearance, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "lowercase")] +pub enum Appearance { + #[default] + System, + Dark, + Light, +} + +impl Appearance { + pub fn label(&self) -> &'static str { + match self { + Self::System => "System", + Self::Dark => "Dark", + Self::Light => "Light", + } + } + + pub fn all() -> &'static [Appearance] { + &[Self::System, Self::Dark, Self::Light] + } +} + +fn default_server_url() -> String { + "http://localhost:3000".to_string() +} + +fn default_font_family() -> String { + "Monospace".to_string() +} + +fn default_font_size() -> u32 { + 12 +} + +impl Default for AppSettings { + fn default() -> Self { + Self { + server_url: default_server_url(), + bearer_token: None, + font_family: default_font_family(), + font_size: default_font_size(), + appearance: Appearance::default(), + } + } +} + +impl AppSettings { + fn config_path() -> PathBuf { + let config_dir = 
glib::user_config_dir().join("ppg-desktop"); + config_dir.join("settings.toml") + } + + pub fn load() -> Self { + let path = Self::config_path(); + if path.exists() { + match std::fs::read_to_string(&path) { + Ok(content) => toml::from_str(&content).unwrap_or_default(), + Err(_) => Self::default(), + } + } else { + Self::default() + } + } + + pub fn save(&self) -> anyhow::Result<()> { + let path = Self::config_path(); + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent)?; + } + let content = toml::to_string_pretty(self)?; + std::fs::write(path, content)?; + Ok(()) + } +} diff --git a/linux/src/state.rs b/linux/src/state.rs new file mode 100644 index 0000000..b89f258 --- /dev/null +++ b/linux/src/state.rs @@ -0,0 +1,184 @@ +use std::sync::{Arc, RwLock}; + +use crate::api::client::PpgClient; +use crate::api::websocket::{WsEvent, WsManager}; +use crate::models::manifest::Manifest; +use crate::models::settings::AppSettings; + +/// Connection lifecycle states. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ConnectionState { + Disconnected, + Connecting, + Connected, + Reconnecting, + Error(String), +} + +impl ConnectionState { + pub fn label(&self) -> &str { + match self { + Self::Disconnected => "Disconnected", + Self::Connecting => "Connecting...", + Self::Connected => "Connected", + Self::Reconnecting => "Reconnecting...", + Self::Error(msg) => msg.as_str(), + } + } + + pub fn css_class(&self) -> &str { + match self { + Self::Connected => "status-running", + Self::Connecting | Self::Reconnecting => "status-idle", + Self::Disconnected => "status-gone", + Self::Error(_) => "status-failed", + } + } +} + +/// Shared application state — thread-safe via Arc. 
+#[derive(Clone)] +pub struct AppState { + inner: Arc>, +} + +struct AppStateInner { + pub manifest: Option, + pub connection: ConnectionState, + pub settings: AppSettings, +} + +impl AppState { + pub fn new(settings: AppSettings) -> Self { + Self { + inner: Arc::new(RwLock::new(AppStateInner { + manifest: None, + connection: ConnectionState::Disconnected, + settings, + })), + } + } + + pub fn manifest(&self) -> Option { + self.inner.read().unwrap().manifest.clone() + } + + pub fn set_manifest(&self, manifest: Manifest) { + self.inner.write().unwrap().manifest = Some(manifest); + } + + pub fn connection_state(&self) -> ConnectionState { + self.inner.read().unwrap().connection.clone() + } + + pub fn set_connection_state(&self, state: ConnectionState) { + self.inner.write().unwrap().connection = state; + } + + pub fn settings(&self) -> AppSettings { + self.inner.read().unwrap().settings.clone() + } + + pub fn update_settings(&self, f: F) { + let mut inner = self.inner.write().unwrap(); + f(&mut inner.settings); + let _ = inner.settings.save(); + } +} + +/// Bundles all shared services for easy passing to UI components. +/// +/// The `ws_tx` sender is created once and shared. Any code path that needs +/// to reconnect the WebSocket can grab a clone of `ws_tx` and pass it to +/// `WsManager::connect()`. The receiver end is drained by a single +/// `glib::spawn_future_local` loop set up in `MainWindow::connect()`. +#[derive(Clone)] +pub struct Services { + pub state: AppState, + pub client: Arc>, + pub ws: Arc>, + pub runtime: tokio::runtime::Handle, + /// Persistent sender for WS events. Cloned for each connect() call. + /// The receiver is drained on the GTK main thread. + pub ws_tx: async_channel::Sender, + /// Receiver stored here so the event loop can be started once. + ws_rx: Arc>>>, + /// Toast message sender — UI components send error/info messages here. + pub toast_tx: async_channel::Sender, + toast_rx: Arc>>>, +} + +/// Message for the toast overlay. 
+#[derive(Debug, Clone)] +pub struct ToastMessage { + pub text: String, + pub is_error: bool, +} + +impl Services { + pub fn new(settings: AppSettings) -> Self { + let client = PpgClient::new(&settings.server_url, settings.bearer_token.clone()); + let runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build() + .expect("Failed to create tokio runtime"); + let handle = runtime.handle().clone(); + + // Keep runtime alive by leaking it — it lives for the app's lifetime. + std::mem::forget(runtime); + + let (ws_tx, ws_rx) = async_channel::unbounded::(); + let (toast_tx, toast_rx) = async_channel::unbounded::(); + + Self { + state: AppState::new(settings), + client: Arc::new(RwLock::new(client)), + ws: Arc::new(RwLock::new(WsManager::new())), + runtime: handle, + ws_tx, + ws_rx: Arc::new(RwLock::new(Some(ws_rx))), + toast_tx, + toast_rx: Arc::new(RwLock::new(Some(toast_rx))), + } + } + + /// Take the WS event receiver. Can only be called once (by the window). + pub fn take_ws_rx(&self) -> Option> { + self.ws_rx.write().unwrap().take() + } + + /// Take the toast receiver. Can only be called once (by the window). + pub fn take_toast_rx(&self) -> Option> { + self.toast_rx.write().unwrap().take() + } + + /// Reconnect the WebSocket using the current settings. + /// The events flow into the same `ws_tx` → GTK event loop. + pub fn reconnect_ws(&self) { + let settings = self.state.settings(); + let ws = self.ws.read().unwrap(); + ws.disconnect(); + ws.connect( + &settings.server_url, + settings.bearer_token.clone(), + self.ws_tx.clone(), + &self.runtime, + ); + } + + /// Send a toast message to the UI. + pub fn toast(&self, text: impl Into) { + let _ = self.toast_tx.try_send(ToastMessage { + text: text.into(), + is_error: false, + }); + } + + /// Send an error toast message to the UI. 
+    pub fn toast_error(&self, text: impl Into<String>) {
+        let _ = self.toast_tx.try_send(ToastMessage {
+            text: text.into(),
+            is_error: true,
+        });
+    }
+}
diff --git a/linux/src/style.css b/linux/src/style.css
new file mode 100644
index 0000000..f5d4bf0
--- /dev/null
+++ b/linux/src/style.css
@@ -0,0 +1,29 @@
+/* PPG Desktop — Custom CSS */
+
+/* Status colors */
+.status-running { color: #34d399; }
+.status-idle { color: #fbbf24; }
+.status-exited { color: #60a5fa; }
+.status-gone { color: #9ca3af; }
+.status-failed { color: #f87171; }
+
+/* Sidebar styling */
+.sidebar {
+  background-color: alpha(@window_bg_color, 0.95);
+}
+
+/* Card styling for stats */
+.card {
+  padding: 8px;
+  border-radius: 12px;
+}
+
+/* Monospace text */
+.monospace {
+  font-family: "JetBrains Mono", "Fira Code", "Source Code Pro", monospace;
+}
+
+/* Navigation sidebar selection */
+.navigation-sidebar row:selected {
+  border-radius: 8px;
+}
diff --git a/linux/src/ui/command_palette.rs b/linux/src/ui/command_palette.rs
new file mode 100644
index 0000000..86b5284
--- /dev/null
+++ b/linux/src/ui/command_palette.rs
@@ -0,0 +1,294 @@
+use gtk4::prelude::*;
+use gtk4::{self as gtk};
+use libadwaita as adw;
+use libadwaita::prelude::*;
+
+use crate::api::client::SpawnRequest;
+use crate::models::agent_variant::{self, AgentVariant, VariantKind};
+use crate::state::Services;
+
+/// Command palette overlay (Ctrl+Shift+P) for spawning agents.
+/// +/// Phase 1: Pick an agent variant +/// Phase 2: Enter a prompt +#[derive(Clone)] +pub struct CommandPalette { + dialog: adw::Dialog, + #[allow(dead_code)] + services: Services, +} + +impl CommandPalette { + pub fn new(services: Services) -> Self { + let dialog = adw::Dialog::new(); + dialog.set_title("Command Palette"); + dialog.set_content_width(500); + dialog.set_content_height(400); + + let content = gtk::Box::new(gtk::Orientation::Vertical, 0); + + // Phase 1: Variant selection + let search_entry = gtk::SearchEntry::new(); + search_entry.set_placeholder_text(Some("Search agent types...")); + search_entry.set_margin_top(12); + search_entry.set_margin_start(12); + search_entry.set_margin_end(12); + content.append(&search_entry); + + let variant_list = gtk::ListBox::new(); + variant_list.set_selection_mode(gtk::SelectionMode::Single); + variant_list.add_css_class("boxed-list"); + variant_list.set_margin_top(8); + variant_list.set_margin_start(12); + variant_list.set_margin_end(12); + + for variant in agent_variant::all_variants() { + let row = create_variant_row(variant); + variant_list.append(&row); + } + + let variant_scroll = gtk::ScrolledWindow::new(); + variant_scroll.set_vexpand(true); + variant_scroll.set_child(Some(&variant_list)); + content.append(&variant_scroll); + + // Phase 2: Prompt input (hidden initially) + let prompt_box = gtk::Box::new(gtk::Orientation::Vertical, 8); + prompt_box.set_margin_top(12); + prompt_box.set_margin_start(12); + prompt_box.set_margin_end(12); + prompt_box.set_margin_bottom(12); + prompt_box.set_visible(false); + + let selected_label = gtk::Label::new(None); + selected_label.add_css_class("title-4"); + selected_label.set_halign(gtk::Align::Start); + prompt_box.append(&selected_label); + + let text_scroll = gtk::ScrolledWindow::new(); + text_scroll.set_vexpand(true); + text_scroll.set_min_content_height(120); + + let text_view = gtk::TextView::new(); + text_view.set_wrap_mode(gtk::WrapMode::WordChar); + 
text_view.set_top_margin(8); + text_view.set_bottom_margin(8); + text_view.set_left_margin(8); + text_view.set_right_margin(8); + text_view.add_css_class("monospace"); + + // Key controller for Enter to submit (Shift+Enter for newline) + let spawn_trigger = std::rc::Rc::new(std::cell::Cell::new(false)); + let spawn_trigger_key = spawn_trigger.clone(); + let key_controller = gtk::EventControllerKey::new(); + key_controller.connect_key_pressed(move |_, keyval, _keycode, modifiers| { + if keyval == gtk4::gdk::Key::Return + && !modifiers.contains(gtk4::gdk::ModifierType::SHIFT_MASK) + { + spawn_trigger_key.set(true); + return gtk4::glib::Propagation::Stop; + } + gtk4::glib::Propagation::Proceed + }); + text_view.add_controller(key_controller); + + text_scroll.set_child(Some(&text_view)); + prompt_box.append(&text_scroll); + + let button_row = gtk::Box::new(gtk::Orientation::Horizontal, 8); + button_row.set_halign(gtk::Align::End); + + let back_button = gtk::Button::with_label("Back"); + let spawn_button = gtk::Button::with_label("Spawn"); + spawn_button.add_css_class("suggested-action"); + + button_row.append(&back_button); + button_row.append(&spawn_button); + prompt_box.append(&button_row); + + content.append(&prompt_box); + dialog.set_child(Some(&content)); + + // -- Filtering -- + let variant_list_filter = variant_list.clone(); + search_entry.connect_search_changed(move |entry| { + let query = entry.text().to_lowercase(); + let mut idx = 0; + while let Some(row) = variant_list_filter.row_at_index(idx) { + let name = row.widget_name(); + let visible = query.is_empty() || name.as_str().contains(&query); + row.set_visible(visible); + idx += 1; + } + }); + + // -- Phase transitions -- + let prompt_box_ref = prompt_box.clone(); + let variant_scroll_ref = variant_scroll.clone(); + let search_ref = search_entry.clone(); + let selected_label_ref = selected_label.clone(); + let text_view_ref = text_view.clone(); + + let selected_variant_id = 
std::rc::Rc::new(std::cell::RefCell::new(String::new())); + let selected_id_activate = selected_variant_id.clone(); + + variant_list.connect_row_activated(move |_, row| { + let variant_id = row.widget_name().to_string(); + *selected_id_activate.borrow_mut() = variant_id.clone(); + + let display = agent_variant::all_variants() + .iter() + .find(|v| v.id == variant_id) + .map(|v| v.display_name) + .unwrap_or("Agent"); + + selected_label_ref.set_text(&format!("Spawn {} Agent", display)); + + variant_scroll_ref.set_visible(false); + search_ref.set_visible(false); + prompt_box_ref.set_visible(true); + text_view_ref.grab_focus(); + }); + + // Back button + let prompt_box_back = prompt_box.clone(); + let variant_scroll_back = variant_scroll.clone(); + let search_back = search_entry.clone(); + back_button.connect_clicked(move |_| { + prompt_box_back.set_visible(false); + variant_scroll_back.set_visible(true); + search_back.set_visible(true); + search_back.grab_focus(); + }); + + // Spawn button + let services_spawn = services.clone(); + let dialog_ref = dialog.clone(); + let selected_id_spawn = selected_variant_id.clone(); + let text_view_spawn = text_view.clone(); + let do_spawn = std::rc::Rc::new(move || { + let variant_id = selected_id_spawn.borrow().clone(); + let buffer = text_view_spawn.buffer(); + let prompt = buffer + .text(&buffer.start_iter(), &buffer.end_iter(), false) + .to_string(); + + if prompt.trim().is_empty() && variant_id != "terminal" { + return; + } + + let client = services_spawn.client.clone(); + let variant = variant_id.clone(); + let prompt_text = prompt.clone(); + let toast_tx = services_spawn.toast_tx.clone(); + services_spawn.runtime.spawn(async move { + let req = SpawnRequest { + name: variant.clone(), + agent: Some(variant), + prompt: if prompt_text.is_empty() { + None + } else { + Some(prompt_text) + }, + count: None, + }; + match client.read().unwrap().spawn(&req).await { + Ok(resp) => { + log::info!("Spawned: {} in {}", resp.name, 
resp.worktree_id); + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Spawned {} in {}", resp.name, resp.worktree_id), + is_error: false, + }).await; + } + Err(e) => { + log::error!("Spawn failed: {}", e); + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Spawn failed: {}", e), + is_error: true, + }).await; + } + } + }); + + dialog_ref.close(); + }); + + let do_spawn_btn = do_spawn.clone(); + spawn_button.connect_clicked(move |_| { + do_spawn_btn(); + }); + + // Check the Enter key trigger on idle. + // Store the source ID so we can cancel it when the dialog closes. + let do_spawn_key = do_spawn.clone(); + let spawn_trigger_check = spawn_trigger.clone(); + let timer_id = std::rc::Rc::new(std::cell::RefCell::new(Some( + glib::timeout_add_local(std::time::Duration::from_millis(50), move || { + if spawn_trigger_check.get() { + spawn_trigger_check.set(false); + do_spawn_key(); + return glib::ControlFlow::Break; + } + glib::ControlFlow::Continue + }), + ))); + + // Cancel the timer when the dialog is closed without spawning + let timer_id_close = timer_id.clone(); + dialog.connect_closed(move |_| { + if let Some(id) = timer_id_close.borrow_mut().take() { + id.remove(); + } + }); + + Self { dialog, services } + } + + pub fn present(&self, parent: &adw::ApplicationWindow) { + self.dialog.present(Some(parent)); + } +} + +fn create_variant_row(variant: &AgentVariant) -> gtk::ListBoxRow { + let row = gtk::ListBoxRow::new(); + row.set_widget_name(variant.id); + + let hbox = gtk::Box::new(gtk::Orientation::Horizontal, 12); + hbox.set_margin_top(8); + hbox.set_margin_bottom(8); + hbox.set_margin_start(12); + hbox.set_margin_end(12); + + let icon = gtk::Image::from_icon_name(variant.icon_name); + icon.set_pixel_size(24); + + let vbox = gtk::Box::new(gtk::Orientation::Vertical, 2); + vbox.set_hexpand(true); + + let name_label = gtk::Label::new(Some(variant.display_name)); + name_label.set_halign(gtk::Align::Start); + 
name_label.add_css_class("heading"); + + let subtitle_label = gtk::Label::new(Some(variant.subtitle)); + subtitle_label.set_halign(gtk::Align::Start); + subtitle_label.add_css_class("caption"); + subtitle_label.add_css_class("dim-label"); + + vbox.append(&name_label); + vbox.append(&subtitle_label); + + let kind_label = gtk::Label::new(Some(match variant.kind { + VariantKind::Agent => "Agent", + VariantKind::Terminal => "Terminal", + VariantKind::Worktree => "Worktree", + })); + kind_label.add_css_class("caption"); + kind_label.add_css_class("dim-label"); + + hbox.append(&icon); + hbox.append(&vbox); + hbox.append(&kind_label); + row.set_child(Some(&hbox)); + + row +} diff --git a/linux/src/ui/home_dashboard.rs b/linux/src/ui/home_dashboard.rs new file mode 100644 index 0000000..ae56c2c --- /dev/null +++ b/linux/src/ui/home_dashboard.rs @@ -0,0 +1,387 @@ +use cairo; +use gtk4::prelude::*; +use gtk4::{self as gtk}; + +use crate::models::manifest::{AgentStatus, Manifest}; +use crate::state::Services; + +use std::cell::RefCell; +use std::collections::HashMap; +use std::rc::Rc; + +/// Home dashboard view with stats, commit heatmap, and recent commits. 
+#[derive(Clone)]
+pub struct HomeDashboard {
+    container: gtk::Box,
+    stats_running: gtk::Label,
+    stats_completed: gtk::Label,
+    stats_failed: gtk::Label,
+    stats_total: gtk::Label,
+    worktree_count: gtk::Label,
+    project_label: gtk::Label,
+    heatmap_area: gtk::DrawingArea,
+    commits_list: gtk::ListBox,
+    heatmap_data: Rc<RefCell<Vec<u32>>>,
+    services: Services,
+}
+
+impl HomeDashboard {
+    pub fn new(services: Services) -> Self {
+        let container = gtk::Box::new(gtk::Orientation::Vertical, 16);
+        container.set_margin_top(24);
+        container.set_margin_bottom(24);
+        container.set_margin_start(24);
+        container.set_margin_end(24);
+
+        // -- Header --
+        let header_label = gtk::Label::new(Some("Dashboard"));
+        header_label.add_css_class("title-1");
+        header_label.set_halign(gtk::Align::Start);
+        container.append(&header_label);
+
+        let project_label = gtk::Label::new(Some("No project connected"));
+        project_label.add_css_class("dim-label");
+        project_label.set_halign(gtk::Align::Start);
+        container.append(&project_label);
+
+        // -- Stats cards row --
+        let stats_row = gtk::Box::new(gtk::Orientation::Horizontal, 12);
+        stats_row.set_homogeneous(true);
+
+        let (running_card, stats_running) = create_stat_card("Running", "0", "status-running");
+        let (completed_card, stats_completed) = create_stat_card("Completed", "0", "status-exited");
+        let (failed_card, stats_failed) = create_stat_card("Failed", "0", "status-failed");
+        let (killed_card, stats_total) = create_stat_card("Killed", "0", "status-gone");
+
+        stats_row.append(&running_card);
+        stats_row.append(&completed_card);
+        stats_row.append(&failed_card);
+        stats_row.append(&killed_card);
+        container.append(&stats_row);
+
+        // Worktree count
+        let worktree_count = gtk::Label::new(Some("0 worktrees"));
+        worktree_count.add_css_class("caption");
+        worktree_count.add_css_class("dim-label");
+        worktree_count.set_halign(gtk::Align::Start);
+        container.append(&worktree_count);
+
+        // -- Commit heatmap --
+        let heatmap_label = gtk::Label::new(Some("Commit Activity (90 days)"));
+        heatmap_label.add_css_class("title-4");
+        heatmap_label.set_halign(gtk::Align::Start);
+        heatmap_label.set_margin_top(16);
+        container.append(&heatmap_label);
+
+        // 91 buckets = today plus the previous 90 days, oldest first.
+        let heatmap_data: Rc<RefCell<Vec<u32>>> = Rc::new(RefCell::new(vec![0; 91]));
+
+        let heatmap_area = gtk::DrawingArea::new();
+        heatmap_area.set_content_width(13 * 16 + 12 * 2); // 13 cols at 16px step (14px cell + 2px gap in draw_heatmap)
+        heatmap_area.set_content_height(7 * 16 + 6 * 2); // 7 rows at the same step
+
+        let data_ref = heatmap_data.clone();
+        heatmap_area.set_draw_func(move |_area, cr, width, height| {
+            draw_heatmap(cr, width, height, &data_ref.borrow());
+        });
+        container.append(&heatmap_area);
+
+        // -- Recent commits --
+        let commits_label = gtk::Label::new(Some("Recent Commits"));
+        commits_label.add_css_class("title-4");
+        commits_label.set_halign(gtk::Align::Start);
+        commits_label.set_margin_top(16);
+        container.append(&commits_label);
+
+        let scrolled = gtk::ScrolledWindow::new();
+        scrolled.set_max_content_height(200);
+        scrolled.set_propagate_natural_height(true);
+
+        let commits_list = gtk::ListBox::new();
+        commits_list.set_selection_mode(gtk::SelectionMode::None);
+        commits_list.add_css_class("boxed-list");
+
+        commits_list.append(&create_commit_row("—", "Waiting for connection...", ""));
+
+        scrolled.set_child(Some(&commits_list));
+        container.append(&scrolled);
+
+        Self {
+            container,
+            stats_running,
+            stats_completed,
+            stats_failed,
+            stats_total,
+            worktree_count,
+            project_label,
+            heatmap_area,
+            commits_list,
+            heatmap_data,
+            services,
+        }
+    }
+
+    pub fn widget(&self) -> &gtk::Box {
+        &self.container
+    }
+
+    /// Update dashboard stats from a new manifest.
+    pub fn update_manifest(&self, manifest: &Manifest) {
+        let all_agents: Vec<_> = manifest
+            .worktrees
+            .values()
+            .flat_map(|wt| wt.agents.values())
+            .collect();
+
+        let running = all_agents.iter().filter(|a| a.status == AgentStatus::Running).count();
+        let completed = all_agents
+            .iter()
+            .filter(|a| a.status == AgentStatus::Exited && a.exit_code == Some(0))
+            .count();
+        let failed = all_agents // counts exited agents with non-zero OR missing exit codes
+            .iter()
+            .filter(|a| a.status == AgentStatus::Exited && a.exit_code != Some(0))
+            .count();
+        let killed = all_agents.iter().filter(|a| a.status == AgentStatus::Gone).count();
+
+        self.stats_running.set_text(&running.to_string());
+        self.stats_completed.set_text(&completed.to_string());
+        self.stats_failed.set_text(&failed.to_string());
+        self.stats_total.set_text(&killed.to_string());
+
+        self.worktree_count
+            .set_text(&format!("{} worktrees", manifest.worktrees.len()));
+
+        self.project_label
+            .set_text(&format!("Project: {}", manifest.project_root));
+
+        // Fetch git log data for heatmap and recent commits (async)
+        self.fetch_heatmap_data(&manifest.project_root);
+        self.fetch_recent_commits(&manifest.project_root);
+    }
+
+    fn fetch_recent_commits(&self, project_root: &str) {
+        let root = project_root.to_string();
+        let commits_list = self.commits_list.clone();
+
+        std::thread::spawn(move || {
+            let output = std::process::Command::new("git")
+                .args([
+                    "log",
+                    "--format=%h|%s|%ar",
+                    "-n",
+                    "10",
+                ])
+                .current_dir(&root)
+                .output();
+
+            if let Ok(output) = output {
+                let stdout = String::from_utf8_lossy(&output.stdout);
+                let commits: Vec<(String, String, String)> = stdout
+                    .lines()
+                    .filter_map(|line| {
+                        let parts: Vec<&str> = line.splitn(3, '|').collect();
+                        if parts.len() == 3 {
+                            Some((
+                                parts[0].to_string(),
+                                parts[1].to_string(),
+                                parts[2].to_string(),
+                            ))
+                        } else {
+                            None
+                        }
+                    })
+                    .collect();
+
+                glib::idle_add_once(move || { // NOTE(review): idle_add_once needs a Send closure — confirm capturing gtk::ListBox compiles
+                    // Clear existing rows
+                    while let Some(row) = commits_list.row_at_index(0) {
+                        commits_list.remove(&row);
+                    }
+
+                    if commits.is_empty() {
+                        commits_list.append(&create_commit_row("—", "No commits found", ""));
+                    } else {
+                        for (hash, message, time) in &commits {
+                            commits_list.append(&create_commit_row(hash, message, time));
+                        }
+                    }
+                });
+            }
+        });
+    }
+
+    fn fetch_heatmap_data(&self, project_root: &str) {
+        let root = project_root.to_string();
+        let data_ref = self.heatmap_data.clone();
+        let area_ref = self.heatmap_area.clone();
+
+        // Run git log in background
+        std::thread::spawn(move || {
+            let output = std::process::Command::new("git")
+                .args(["log", "--format=%aI", "--since=90 days ago"])
+                .current_dir(&root)
+                .output();
+
+            if let Ok(output) = output {
+                let stdout = String::from_utf8_lossy(&output.stdout);
+                let mut day_counts: HashMap<String, u32> = HashMap::new();
+
+                for line in stdout.lines() {
+                    if let Some(date) = line.split('T').next() {
+                        *day_counts.entry(date.to_string()).or_insert(0) += 1;
+                    }
+                }
+
+                // Convert to 91-day array (today at the end)
+                let today = chrono::Local::now().date_naive();
+                let mut counts = vec![0u32; 91];
+                for i in 0..91 {
+                    let date = today - chrono::Duration::days(90 - i as i64);
+                    let key = date.format("%Y-%m-%d").to_string();
+                    counts[i] = day_counts.get(&key).copied().unwrap_or(0);
+                }
+
+                glib::idle_add_once(move || {
+                    *data_ref.borrow_mut() = counts;
+                    area_ref.queue_draw();
+                });
+            }
+        });
+    }
+}
+
+fn create_stat_card(title: &str, value: &str, value_class: &str) -> (gtk::Frame, gtk::Label) {
+    let frame = gtk::Frame::new(None);
+    frame.add_css_class("card");
+
+    let vbox = gtk::Box::new(gtk::Orientation::Vertical, 4);
+    vbox.set_margin_top(12);
+    vbox.set_margin_bottom(12);
+    vbox.set_margin_start(16);
+    vbox.set_margin_end(16);
+    vbox.set_halign(gtk::Align::Center);
+
+    let value_label = gtk::Label::new(Some(value));
+    value_label.add_css_class("title-1");
+    value_label.add_css_class(value_class);
+
+    let title_label = gtk::Label::new(Some(title));
+    title_label.add_css_class("caption");
+    title_label.add_css_class("dim-label");
+
+    vbox.append(&value_label);
+    vbox.append(&title_label);
+    frame.set_child(Some(&vbox));
+
+    (frame, value_label)
+}
+
+fn create_commit_row(hash: &str, message: &str, time: &str) -> gtk::ListBoxRow {
+    let row = gtk::ListBoxRow::new();
+    let hbox = gtk::Box::new(gtk::Orientation::Horizontal, 8);
+    hbox.set_margin_top(6);
+    hbox.set_margin_bottom(6);
+    hbox.set_margin_start(12);
+    hbox.set_margin_end(12);
+
+    let hash_label = gtk::Label::new(Some(hash));
+    hash_label.add_css_class("monospace");
+    hash_label.add_css_class("caption");
+
+    let msg_label = gtk::Label::new(Some(message));
+    msg_label.set_halign(gtk::Align::Start);
+    msg_label.set_hexpand(true);
+    msg_label.set_ellipsize(pango::EllipsizeMode::End);
+
+    let time_label = gtk::Label::new(Some(time));
+    time_label.add_css_class("caption");
+    time_label.add_css_class("dim-label");
+
+    hbox.append(&hash_label);
+    hbox.append(&msg_label);
+    hbox.append(&time_label);
+    row.set_child(Some(&hbox));
+
+    row
+}
+
+/// Draw the commit heatmap grid (13 columns × 7 rows) using cairo.
+fn draw_heatmap(cr: &cairo::Context, _width: i32, _height: i32, data: &[u32]) { + let cell_size: f64 = 14.0; + let gap: f64 = 2.0; + let step = cell_size + gap; + + // Find max for color scaling + let max_val = data.iter().copied().max().unwrap_or(1).max(1); + + // Colors: 5 levels from no activity to high activity + let colors = [ + (0.15, 0.15, 0.18), // empty / no commits (dark gray) + (0.12, 0.30, 0.17), // level 1 + (0.15, 0.50, 0.25), // level 2 + (0.18, 0.70, 0.35), // level 3 + (0.20, 0.83, 0.40), // level 4 (brightest green) + ]; + + // Data is 91 days, laid out in 13 columns × 7 rows (column-major, week-aligned) + for day_idx in 0..data.len().min(91) { + let col = day_idx / 7; + let row = day_idx % 7; + + let x = col as f64 * step; + let y = row as f64 * step; + + let count = data[day_idx]; + let level = if count == 0 { + 0 + } else { + let ratio = count as f64 / max_val as f64; + if ratio <= 0.25 { + 1 + } else if ratio <= 0.50 { + 2 + } else if ratio <= 0.75 { + 3 + } else { + 4 + } + }; + + let (r, g, b) = colors[level]; + cr.set_source_rgb(r, g, b); + + // Rounded rectangle + let radius = 2.0; + cr.new_sub_path(); + cr.arc( + x + cell_size - radius, + y + radius, + radius, + -std::f64::consts::FRAC_PI_2, + 0.0, + ); + cr.arc( + x + cell_size - radius, + y + cell_size - radius, + radius, + 0.0, + std::f64::consts::FRAC_PI_2, + ); + cr.arc( + x + radius, + y + cell_size - radius, + radius, + std::f64::consts::FRAC_PI_2, + std::f64::consts::PI, + ); + cr.arc( + x + radius, + y + radius, + radius, + std::f64::consts::PI, + 3.0 * std::f64::consts::FRAC_PI_2, + ); + cr.close_path(); + let _ = cr.fill(); + } +} diff --git a/linux/src/ui/mod.rs b/linux/src/ui/mod.rs new file mode 100644 index 0000000..41f85d2 --- /dev/null +++ b/linux/src/ui/mod.rs @@ -0,0 +1,9 @@ +pub mod command_palette; +pub mod home_dashboard; +pub mod pane_grid; +pub mod settings_dialog; +pub mod setup_view; +pub mod sidebar; +pub mod terminal_pane; +pub mod window; +pub mod 
worktree_detail; diff --git a/linux/src/ui/pane_grid.rs b/linux/src/ui/pane_grid.rs new file mode 100644 index 0000000..78c3ef6 --- /dev/null +++ b/linux/src/ui/pane_grid.rs @@ -0,0 +1,175 @@ +use gtk4::prelude::*; +use gtk4::{self as gtk}; +use std::cell::RefCell; +use std::collections::HashMap; +use std::rc::Rc; + +use crate::state::Services; +use crate::ui::terminal_pane::TerminalPane; + +/// Grid layout for terminal panes (up to 2 columns × 3 rows). +#[derive(Clone)] +pub struct PaneGrid { + container: gtk::Box, + grid: gtk::Grid, + services: Services, + panes: Rc>>, + empty_state: gtk::Box, +} + +impl PaneGrid { + pub fn new(services: Services) -> Self { + let container = gtk::Box::new(gtk::Orientation::Vertical, 0); + + // Grid for terminal panes + let grid = gtk::Grid::new(); + grid.set_row_homogeneous(true); + grid.set_column_homogeneous(true); + grid.set_row_spacing(2); + grid.set_column_spacing(2); + grid.set_vexpand(true); + grid.set_hexpand(true); + + // Empty state + let empty_state = create_empty_state(); + + container.append(&empty_state); + + Self { + container, + grid, + services, + panes: Rc::new(RefCell::new(HashMap::new())), + empty_state, + } + } + + pub fn widget(&self) -> >k::Box { + &self.container + } + + /// Show a specific agent's terminal. 
+ pub fn show_agent(&self, worktree_id: &str, agent_id: &str) { + let key = format!("{}:{}", worktree_id, agent_id); + + // Get manifest to find tmux target + let manifest = match self.services.state.manifest() { + Some(m) => m, + None => return, + }; + + let (session_name, window_target) = { + let wt = match manifest.worktrees.get(worktree_id) { + Some(wt) => wt, + None => return, + }; + let agent = match wt.agents.get(agent_id) { + Some(a) => a, + None => return, + }; + (manifest.session_name.clone(), wt.tmux_window.clone()) + }; + + // Create a pane if it doesn't exist + let mut panes = self.panes.borrow_mut(); + if !panes.contains_key(&key) { + let pane = TerminalPane::new(self.services.clone()); + pane.attach_to_tmux(&session_name, &window_target); + panes.insert(key.clone(), pane); + } + + // Replace grid contents with the selected pane + // Remove all children from grid + while let Some(child) = self.grid.first_child() { + self.grid.remove(&child); + } + + if let Some(pane) = panes.get(&key) { + self.grid.attach(pane.widget(), 0, 0, 1, 1); + } + + // Switch from empty state to grid + if self.empty_state.parent().is_some() { + self.container.remove(&self.empty_state); + } + if self.grid.parent().is_none() { + self.container.append(&self.grid); + } + } + + /// Show all agents for a worktree in a grid layout. 
+ pub fn show_worktree(&self, worktree_id: &str) { + let manifest = match self.services.state.manifest() { + Some(m) => m, + None => return, + }; + + let wt = match manifest.worktrees.get(worktree_id) { + Some(wt) => wt, + None => return, + }; + + // Clear grid + while let Some(child) = self.grid.first_child() { + self.grid.remove(&child); + } + + let agents: Vec<_> = wt.agents.values().collect(); + if agents.is_empty() { + return; + } + + // Calculate grid dimensions (up to 2 cols × 3 rows) + let count = agents.len().min(6); + let cols = if count <= 1 { 1 } else { 2 }; + + let mut panes = self.panes.borrow_mut(); + for (i, agent) in agents.iter().take(6).enumerate() { + let key = format!("{}:{}", worktree_id, agent.id); + let col = (i % cols) as i32; + let row = (i / cols) as i32; + + if !panes.contains_key(&key) { + let pane = TerminalPane::new(self.services.clone()); + pane.attach_to_tmux(&manifest.session_name, &wt.tmux_window); + panes.insert(key.clone(), pane); + } + + if let Some(pane) = panes.get(&key) { + self.grid.attach(pane.widget(), col, row, 1, 1); + } + } + + if self.empty_state.parent().is_some() { + self.container.remove(&self.empty_state); + } + if self.grid.parent().is_none() { + self.container.append(&self.grid); + } + } +} + +fn create_empty_state() -> gtk::Box { + let container = gtk::Box::new(gtk::Orientation::Vertical, 12); + container.set_halign(gtk::Align::Center); + container.set_valign(gtk::Align::Center); + container.set_vexpand(true); + + let icon = gtk::Image::from_icon_name("utilities-terminal-symbolic"); + icon.set_pixel_size(64); + icon.add_css_class("dim-label"); + + let label = gtk::Label::new(Some("Select an agent from the sidebar")); + label.add_css_class("title-3"); + label.add_css_class("dim-label"); + + let hint = gtk::Label::new(Some("Terminal panes will appear here")); + hint.add_css_class("caption"); + hint.add_css_class("dim-label"); + + container.append(&icon); + container.append(&label); + container.append(&hint); + + 
container +} diff --git a/linux/src/ui/settings_dialog.rs b/linux/src/ui/settings_dialog.rs new file mode 100644 index 0000000..bf52a44 --- /dev/null +++ b/linux/src/ui/settings_dialog.rs @@ -0,0 +1,175 @@ +use gtk4::prelude::*; +use gtk4::{self as gtk}; +use libadwaita as adw; +use libadwaita::prelude::*; + +use crate::models::settings::Appearance; +use crate::state::Services; + +/// Settings dialog using adw::PreferencesWindow. +pub struct SettingsDialog { + window: adw::PreferencesWindow, +} + +impl SettingsDialog { + pub fn new(services: Services) -> Self { + let window = adw::PreferencesWindow::new(); + window.set_title("Settings"); + window.set_default_size(600, 500); + window.set_search_enabled(false); + + let settings = services.state.settings(); + + // -- Connection group -- + let connection_group = adw::PreferencesGroup::new(); + connection_group.set_title("Connection"); + connection_group.set_description(Some("PPG server connection settings")); + + let url_row = adw::EntryRow::new(); + url_row.set_title("Server URL"); + url_row.set_text(&settings.server_url); + connection_group.add(&url_row); + + let token_row = adw::PasswordEntryRow::new(); + token_row.set_title("Bearer Token"); + if let Some(ref token) = settings.bearer_token { + token_row.set_text(token); + } + connection_group.add(&token_row); + + let test_button = gtk::Button::with_label("Test Connection"); + test_button.set_halign(gtk::Align::Start); + test_button.set_margin_top(8); + + let services_test = services.clone(); + let test_btn_ref = test_button.clone(); + test_button.connect_clicked(move |_| { + test_btn_ref.set_label("Testing..."); + test_btn_ref.set_sensitive(false); + + let client = services_test.client.clone(); + let btn = test_btn_ref.clone(); + services_test.runtime.spawn(async move { + let result = client.read().unwrap().test_connection().await; + let label = match result { + Ok(true) => "Connected!", + Ok(false) => "Failed", + Err(_) => "Error", + }; + let label_owned = 
label.to_string(); + glib::idle_add_once(move || { + btn.set_label(&label_owned); + btn.set_sensitive(true); + }); + }); + }); + connection_group.add(&test_button); + + let connection_page = adw::PreferencesPage::new(); + connection_page.set_title("Connection"); + connection_page.set_icon_name(Some("network-server-symbolic")); + connection_page.add(&connection_group); + + // -- Terminal group -- + let terminal_group = adw::PreferencesGroup::new(); + terminal_group.set_title("Terminal"); + terminal_group.set_description(Some("Terminal appearance settings")); + + let font_row = adw::EntryRow::new(); + font_row.set_title("Font Family"); + font_row.set_text(&settings.font_family); + terminal_group.add(&font_row); + + let size_row = adw::SpinRow::with_range(8.0, 32.0, 1.0); + size_row.set_title("Font Size"); + size_row.set_value(settings.font_size as f64); + terminal_group.add(&size_row); + + // -- Appearance group -- + let appearance_group = adw::PreferencesGroup::new(); + appearance_group.set_title("Appearance"); + + let appearance_row = adw::ComboRow::new(); + appearance_row.set_title("Color Scheme"); + let model = gtk::StringList::new(&["System", "Dark", "Light"]); + appearance_row.set_model(Some(&model)); + appearance_row.set_selected(match settings.appearance { + Appearance::System => 0, + Appearance::Dark => 1, + Appearance::Light => 2, + }); + appearance_group.add(&appearance_row); + + let appearance_page = adw::PreferencesPage::new(); + appearance_page.set_title("Appearance"); + appearance_page.set_icon_name(Some("applications-graphics-symbolic")); + appearance_page.add(&terminal_group); + appearance_page.add(&appearance_group); + + window.add(&connection_page); + window.add(&appearance_page); + + // Save settings on close + let services_save = services.clone(); + let url_row_ref = url_row.clone(); + let token_row_ref = token_row.clone(); + let font_row_ref = font_row.clone(); + window.connect_close_request(move |_| { + let url = url_row_ref.text().to_string(); 
+ let token_text = token_row_ref.text().to_string(); + let token = if token_text.is_empty() { + None + } else { + Some(token_text) + }; + let font = font_row_ref.text().to_string(); + let size = size_row.value() as u32; + let appearance = match appearance_row.selected() { + 1 => Appearance::Dark, + 2 => Appearance::Light, + _ => Appearance::System, + }; + + services_save.state.update_settings(|s| { + s.server_url = url.clone(); + s.bearer_token = token.clone(); + s.font_family = font; + s.font_size = size; + s.appearance = appearance; + }); + + // Update client connection and reconnect WebSocket + services_save + .client + .write() + .unwrap() + .update_connection(&url, token); + + services_save.reconnect_ws(); + services_save.toast("Settings saved — reconnecting..."); + + // Apply appearance + let style_manager = adw::StyleManager::default(); + match appearance { + Appearance::Dark => { + style_manager.set_color_scheme(adw::ColorScheme::ForceDark); + } + Appearance::Light => { + style_manager.set_color_scheme(adw::ColorScheme::ForceLight); + } + Appearance::System => { + style_manager.set_color_scheme(adw::ColorScheme::Default); + } + } + + glib::Propagation::Proceed + }); + + Self { window } + } + + pub fn present(&self, parent: &adw::ApplicationWindow) { + self.window.set_transient_for(Some(parent)); + self.window.present(); + } +} diff --git a/linux/src/ui/setup_view.rs b/linux/src/ui/setup_view.rs new file mode 100644 index 0000000..6897639 --- /dev/null +++ b/linux/src/ui/setup_view.rs @@ -0,0 +1,175 @@ +use gtk4::prelude::*; +use gtk4::{self as gtk}; + +use crate::state::Services; +use crate::util::shell::command_exists; + +/// First-run setup view that checks prerequisites (ppg, tmux). 
+#[derive(Clone)] +pub struct SetupView { + container: gtk::Box, + ppg_status: gtk::Label, + ppg_icon: gtk::Image, + tmux_status: gtk::Label, + tmux_icon: gtk::Image, + retry_button: gtk::Button, + continue_button: gtk::Button, + services: Services, +} + +impl SetupView { + pub fn new(services: Services) -> Self { + let container = gtk::Box::new(gtk::Orientation::Vertical, 24); + container.set_halign(gtk::Align::Center); + container.set_valign(gtk::Align::Center); + container.set_margin_top(48); + container.set_margin_bottom(48); + container.set_margin_start(48); + container.set_margin_end(48); + container.set_width_request(400); + + // Header + let title = gtk::Label::new(Some("PPG Desktop Setup")); + title.add_css_class("title-1"); + container.append(&title); + + let subtitle = gtk::Label::new(Some("Checking prerequisites...")); + subtitle.add_css_class("dim-label"); + container.append(&subtitle); + + // Checks list + let checks_box = gtk::Box::new(gtk::Orientation::Vertical, 12); + checks_box.set_margin_top(24); + + // ppg check + let (ppg_row, ppg_icon, ppg_status) = create_check_row("ppg", "PPG CLI tool"); + checks_box.append(&ppg_row); + + // tmux check + let (tmux_row, tmux_icon, tmux_status) = create_check_row("tmux", "Terminal multiplexer"); + checks_box.append(&tmux_row); + + container.append(&checks_box); + + // Install hints + let hints_box = gtk::Box::new(gtk::Orientation::Vertical, 4); + hints_box.set_margin_top(16); + + let ppg_hint = gtk::Label::new(Some("Install ppg: npm install -g ppg-cli")); + ppg_hint.add_css_class("monospace"); + ppg_hint.add_css_class("caption"); + ppg_hint.set_selectable(true); + + let tmux_hint = gtk::Label::new(Some("Install tmux: sudo apt install tmux")); + tmux_hint.add_css_class("monospace"); + tmux_hint.add_css_class("caption"); + tmux_hint.set_selectable(true); + + hints_box.append(&ppg_hint); + hints_box.append(&tmux_hint); + container.append(&hints_box); + + // Buttons + let button_box = 
gtk::Box::new(gtk::Orientation::Horizontal, 12); + button_box.set_halign(gtk::Align::Center); + button_box.set_margin_top(24); + + let retry_button = gtk::Button::with_label("Retry"); + let continue_button = gtk::Button::with_label("Continue"); + continue_button.add_css_class("suggested-action"); + continue_button.set_sensitive(false); + + button_box.append(&retry_button); + button_box.append(&continue_button); + container.append(&button_box); + + let view = Self { + container, + ppg_status, + ppg_icon, + tmux_status, + tmux_icon, + retry_button: retry_button.clone(), + continue_button: continue_button.clone(), + services, + }; + + // Run initial check + view.check_prerequisites(); + + // Retry button + let view_retry = view.clone(); + retry_button.connect_clicked(move |_| { + view_retry.check_prerequisites(); + }); + + view + } + + pub fn widget(&self) -> >k::Box { + &self.container + } + + pub fn check_prerequisites(&self) { + let ppg_ok = command_exists("ppg"); + let tmux_ok = command_exists("tmux"); + + update_check_status(&self.ppg_icon, &self.ppg_status, ppg_ok); + update_check_status(&self.tmux_icon, &self.tmux_status, tmux_ok); + + self.continue_button.set_sensitive(ppg_ok && tmux_ok); + } + + pub fn connect_continue(&self, f: F) { + self.continue_button.connect_clicked(move |_| f()); + } +} + +fn create_check_row(name: &str, description: &str) -> (gtk::Box, gtk::Image, gtk::Label) { + let row = gtk::Box::new(gtk::Orientation::Horizontal, 12); + row.set_margin_start(8); + row.set_margin_end(8); + + let icon = gtk::Image::from_icon_name("dialog-question-symbolic"); + icon.set_pixel_size(24); + + let vbox = gtk::Box::new(gtk::Orientation::Vertical, 2); + vbox.set_hexpand(true); + + let name_label = gtk::Label::new(Some(name)); + name_label.set_halign(gtk::Align::Start); + name_label.add_css_class("heading"); + + let desc_label = gtk::Label::new(Some(description)); + desc_label.set_halign(gtk::Align::Start); + desc_label.add_css_class("caption"); + 
desc_label.add_css_class("dim-label"); + + vbox.append(&name_label); + vbox.append(&desc_label); + + let status_label = gtk::Label::new(Some("Checking...")); + status_label.add_css_class("caption"); + + row.append(&icon); + row.append(&vbox); + row.append(&status_label); + + (row, icon, status_label) +} + +fn update_check_status(icon: >k::Image, status: >k::Label, found: bool) { + if found { + icon.set_icon_name(Some("emblem-ok-symbolic")); + icon.add_css_class("success"); + status.set_text("Found"); + status.add_css_class("status-running"); + status.remove_css_class("status-failed"); + } else { + icon.set_icon_name(Some("dialog-error-symbolic")); + icon.add_css_class("error"); + status.set_text("Not found"); + status.add_css_class("status-failed"); + status.remove_css_class("status-running"); + } +} diff --git a/linux/src/ui/sidebar.rs b/linux/src/ui/sidebar.rs new file mode 100644 index 0000000..d76755a --- /dev/null +++ b/linux/src/ui/sidebar.rs @@ -0,0 +1,449 @@ +use gtk4::prelude::*; +use gtk4::{self as gtk, gio}; + +use crate::api::client::RestartRequest; +use crate::models::manifest::{AgentEntry, AgentStatus, Manifest, WorktreeEntry}; +use crate::state::Services; +use crate::ui::window::SidebarSelection; + +use std::cell::RefCell; +use std::rc::Rc; + +/// Sidebar with project > worktree > agent hierarchy. 
+#[derive(Clone)] +pub struct SidebarView { + container: gtk::Box, + list_box: gtk::ListBox, + services: Services, + on_selection: Rc>>>, +} + +impl SidebarView { + pub fn new(services: Services) -> Self { + let container = gtk::Box::new(gtk::Orientation::Vertical, 0); + container.add_css_class("sidebar"); + + // Sidebar header + let header_box = gtk::Box::new(gtk::Orientation::Horizontal, 8); + header_box.set_margin_top(12); + header_box.set_margin_bottom(8); + header_box.set_margin_start(12); + header_box.set_margin_end(12); + let title = gtk::Label::new(Some("PPG")); + title.add_css_class("title-3"); + title.set_halign(gtk::Align::Start); + header_box.append(&title); + container.append(&header_box); + + let scrolled = gtk::ScrolledWindow::new(); + scrolled.set_vexpand(true); + scrolled.set_policy(gtk::PolicyType::Never, gtk::PolicyType::Automatic); + + let list_box = gtk::ListBox::new(); + list_box.set_selection_mode(gtk::SelectionMode::Single); + list_box.add_css_class("navigation-sidebar"); + scrolled.set_child(Some(&list_box)); + container.append(&scrolled); + + // Add "Dashboard" row at the top + let dashboard_row = create_section_row("Dashboard", "go-home-symbolic"); + list_box.append(&dashboard_row); + + let on_selection: Rc>>> = + Rc::new(RefCell::new(None)); + + let on_sel_ref = on_selection.clone(); + list_box.connect_row_activated(move |_, row| { + if let Some(ref cb) = *on_sel_ref.borrow() { + let selection = row_to_selection(row); + cb(selection); + } + }); + + Self { + container, + list_box, + services, + on_selection, + } + } + + pub fn widget(&self) -> >k::Box { + &self.container + } + + pub fn connect_selection_changed(&self, f: F) { + *self.on_selection.borrow_mut() = Some(Box::new(f)); + } + + /// Rebuild the sidebar from a new manifest. 
+ pub fn update_manifest(&self, manifest: &Manifest) { + // Remove all rows except the Dashboard row (index 0) + while let Some(row) = self.list_box.row_at_index(1) { + self.list_box.remove(&row); + } + + // Section: Worktrees + if !manifest.worktrees.is_empty() { + let section = create_section_header("Worktrees"); + self.list_box.append(§ion); + + let mut worktrees: Vec<_> = manifest.worktrees.values().collect(); + worktrees.sort_by(|a, b| a.created_at.cmp(&b.created_at)); + + for wt in worktrees { + let wt_row = create_worktree_row(wt, &self.services); + self.list_box.append(&wt_row); + + // Agent children + let mut agents: Vec<_> = wt.agents.values().collect(); + agents.sort_by(|a, b| a.started_at.cmp(&b.started_at)); + + for agent in agents { + let agent_row = create_agent_row(&wt.id, agent, &self.services); + self.list_box.append(&agent_row); + } + } + } + } + + /// Update a single agent's status badge without full rebuild. + pub fn update_agent_status( + &self, + _worktree_id: &str, + _agent_id: &str, + _status: AgentStatus, + ) { + // For simplicity, trigger a full manifest refresh from state. + // A production app would do targeted updates. 
+ if let Some(manifest) = self.services.state.manifest() { + self.update_manifest(&manifest); + } + } +} + +fn create_section_row(label: &str, icon_name: &str) -> gtk::ListBoxRow { + let row = gtk::ListBoxRow::new(); + let hbox = gtk::Box::new(gtk::Orientation::Horizontal, 8); + hbox.set_margin_top(4); + hbox.set_margin_bottom(4); + hbox.set_margin_start(8); + hbox.set_margin_end(8); + + let icon = gtk::Image::from_icon_name(icon_name); + let label_widget = gtk::Label::new(Some(label)); + label_widget.set_halign(gtk::Align::Start); + label_widget.set_hexpand(true); + + hbox.append(&icon); + hbox.append(&label_widget); + row.set_child(Some(&hbox)); + + // Store selection data + row.set_widget_name("dashboard"); + + row +} + +fn create_section_header(title: &str) -> gtk::ListBoxRow { + let row = gtk::ListBoxRow::new(); + row.set_selectable(false); + row.set_activatable(false); + + let label = gtk::Label::new(Some(title)); + label.add_css_class("caption"); + label.add_css_class("dim-label"); + label.set_halign(gtk::Align::Start); + label.set_margin_top(12); + label.set_margin_bottom(4); + label.set_margin_start(12); + + row.set_child(Some(&label)); + row +} + +fn create_worktree_row(wt: &WorktreeEntry, services: &Services) -> gtk::ListBoxRow { + let row = gtk::ListBoxRow::new(); + let hbox = gtk::Box::new(gtk::Orientation::Horizontal, 8); + hbox.set_margin_top(4); + hbox.set_margin_bottom(4); + hbox.set_margin_start(12); + hbox.set_margin_end(8); + + // Status dot + let dot = gtk::Label::new(Some("\u{25CF}")); // ● + dot.add_css_class(wt.status.css_class()); + + let name_label = gtk::Label::new(Some(&wt.name)); + name_label.set_halign(gtk::Align::Start); + name_label.set_hexpand(true); + + // Agent count badge + let count = wt.agents.len(); + let badge = gtk::Label::new(Some(&count.to_string())); + badge.add_css_class("caption"); + badge.add_css_class("dim-label"); + + hbox.append(&dot); + hbox.append(&name_label); + hbox.append(&badge); + row.set_child(Some(&hbox)); 
+ + // Store worktree ID for selection + row.set_widget_name(&format!("wt:{}", wt.id)); + + // -- Context menu (right-click) -- + let wt_id = wt.id.clone(); + let services_ctx = services.clone(); + + let menu = gio::Menu::new(); + menu.append(Some("Kill Worktree"), Some(&format!("wt.kill.{}", wt_id))); + menu.append(Some("Merge Worktree"), Some(&format!("wt.merge.{}", wt_id))); + + let popover = gtk::PopoverMenu::from_model(Some(&menu)); + popover.set_parent(&hbox); + popover.set_has_arrow(false); + + // Register actions on the row + let action_group = gio::SimpleActionGroup::new(); + + let kill_action = gio::SimpleAction::new(&format!("kill.{}", wt_id), None); + let services_kill = services_ctx.clone(); + let wt_id_kill = wt_id.clone(); + kill_action.connect_activate(move |_, _| { + let client = services_kill.client.clone(); + let id = wt_id_kill.clone(); + let toast_tx = services_kill.toast_tx.clone(); + services_kill.runtime.spawn(async move { + match client.read().unwrap().kill_worktree(&id).await { + Ok(_) => { + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Killed worktree {}", id), + is_error: false, + }).await; + } + Err(e) => { + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Kill failed: {}", e), + is_error: true, + }).await; + } + } + }); + }); + action_group.add_action(&kill_action); + + let merge_action = gio::SimpleAction::new(&format!("merge.{}", wt_id), None); + let services_merge = services_ctx.clone(); + let wt_id_merge = wt_id.clone(); + merge_action.connect_activate(move |_, _| { + let client = services_merge.client.clone(); + let id = wt_id_merge.clone(); + let toast_tx = services_merge.toast_tx.clone(); + services_merge.runtime.spawn(async move { + let req = crate::api::client::MergeRequest { + strategy: Some("squash".to_string()), + cleanup: Some(true), + force: None, + }; + match client.read().unwrap().merge_worktree(&id, &req).await { + Ok(_) => { + let _ = toast_tx.send(crate::state::ToastMessage 
{ + text: format!("Merged worktree {}", id), + is_error: false, + }).await; + } + Err(e) => { + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Merge failed: {}", e), + is_error: true, + }).await; + } + } + }); + }); + action_group.add_action(&merge_action); + + row.insert_action_group("wt", Some(&action_group)); + + // Right-click gesture + let gesture = gtk::GestureClick::new(); + gesture.set_button(3); // Right mouse button + let popover_ref = popover.clone(); + gesture.connect_released(move |gesture, _, x, y| { + gesture.set_state(gtk::EventSequenceState::Claimed); + popover_ref.set_pointing_to(Some(>k4::gdk::Rectangle::new(x as i32, y as i32, 1, 1))); + popover_ref.popup(); + }); + hbox.add_controller(gesture); + + row +} + +fn create_agent_row(worktree_id: &str, agent: &AgentEntry, services: &Services) -> gtk::ListBoxRow { + let row = gtk::ListBoxRow::new(); + let hbox = gtk::Box::new(gtk::Orientation::Horizontal, 8); + hbox.set_margin_top(2); + hbox.set_margin_bottom(2); + hbox.set_margin_start(32); // Indented under worktree + hbox.set_margin_end(8); + + // Status dot (smaller) + let dot = gtk::Label::new(Some("\u{2022}")); // • + dot.add_css_class(agent.status.css_class()); + + let type_label = gtk::Label::new(Some(&agent.agent_type)); + type_label.add_css_class("caption"); + type_label.add_css_class("dim-label"); + + let name_label = gtk::Label::new(Some(&agent.name)); + name_label.set_halign(gtk::Align::Start); + name_label.set_hexpand(true); + name_label.set_ellipsize(pango::EllipsizeMode::End); + + hbox.append(&dot); + hbox.append(&type_label); + hbox.append(&name_label); + row.set_child(Some(&hbox)); + + // Store agent ID for selection + let agent_id = agent.id.clone(); + row.set_widget_name(&format!("ag:{}:{}", worktree_id, agent_id)); + + // -- Context menu (right-click) -- + let menu = gio::Menu::new(); + menu.append(Some("Kill Agent"), Some(&format!("ag.kill.{}", agent_id))); + menu.append(Some("Restart Agent"), 
Some(&format!("ag.restart.{}", agent_id))); + menu.append(Some("View Logs"), Some(&format!("ag.logs.{}", agent_id))); + + let popover = gtk::PopoverMenu::from_model(Some(&menu)); + popover.set_parent(&hbox); + popover.set_has_arrow(false); + + let action_group = gio::SimpleActionGroup::new(); + + // Kill agent + let kill_action = gio::SimpleAction::new(&format!("kill.{}", agent_id), None); + let services_kill = services.clone(); + let aid_kill = agent_id.clone(); + kill_action.connect_activate(move |_, _| { + let client = services_kill.client.clone(); + let id = aid_kill.clone(); + let toast_tx = services_kill.toast_tx.clone(); + services_kill.runtime.spawn(async move { + match client.read().unwrap().kill_agent(&id).await { + Ok(_) => { + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Killed agent {}", id), + is_error: false, + }).await; + } + Err(e) => { + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Kill failed: {}", e), + is_error: true, + }).await; + } + } + }); + }); + action_group.add_action(&kill_action); + + // Restart agent + let restart_action = gio::SimpleAction::new(&format!("restart.{}", agent_id), None); + let services_restart = services.clone(); + let aid_restart = agent_id.clone(); + restart_action.connect_activate(move |_, _| { + let client = services_restart.client.clone(); + let id = aid_restart.clone(); + let toast_tx = services_restart.toast_tx.clone(); + services_restart.runtime.spawn(async move { + let req = RestartRequest { + prompt: None, + agent: None, + }; + match client.read().unwrap().restart_agent(&id, &req).await { + Ok(_) => { + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Restarted agent {}", id), + is_error: false, + }).await; + } + Err(e) => { + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Restart failed: {}", e), + is_error: true, + }).await; + } + } + }); + }); + action_group.add_action(&restart_action); + + // View logs (fetch and show in 
toast for now — full log viewer is future work) + let logs_action = gio::SimpleAction::new(&format!("logs.{}", agent_id), None); + let services_logs = services.clone(); + let aid_logs = agent_id.clone(); + logs_action.connect_activate(move |_, _| { + let client = services_logs.client.clone(); + let id = aid_logs.clone(); + let toast_tx = services_logs.toast_tx.clone(); + services_logs.runtime.spawn(async move { + match client.read().unwrap().agent_logs(&id, Some(50)).await { + Ok(resp) => { + let line_count = resp.lines.len(); + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Fetched {} log lines for {}", line_count, id), + is_error: false, + }).await; + // Log the actual lines for now + for line in &resp.lines { + log::info!("[{}] {}", id, line); + } + } + Err(e) => { + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Logs failed: {}", e), + is_error: true, + }).await; + } + } + }); + }); + action_group.add_action(&logs_action); + + row.insert_action_group("ag", Some(&action_group)); + + // Right-click gesture + let gesture = gtk::GestureClick::new(); + gesture.set_button(3); + let popover_ref = popover.clone(); + gesture.connect_released(move |gesture, _, x, y| { + gesture.set_state(gtk::EventSequenceState::Claimed); + popover_ref.set_pointing_to(Some(>k4::gdk::Rectangle::new(x as i32, y as i32, 1, 1))); + popover_ref.popup(); + }); + hbox.add_controller(gesture); + + row +} + +fn row_to_selection(row: >k::ListBoxRow) -> SidebarSelection { + let name = row.widget_name(); + let name_str = name.as_str(); + + if name_str == "dashboard" { + SidebarSelection::Dashboard + } else if let Some(wt_id) = name_str.strip_prefix("wt:") { + SidebarSelection::Worktree(wt_id.to_string()) + } else if let Some(rest) = name_str.strip_prefix("ag:") { + let parts: Vec<&str> = rest.splitn(2, ':').collect(); + if parts.len() == 2 { + SidebarSelection::Agent(parts[0].to_string(), parts[1].to_string()) + } else { + SidebarSelection::Dashboard + } + } 
else { + SidebarSelection::Dashboard + } +} diff --git a/linux/src/ui/terminal_pane.rs b/linux/src/ui/terminal_pane.rs new file mode 100644 index 0000000..500fc7b --- /dev/null +++ b/linux/src/ui/terminal_pane.rs @@ -0,0 +1,73 @@ +use gtk4::prelude::*; +use gtk4::{self as gtk}; + +use crate::state::Services; +use crate::util::shell::tmux_attach_shell_command; + +/// A terminal pane that embeds a VTE terminal widget. +/// +/// Since vte4-rs may not be available as a crate, we use a fallback +/// placeholder. When VTE is available, the `create_vte_terminal` function +/// would return a real terminal widget. +#[derive(Clone)] +pub struct TerminalPane { + widget: gtk::Widget, + #[allow(dead_code)] + services: Services, +} + +impl TerminalPane { + pub fn new(services: Services) -> Self { + let widget = create_fallback_widget().upcast(); + + Self { widget, services } + } + + pub fn widget(&self) -> >k::Widget { + &self.widget + } + + /// Attach this terminal to a tmux session/window. + pub fn attach_to_tmux(&self, session_name: &str, window_target: &str) { + let _cmd = tmux_attach_shell_command(session_name, window_target); + // When VTE is available: + // spawn_in_terminal(&self.widget, &cmd); + } +} + +/// Fallback widget when VTE is not available. 
+fn create_fallback_widget() -> gtk::Box { + let container = gtk::Box::new(gtk::Orientation::Vertical, 8); + container.set_halign(gtk::Align::Center); + container.set_valign(gtk::Align::Center); + + let icon = gtk::Image::from_icon_name("utilities-terminal-symbolic"); + icon.set_pixel_size(48); + icon.add_css_class("dim-label"); + + let label = gtk::Label::new(Some("Terminal Pane")); + label.add_css_class("title-3"); + + let hint = gtk::Label::new(Some( + "VTE terminal widget will be embedded here.\n\ + Install libvte-2.91-gtk4-dev and rebuild with VTE support.", + )); + hint.add_css_class("dim-label"); + hint.set_justify(gtk::Justification::Center); + + let tmux_hint = gtk::Label::new(Some( + "The terminal connects to tmux sessions to show live agent output.\n\ + Use 'ppg attach ' in a regular terminal for now.", + )); + tmux_hint.add_css_class("caption"); + tmux_hint.add_css_class("dim-label"); + tmux_hint.set_margin_top(8); + tmux_hint.set_justify(gtk::Justification::Center); + + container.append(&icon); + container.append(&label); + container.append(&hint); + container.append(&tmux_hint); + + container +} diff --git a/linux/src/ui/window.rs b/linux/src/ui/window.rs new file mode 100644 index 0000000..a2ef1dc --- /dev/null +++ b/linux/src/ui/window.rs @@ -0,0 +1,364 @@ +use gtk4::prelude::*; +use gtk4::{self as gtk, gio}; +use libadwaita as adw; +use libadwaita::prelude::*; + +use crate::api::websocket::WsEvent; +use crate::state::{ConnectionState, Services, ToastMessage}; +use crate::ui::command_palette::CommandPalette; +use crate::ui::home_dashboard::HomeDashboard; +use crate::ui::pane_grid::PaneGrid; +use crate::ui::settings_dialog::SettingsDialog; +use crate::ui::setup_view::SetupView; +use crate::ui::sidebar::SidebarView; +use crate::ui::worktree_detail::WorktreeDetail; + +/// The main application window using NavigationSplitView. 
+pub struct MainWindow { + pub window: adw::ApplicationWindow, + sidebar: SidebarView, + stack: gtk::Stack, + home_dashboard: HomeDashboard, + pane_grid: PaneGrid, + worktree_detail: WorktreeDetail, + setup_view: SetupView, + status_label: gtk::Label, + toast_overlay: adw::ToastOverlay, + services: Services, +} + +impl MainWindow { + pub fn new(app: &adw::Application, services: Services) -> Self { + let window = adw::ApplicationWindow::builder() + .application(app) + .default_width(1280) + .default_height(800) + .title("PPG Desktop") + .build(); + + // -- Header bar -- + let header = adw::HeaderBar::new(); + + let status_label = gtk::Label::new(Some("Disconnected")); + status_label.add_css_class("status-gone"); + status_label.add_css_class("caption"); + header.pack_start(&status_label); + + let menu_button = gtk::MenuButton::builder() + .icon_name("open-menu-symbolic") + .build(); + + let menu = gio::Menu::new(); + menu.append(Some("Settings"), Some("app.settings")); + menu.append(Some("Reconnect"), Some("app.reconnect")); + menu.append(Some("About"), Some("app.about")); + menu_button.set_menu_model(Some(&menu)); + header.pack_end(&menu_button); + + // -- Content stack -- + let stack = gtk::Stack::new(); + stack.set_transition_type(gtk::StackTransitionType::Crossfade); + + let home_dashboard = HomeDashboard::new(services.clone()); + let pane_grid = PaneGrid::new(services.clone()); + let worktree_detail = WorktreeDetail::new(services.clone()); + let setup_view = SetupView::new(services.clone()); + + stack.add_named(&home_dashboard.widget(), Some("dashboard")); + stack.add_named(&pane_grid.widget(), Some("terminal")); + stack.add_named(&worktree_detail.widget(), Some("worktree")); + stack.add_named(&setup_view.widget(), Some("setup")); + + // -- Sidebar -- + let sidebar = SidebarView::new(services.clone()); + + // When sidebar selection changes, update the content stack + let stack_ref = stack.clone(); + let pane_grid_ref = pane_grid.clone(); + let 
worktree_detail_ref = worktree_detail.clone(); + sidebar.connect_selection_changed(move |selection| match selection { + SidebarSelection::Dashboard => { + stack_ref.set_visible_child_name("dashboard"); + } + SidebarSelection::Worktree(wt_id) => { + worktree_detail_ref.set_worktree(&wt_id); + stack_ref.set_visible_child_name("worktree"); + } + SidebarSelection::Agent(wt_id, agent_id) => { + pane_grid_ref.show_agent(&wt_id, &agent_id); + stack_ref.set_visible_child_name("terminal"); + } + }); + + // -- Navigation split view -- + let sidebar_page = adw::NavigationPage::builder() + .title("PPG") + .child(&sidebar.widget()) + .build(); + + // Wrap content in a toast overlay for notifications + let toast_overlay = adw::ToastOverlay::new(); + let content_box = gtk::Box::new(gtk::Orientation::Vertical, 0); + content_box.append(&header); + content_box.append(&stack); + stack.set_vexpand(true); + toast_overlay.set_child(Some(&content_box)); + + let content_page = adw::NavigationPage::builder() + .title("Dashboard") + .child(&toast_overlay) + .build(); + + let split_view = adw::NavigationSplitView::new(); + split_view.set_sidebar(&sidebar_page); + split_view.set_content(&content_page); + + window.set_content(Some(&split_view)); + + // -- Keyboard shortcut: Ctrl+Shift+P -> command palette -- + let palette_action = gio::SimpleAction::new("command-palette", None); + let services_cp = services.clone(); + let window_ref = window.clone(); + palette_action.connect_activate(move |_, _| { + let palette = CommandPalette::new(services_cp.clone()); + palette.present(&window_ref); + }); + window.add_action(&palette_action); + + // Ctrl+Shift+P keybinding + let shortcut_ctrl = gtk::ShortcutController::new(); + shortcut_ctrl.set_scope(gtk::ShortcutScope::Global); + let trigger = gtk::ShortcutTrigger::parse_string("p").unwrap(); + let action = gtk::ShortcutAction::parse_string("action(app.command-palette)").unwrap(); + let shortcut = gtk::Shortcut::new(Some(trigger), Some(action)); + 
shortcut_ctrl.add_shortcut(shortcut); + window.add_controller(shortcut_ctrl); + + // -- Settings action -- + let settings_action = gio::SimpleAction::new("settings", None); + let services_sa = services.clone(); + let window_ref2 = window.clone(); + settings_action.connect_activate(move |_, _| { + let dialog = SettingsDialog::new(services_sa.clone()); + dialog.present(&window_ref2); + }); + app.add_action(&settings_action); + + // -- Reconnect action (uses centralized reconnect_ws) -- + let reconnect_action = gio::SimpleAction::new("reconnect", None); + let services_ra = services.clone(); + reconnect_action.connect_activate(move |_, _| { + services_ra.state.set_connection_state(ConnectionState::Connecting); + services_ra.reconnect_ws(); + services_ra.toast("Reconnecting..."); + }); + app.add_action(&reconnect_action); + + Self { + window, + sidebar, + stack, + home_dashboard, + pane_grid, + worktree_detail, + setup_view, + status_label, + toast_overlay, + services, + } + } + + pub fn present(&self) { + self.window.present(); + } + + /// Check prerequisites and show setup view or connect immediately. + /// This should be called once after the window is created. 
+ pub fn start(&self) { + use crate::util::shell::command_exists; + + // Wire the setup view's continue button to switch to dashboard and connect + let stack = self.stack.clone(); + let services_continue = self.services.clone(); + self.setup_view.connect_continue(move || { + stack.set_visible_child_name("dashboard"); + // Trigger connection when prerequisites are satisfied + services_continue.reconnect_ws(); + }); + + // Check if prerequisites are met + let ppg_ok = command_exists("ppg"); + let tmux_ok = command_exists("tmux"); + + if ppg_ok && tmux_ok { + // Prerequisites met — go straight to dashboard and connect + self.stack.set_visible_child_name("dashboard"); + self.connect(); + } else { + // Show setup view first + self.stack.set_visible_child_name("setup"); + // Still set up the event loops so they're ready when the user continues + self.setup_event_loops(); + } + } + + /// Set up event loops for WS and toast receivers. + /// Called once — either from connect() or from start() for deferred connect. + fn setup_event_loops(&self) { + let services = self.services.clone(); + let status_label = self.status_label.clone(); + let sidebar = self.sidebar.clone(); + let home = self.home_dashboard.clone(); + + // Take the persistent WS event receiver from Services. 
+ if let Some(rx) = services.take_ws_rx() { + let services_rx = services.clone(); + let sidebar_rx = sidebar.clone(); + let home_rx = home.clone(); + let status_rx = status_label.clone(); + glib::spawn_future_local(async move { + while let Ok(event) = rx.recv().await { + match event { + WsEvent::Connected => { + services_rx + .state + .set_connection_state(ConnectionState::Connected); + update_status_ui( + &status_rx, + &services_rx.state.connection_state(), + ); + } + WsEvent::Disconnected => { + services_rx + .state + .set_connection_state(ConnectionState::Reconnecting); + update_status_ui( + &status_rx, + &services_rx.state.connection_state(), + ); + } + WsEvent::ManifestUpdated(manifest) => { + services_rx.state.set_manifest(manifest.clone()); + sidebar_rx.update_manifest(&manifest); + home_rx.update_manifest(&manifest); + } + WsEvent::AgentStatusChanged { + worktree_id, + agent_id, + status, + .. + } => { + sidebar_rx.update_agent_status( + &worktree_id, + &agent_id, + status, + ); + } + WsEvent::TerminalOutput { .. } => { + // Terminal output handled by subscribed panes + } + WsEvent::Error(msg) => { + services_rx + .state + .set_connection_state(ConnectionState::Error(msg)); + update_status_ui( + &status_rx, + &services_rx.state.connection_state(), + ); + } + } + } + }); + } + + // Drain toast messages and show them via the toast overlay. + if let Some(toast_rx) = services.take_toast_rx() { + let overlay = self.toast_overlay.clone(); + glib::spawn_future_local(async move { + while let Ok(msg) = toast_rx.recv().await { + let toast = adw::Toast::new(&msg.text); + if msg.is_error { + toast.set_timeout(5); + } else { + toast.set_timeout(3); + } + overlay.add_toast(toast); + } + }); + } + } + + /// Start WebSocket and initial data fetch. + /// Event loops must already be set up via setup_event_loops() or start(). 
+ pub fn connect(&self) { + self.setup_event_loops(); + + self.services + .state + .set_connection_state(ConnectionState::Connecting); + self.update_status_label(); + + let services = self.services.clone(); + let status_label = self.status_label.clone(); + let sidebar = self.sidebar.clone(); + let home = self.home_dashboard.clone(); + + // Start WebSocket connection using centralized reconnect_ws. + // This sends events through the persistent ws_tx → ws_rx pipeline. + services.reconnect_ws(); + + // Initial status fetch via HTTP + let client = services.client.clone(); + let state = services.state.clone(); + let sidebar_init = sidebar.clone(); + let home_init = home.clone(); + let status_init = status_label.clone(); + let toast_tx = services.toast_tx.clone(); + services.runtime.spawn(async move { + match client.read().unwrap().status().await { + Ok(manifest) => { + let m = manifest.clone(); + glib::idle_add_once(move || { + state.set_manifest(m.clone()); + state.set_connection_state(ConnectionState::Connected); + sidebar_init.update_manifest(&m); + home_init.update_manifest(&m); + update_status_ui(&status_init, &ConnectionState::Connected); + }); + } + Err(e) => { + let msg = format!("{}", e); + let toast_msg = msg.clone(); + let _ = toast_tx.try_send(ToastMessage { + text: format!("Connection failed: {}", toast_msg), + is_error: true, + }); + glib::idle_add_once(move || { + update_status_ui(&status_init, &ConnectionState::Error(msg)); + }); + } + } + }); + } + + fn update_status_label(&self) { + let state = self.services.state.connection_state(); + update_status_ui(&self.status_label, &state); + } +} + +fn update_status_ui(label: >k::Label, state: &ConnectionState) { + label.set_text(state.label()); + for cls in &["status-running", "status-idle", "status-gone", "status-failed"] { + label.remove_css_class(cls); + } + label.add_css_class(state.css_class()); +} + +/// Sidebar selection types. 
+#[derive(Debug, Clone)] +pub enum SidebarSelection { + Dashboard, + Worktree(String), + Agent(String, String), +} diff --git a/linux/src/ui/worktree_detail.rs b/linux/src/ui/worktree_detail.rs new file mode 100644 index 0000000..f0a6d98 --- /dev/null +++ b/linux/src/ui/worktree_detail.rs @@ -0,0 +1,285 @@ +use gtk4::prelude::*; +use gtk4::{self as gtk}; + +use crate::api::client::MergeRequest; +use crate::state::Services; + +/// Detail panel for a selected worktree. +#[derive(Clone)] +pub struct WorktreeDetail { + container: gtk::Box, + name_label: gtk::Label, + status_label: gtk::Label, + branch_label: gtk::Label, + base_label: gtk::Label, + path_label: gtk::Label, + created_label: gtk::Label, + pr_url_label: gtk::Label, + agents_list: gtk::ListBox, + merge_button: gtk::Button, + kill_button: gtk::Button, + services: Services, + current_id: std::rc::Rc>>, +} + +impl WorktreeDetail { + pub fn new(services: Services) -> Self { + let container = gtk::Box::new(gtk::Orientation::Vertical, 16); + container.set_margin_top(24); + container.set_margin_bottom(24); + container.set_margin_start(24); + container.set_margin_end(24); + + // Header + let header_box = gtk::Box::new(gtk::Orientation::Horizontal, 12); + + let name_label = gtk::Label::new(Some("Worktree")); + name_label.add_css_class("title-1"); + name_label.set_halign(gtk::Align::Start); + name_label.set_hexpand(true); + + let status_label = gtk::Label::new(Some("Unknown")); + status_label.add_css_class("caption"); + + header_box.append(&name_label); + header_box.append(&status_label); + container.append(&header_box); + + // Info grid + let info_grid = gtk::Grid::new(); + info_grid.set_row_spacing(8); + info_grid.set_column_spacing(16); + + let branch_label = gtk::Label::new(Some("—")); + let base_label = gtk::Label::new(Some("—")); + let path_label = gtk::Label::new(Some("—")); + let created_label = gtk::Label::new(Some("—")); + let pr_url_label = gtk::Label::new(Some("—")); + + add_info_row(&info_grid, 0, 
"Branch", &branch_label); + add_info_row(&info_grid, 1, "Base Branch", &base_label); + add_info_row(&info_grid, 2, "Path", &path_label); + add_info_row(&info_grid, 3, "Created", &created_label); + add_info_row(&info_grid, 4, "PR URL", &pr_url_label); + + container.append(&info_grid); + + // Agents section + let agents_header = gtk::Label::new(Some("Agents")); + agents_header.add_css_class("title-4"); + agents_header.set_halign(gtk::Align::Start); + agents_header.set_margin_top(16); + container.append(&agents_header); + + let agents_scroll = gtk::ScrolledWindow::new(); + agents_scroll.set_vexpand(true); + agents_scroll.set_propagate_natural_height(true); + agents_scroll.set_max_content_height(300); + + let agents_list = gtk::ListBox::new(); + agents_list.set_selection_mode(gtk::SelectionMode::None); + agents_list.add_css_class("boxed-list"); + agents_scroll.set_child(Some(&agents_list)); + container.append(&agents_scroll); + + // Action buttons + let button_box = gtk::Box::new(gtk::Orientation::Horizontal, 8); + button_box.set_halign(gtk::Align::End); + button_box.set_margin_top(16); + + let kill_button = gtk::Button::with_label("Kill All Agents"); + kill_button.add_css_class("destructive-action"); + + let merge_button = gtk::Button::with_label("Merge"); + merge_button.add_css_class("suggested-action"); + + button_box.append(&kill_button); + button_box.append(&merge_button); + container.append(&button_box); + + let current_id: std::rc::Rc>> = + std::rc::Rc::new(std::cell::RefCell::new(None)); + + // Kill button action + let services_kill = services.clone(); + let id_kill = current_id.clone(); + kill_button.connect_clicked(move |_| { + if let Some(ref wt_id) = *id_kill.borrow() { + let client = services_kill.client.clone(); + let id = wt_id.clone(); + let toast_tx = services_kill.toast_tx.clone(); + services_kill.runtime.spawn(async move { + match client.read().unwrap().kill_worktree(&id).await { + Ok(_) => { + let _ = toast_tx.send(crate::state::ToastMessage { + 
text: format!("Killed worktree {}", id), + is_error: false, + }).await; + } + Err(e) => { + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Kill failed: {}", e), + is_error: true, + }).await; + } + } + }); + } + }); + + // Merge button action + let services_merge = services.clone(); + let id_merge = current_id.clone(); + merge_button.connect_clicked(move |_| { + if let Some(ref wt_id) = *id_merge.borrow() { + let client = services_merge.client.clone(); + let id = wt_id.clone(); + let toast_tx = services_merge.toast_tx.clone(); + services_merge.runtime.spawn(async move { + let req = MergeRequest { + strategy: Some("squash".to_string()), + cleanup: Some(true), + force: None, + }; + match client.read().unwrap().merge_worktree(&id, &req).await { + Ok(_) => { + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Merged worktree {}", id), + is_error: false, + }).await; + } + Err(e) => { + let _ = toast_tx.send(crate::state::ToastMessage { + text: format!("Merge failed: {}", e), + is_error: true, + }).await; + } + } + }); + } + }); + + Self { + container, + name_label, + status_label, + branch_label, + base_label, + path_label, + created_label, + pr_url_label, + agents_list, + merge_button, + kill_button, + services, + current_id, + } + } + + pub fn widget(&self) -> >k::Box { + &self.container + } + + pub fn set_worktree(&self, worktree_id: &str) { + *self.current_id.borrow_mut() = Some(worktree_id.to_string()); + + let manifest = match self.services.state.manifest() { + Some(m) => m, + None => return, + }; + + let wt = match manifest.worktrees.get(worktree_id) { + Some(wt) => wt, + None => return, + }; + + self.name_label.set_text(&wt.name); + + // Update status with styling + self.status_label.set_text(wt.status.label()); + for cls in &[ + "status-running", + "status-idle", + "status-exited", + "status-gone", + "status-failed", + ] { + self.status_label.remove_css_class(cls); + } + 
self.status_label.add_css_class(wt.status.css_class()); + + self.branch_label.set_text(&wt.branch); + self.base_label.set_text(&wt.base_branch); + self.path_label.set_text(&wt.path); + self.created_label.set_text(&wt.created_at); + self.pr_url_label.set_text( + wt.pr_url.as_deref().unwrap_or("—"), + ); + + // Rebuild agents list + while let Some(row) = self.agents_list.row_at_index(0) { + self.agents_list.remove(&row); + } + + let mut agents: Vec<_> = wt.agents.values().collect(); + agents.sort_by(|a, b| a.started_at.cmp(&b.started_at)); + + for agent in agents { + let row = create_agent_detail_row(agent); + self.agents_list.append(&row); + } + } +} + +fn add_info_row(grid: >k::Grid, row: i32, label_text: &str, value: >k::Label) { + let label = gtk::Label::new(Some(label_text)); + label.add_css_class("dim-label"); + label.set_halign(gtk::Align::Start); + + value.set_halign(gtk::Align::Start); + value.set_selectable(true); + value.set_hexpand(true); + + grid.attach(&label, 0, row, 1, 1); + grid.attach(value, 1, row, 1, 1); +} + +fn create_agent_detail_row( + agent: &crate::models::manifest::AgentEntry, +) -> gtk::ListBoxRow { + let row = gtk::ListBoxRow::new(); + let hbox = gtk::Box::new(gtk::Orientation::Horizontal, 8); + hbox.set_margin_top(8); + hbox.set_margin_bottom(8); + hbox.set_margin_start(12); + hbox.set_margin_end(12); + + let dot = gtk::Label::new(Some("\u{25CF}")); + dot.add_css_class(agent.status.css_class()); + + let vbox = gtk::Box::new(gtk::Orientation::Vertical, 2); + vbox.set_hexpand(true); + + let name_label = gtk::Label::new(Some(&agent.name)); + name_label.set_halign(gtk::Align::Start); + name_label.add_css_class("heading"); + + let info_label = gtk::Label::new(Some(&format!( + "{} — {} — {}", + agent.agent_type, + agent.status.label(), + agent.id + ))); + info_label.set_halign(gtk::Align::Start); + info_label.add_css_class("caption"); + info_label.add_css_class("dim-label"); + + vbox.append(&name_label); + vbox.append(&info_label); + + 
hbox.append(&dot); + hbox.append(&vbox); + row.set_child(Some(&hbox)); + + row +} diff --git a/linux/src/util/mod.rs b/linux/src/util/mod.rs new file mode 100644 index 0000000..327cf1b --- /dev/null +++ b/linux/src/util/mod.rs @@ -0,0 +1 @@ +pub mod shell; diff --git a/linux/src/util/shell.rs b/linux/src/util/shell.rs new file mode 100644 index 0000000..efcc3bb --- /dev/null +++ b/linux/src/util/shell.rs @@ -0,0 +1,92 @@ +/// Shell-escape a string for safe use in tmux send-keys or shell commands. +pub fn shell_escape(s: &str) -> String { + if s.is_empty() { + return "''".to_string(); + } + if s.chars() + .all(|c| c.is_alphanumeric() || c == '-' || c == '_' || c == '.' || c == '/' || c == ':') + { + return s.to_string(); + } + format!("'{}'", s.replace('\'', "'\\''")) +} + +/// Build a tmux attach command that connects to a specific session/window. +pub fn tmux_attach_command(session_name: &str, window_target: &str) -> Vec { + vec![ + "tmux".to_string(), + "new-session".to_string(), + "-t".to_string(), + session_name.to_string(), + "-s".to_string(), + format!("{}-view-{}", session_name, window_target), + ";".to_string(), + "set-option".to_string(), + "destroy-unattached".to_string(), + "on".to_string(), + ";".to_string(), + "set-option".to_string(), + "status".to_string(), + "off".to_string(), + ";".to_string(), + "set-option".to_string(), + "mouse".to_string(), + "on".to_string(), + ";".to_string(), + "select-window".to_string(), + "-t".to_string(), + format!(":{}", window_target), + ] +} + +/// Build a tmux attach command as a single shell string (for VTE spawn). 
+pub fn tmux_attach_shell_command(session_name: &str, window_target: &str) -> String { + format!( + "tmux new-session -t {} -s {}-view-{} \ + \\; set-option destroy-unattached on \ + \\; set-option status off \ + \\; set-option mouse on \ + \\; select-window -t :{}", + shell_escape(session_name), + shell_escape(session_name), + shell_escape(window_target), + shell_escape(window_target), + ) +} + +/// Check if a command is available in PATH. +pub fn command_exists(cmd: &str) -> bool { + std::process::Command::new("which") + .arg(cmd) + .output() + .map(|o| o.status.success()) + .unwrap_or(false) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_shell_escape_simple() { + assert_eq!(shell_escape("hello"), "hello"); + assert_eq!(shell_escape("path/to/file"), "path/to/file"); + } + + #[test] + fn test_shell_escape_special() { + assert_eq!(shell_escape("hello world"), "'hello world'"); + assert_eq!(shell_escape("it's"), "'it'\\''s'"); + } + + #[test] + fn test_shell_escape_empty() { + assert_eq!(shell_escape(""), "''"); + } + + #[test] + fn test_command_exists() { + assert!(command_exists("sh")); + assert!(!command_exists("nonexistent_binary_xyz")); + } +} diff --git a/package-lock.json b/package-lock.json index a036a8f..5866ff1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,20 +9,26 @@ "version": "0.3.3", "license": "MIT", "dependencies": { + "@fastify/cors": "^11.2.0", "commander": "^14.0.0", "cron-parser": "^5.5.0", "execa": "^9.5.2", + "fastify": "^5.7.4", "nanoid": "^5.1.5", "proper-lockfile": "^4.1.2", + "qrcode-terminal": "^0.12.0", "write-file-atomic": "^7.0.0", + "ws": "^8.19.0", "yaml": "^2.7.1" }, "bin": { "ppg": "dist/cli.js" }, "devDependencies": { - "@types/node": "^22.13.4", + "@types/node": "^22.19.13", "@types/proper-lockfile": "^4.1.4", + "@types/qrcode-terminal": "^0.12.2", + "@types/ws": "^8.18.1", "tsup": "^8.4.0", "tsx": "^4.19.3", "typescript": "^5.7.3", @@ -32,827 +38,203 @@ "node": ">=20" } }, - 
"node_modules/@esbuild/aix-ppc64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", - "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz", - "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", - "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz", - "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", - "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - 
"optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", - "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", - "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", - "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", - "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", - "integrity": 
"sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", - "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", - "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", - "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", - "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.27.3", - 
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", - "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", - "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", - "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", - "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", - "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - 
} - }, - "node_modules/@esbuild/openbsd-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", - "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", - "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openharmony-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", - "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", - "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", - "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", - "cpu": [ - "arm64" - ], - "dev": true, 
- "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", - "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.27.3", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", - "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.13", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", - "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0", - "@jridgewell/trace-mapping": "^0.3.24" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", - "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", - "dev": true, - 
"license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.31", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", - "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.58.0.tgz", - "integrity": "sha512-mr0tmS/4FoVk1cnaeN244A/wjvGDNItZKR8hRhnmCzygyRXYtKF5jVDSIILR1U97CTzAYmbgIj/Dukg62ggG5w==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-android-arm64": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.58.0.tgz", - "integrity": "sha512-+s++dbp+/RTte62mQD9wLSbiMTV+xr/PeRJEc/sFZFSBRlHPNPVaf5FXlzAL77Mr8FtSfQqCN+I598M8U41ccQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.58.0.tgz", - "integrity": "sha512-MFWBwTcYs0jZbINQBXHfSrpSQJq3IUOakcKPzfeSznONop14Pxuqa0Kg19GD0rNBMPQI2tFtu3UzapZpH0Uc1Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.58.0.tgz", - "integrity": "sha512-yiKJY7pj9c9JwzuKYLFaDZw5gma3fI9bkPEIyofvVfsPqjCWPglSHdpdwXpKGvDeYDms3Qal8qGMEHZ1M/4Udg==", - "cpu": [ - "x64" - ], - "dev": true, - 
"license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.58.0.tgz", - "integrity": "sha512-x97kCoBh5MOevpn/CNK9W1x8BEzO238541BGWBc315uOlN0AD/ifZ1msg+ZQB05Ux+VF6EcYqpiagfLJ8U3LvQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.58.0.tgz", - "integrity": "sha512-Aa8jPoZ6IQAG2eIrcXPpjRcMjROMFxCt1UYPZZtCxRV68WkuSigYtQ/7Zwrcr2IvtNJo7T2JfDXyMLxq5L4Jlg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.58.0.tgz", - "integrity": "sha512-Ob8YgT5kD/lSIYW2Rcngs5kNB/44Q2RzBSPz9brf2WEtcGR7/f/E9HeHn1wYaAwKBni+bdXEwgHvUd0x12lQSA==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.58.0.tgz", - "integrity": "sha512-K+RI5oP1ceqoadvNt1FecL17Qtw/n9BgRSzxif3rTL2QlIu88ccvY+Y9nnHe/cmT5zbH9+bpiJuG1mGHRVwF4Q==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.58.0.tgz", - "integrity": 
"sha512-T+17JAsCKUjmbopcKepJjHWHXSjeW7O5PL7lEFaeQmiVyw4kkc5/lyYKzrv6ElWRX/MrEWfPiJWqbTvfIvjM1Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.58.0.tgz", - "integrity": "sha512-cCePktb9+6R9itIJdeCFF9txPU7pQeEHB5AbHu/MKsfH/k70ZtOeq1k4YAtBv9Z7mmKI5/wOLYjQ+B9QdxR6LA==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-loong64-gnu": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.58.0.tgz", - "integrity": "sha512-iekUaLkfliAsDl4/xSdoCJ1gnnIXvoNz85C8U8+ZxknM5pBStfZjeXgB8lXobDQvvPRCN8FPmmuTtH+z95HTmg==", + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", + "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", "cpu": [ - "loong64" + "x64" ], "dev": true, "license": "MIT", "optional": true, "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-loong64-musl": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.58.0.tgz", - "integrity": "sha512-68ofRgJNl/jYJbxFjCKE7IwhbfxOl1muPN4KbIqAIe32lm22KmU7E8OPvyy68HTNkI2iV/c8y2kSPSm2mW/Q9Q==", - "cpu": [ - "loong64" + "darwin" ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "engines": { + "node": ">=18" + } }, - "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.58.0.tgz", - "integrity": 
"sha512-dpz8vT0i+JqUKuSNPCP5SYyIV2Lh0sNL1+FhM7eLC457d5B9/BC3kDPp5BBftMmTNsBarcPcoz5UGSsnCiw4XQ==", - "cpu": [ - "ppc64" + "node_modules/@fastify/ajv-compiler": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@fastify/ajv-compiler/-/ajv-compiler-4.0.5.tgz", + "integrity": "sha512-KoWKW+MhvfTRWL4qrhUwAAZoaChluo0m0vbiJlGMt2GXvL4LVPQEjt8kSpHI3IBq5Rez8fg+XeH3cneztq+C7A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } ], - "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "dependencies": { + "ajv": "^8.12.0", + "ajv-formats": "^3.0.1", + "fast-uri": "^3.0.0" + } }, - "node_modules/@rollup/rollup-linux-ppc64-musl": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.58.0.tgz", - "integrity": "sha512-4gdkkf9UJ7tafnweBCR/mk4jf3Jfl0cKX9Np80t5i78kjIH0ZdezUv/JDI2VtruE5lunfACqftJ8dIMGN4oHew==", - "cpu": [ - "ppc64" + "node_modules/@fastify/cors": { + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/@fastify/cors/-/cors-11.2.0.tgz", + "integrity": "sha512-LbLHBuSAdGdSFZYTLVA3+Ch2t+sA6nq3Ejc6XLAKiQ6ViS2qFnvicpj0htsx03FyYeLs04HfRNBsz/a8SvbcUw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } ], - "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "dependencies": { + "fastify-plugin": "^5.0.0", + "toad-cache": "^3.7.0" + } }, - "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.58.0.tgz", - "integrity": "sha512-YFS4vPnOkDTD/JriUeeZurFYoJhPf9GQQEF/v4lltp3mVcBmnsAdjEWhr2cjUCZzZNzxCG0HZOvJU44UGHSdzw==", - "cpu": [ - "riscv64" + 
"node_modules/@fastify/error": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@fastify/error/-/error-4.2.0.tgz", + "integrity": "sha512-RSo3sVDXfHskiBZKBPRgnQTtIqpi/7zhJOEmAxCiBcM7d0uwdGdxLlsCaLzGs8v8NnxIRlfG0N51p5yFaOentQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "license": "MIT" }, - "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.58.0.tgz", - "integrity": "sha512-x2xgZlFne+QVNKV8b4wwaCS8pwq3y14zedZ5DqLzjdRITvreBk//4Knbcvm7+lWmms9V9qFp60MtUd0/t/PXPw==", - "cpu": [ - "riscv64" + "node_modules/@fastify/fast-json-stringify-compiler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@fastify/fast-json-stringify-compiler/-/fast-json-stringify-compiler-5.0.3.tgz", + "integrity": "sha512-uik7yYHkLr6fxd8hJSZ8c+xF4WafPK+XzneQDPU+D10r5X19GW8lJcom2YijX2+qtFF1ENJlHXKFM9ouXNJYgQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } ], - "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "dependencies": { + "fast-json-stringify": "^6.0.0" + } }, - "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.58.0.tgz", - "integrity": "sha512-jIhrujyn4UnWF8S+DHSkAkDEO3hLX0cjzxJZPLF80xFyzyUIYgSMRcYQ3+uqEoyDD2beGq7Dj7edi8OnJcS/hg==", - "cpu": [ - "s390x" + "node_modules/@fastify/forwarded": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@fastify/forwarded/-/forwarded-3.0.1.tgz", + "integrity": 
"sha512-JqDochHFqXs3C3Ml3gOY58zM7OqO9ENqPo0UqAjAjH8L01fRZqwX9iLeX34//kiJubF7r2ZQHtBRU36vONbLlw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "license": "MIT" }, - "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.58.0.tgz", - "integrity": "sha512-+410Srdoh78MKSJxTQ+hZ/Mx+ajd6RjjPwBPNd0R3J9FtL6ZA0GqiiyNjCO9In0IzZkCNrpGymSfn+kgyPQocg==", - "cpu": [ - "x64" + "node_modules/@fastify/merge-json-schemas": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@fastify/merge-json-schemas/-/merge-json-schemas-0.2.1.tgz", + "integrity": "sha512-OA3KGBCy6KtIvLf8DINC5880o5iBlDX4SxzLQS8HorJAbqluzLRn80UXU0bxZn7UOFhFgpRJDasfwn9nG4FG4A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } ], - "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "dependencies": { + "dequal": "^2.0.3" + } }, - "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.58.0.tgz", - "integrity": "sha512-ZjMyby5SICi227y1MTR3VYBpFTdZs823Rs/hpakufleBoufoOIB6jtm9FEoxn/cgO7l6PM2rCEl5Kre5vX0QrQ==", - "cpu": [ - "x64" + "node_modules/@fastify/proxy-addr": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@fastify/proxy-addr/-/proxy-addr-5.1.0.tgz", + "integrity": "sha512-INS+6gh91cLUjB+PVHfu1UqcB76Sqtpyp7bnL+FYojhjygvOPA9ctiD/JDKsyD9Xgu4hUhCSJBPig/w7duNajw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": 
"https://opencollective.com/fastify" + } ], - "dev": true, "license": "MIT", - "optional": true, - "os": [ - "linux" - ] + "dependencies": { + "@fastify/forwarded": "^3.0.0", + "ipaddr.js": "^2.1.0" + } }, - "node_modules/@rollup/rollup-openbsd-x64": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.58.0.tgz", - "integrity": "sha512-ds4iwfYkSQ0k1nb8LTcyXw//ToHOnNTJtceySpL3fa7tc/AsE+UpUFphW126A6fKBGJD5dhRvg8zw1rvoGFxmw==", - "cpu": [ - "x64" - ], + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ] + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } }, - "node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.58.0.tgz", - "integrity": "sha512-fd/zpJniln4ICdPkjWFhZYeY/bpnaN9pGa6ko+5WD38I0tTqk9lXMgXZg09MNdhpARngmxiCg0B0XUamNw/5BQ==", - "cpu": [ - "arm64" - ], + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ] + "engines": { + "node": ">=6.0.0" + } }, - "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.58.0.tgz", - "integrity": 
"sha512-YpG8dUOip7DCz3nr/JUfPbIUo+2d/dy++5bFzgi4ugOGBIox+qMbbqt/JoORwvI/C9Kn2tz6+Bieoqd5+B1CjA==", - "cpu": [ - "arm64" - ], + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] + "license": "MIT" }, - "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.58.0.tgz", - "integrity": "sha512-b9DI8jpFQVh4hIXFr0/+N/TzLdpBIoPzjt0Rt4xJbW3mzguV3mduR9cNgiuFcuL/TeORejJhCWiAXe3E/6PxWA==", - "cpu": [ - "ia32" - ], + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", "dev": true, "license": "MIT", - "optional": true, - "os": [ - "win32" - ] + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } }, - "node_modules/@rollup/rollup-win32-x64-gnu": { - "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.58.0.tgz", - "integrity": "sha512-CSrVpmoRJFN06LL9xhkitkwUcTZtIotYAF5p6XOR2zW0Zz5mzb3IPpcoPhB02frzMHFNo1reQ9xSF5fFm3hUsQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] + "node_modules/@pinojs/redact": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@pinojs/redact/-/redact-0.4.0.tgz", + "integrity": "sha512-k2ENnmBugE/rzQfEcdWHcCY+/FM3VLzH9cYEsbdsoqrvzAKRhUZeRNhAZvB8OitQJ1TBed3yqWtdjzS6wJKBwg==", + "license": "MIT" }, - 
"node_modules/@rollup/rollup-win32-x64-msvc": { + "node_modules/@rollup/rollup-darwin-x64": { "version": "4.58.0", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.58.0.tgz", - "integrity": "sha512-QFsBgQNTnh5K0t/sBsjJLq24YVqEIVkGpfN2VHsnN90soZyhaiA9UUHufcctVNL4ypJY0wrwad0wslx2KJQ1/w==", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.58.0.tgz", + "integrity": "sha512-yiKJY7pj9c9JwzuKYLFaDZw5gma3fI9bkPEIyofvVfsPqjCWPglSHdpdwXpKGvDeYDms3Qal8qGMEHZ1M/4Udg==", "cpu": [ "x64" ], @@ -860,7 +242,7 @@ "license": "MIT", "optional": true, "os": [ - "win32" + "darwin" ] }, "node_modules/@sec-ant/readable-stream": { @@ -907,9 +289,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "22.19.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.11.tgz", - "integrity": "sha512-BH7YwL6rA93ReqeQS1c4bsPpcfOmJasG+Fkr6Y59q83f9M1WcBRHR2vM+P9eOisYRcN3ujQoiZY8uk5W+1WL8w==", + "version": "22.19.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.13.tgz", + "integrity": "sha512-akNQMv0wW5uyRpD2v2IEyRSZiR+BeGuoB6L310EgGObO44HSMNT8z1xzio28V8qOrgYaopIDNA18YgdXd+qTiw==", "dev": true, "license": "MIT", "dependencies": { @@ -926,6 +308,13 @@ "@types/retry": "*" } }, + "node_modules/@types/qrcode-terminal": { + "version": "0.12.2", + "resolved": "https://registry.npmjs.org/@types/qrcode-terminal/-/qrcode-terminal-0.12.2.tgz", + "integrity": "sha512-v+RcIEJ+Uhd6ygSQ0u5YYY7ZM+la7GgPbs0V/7l/kFs2uO4S8BcIUEMoP7za4DNIqNnUD5npf0A/7kBhrCKG5Q==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/retry": { "version": "0.12.5", "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.5.tgz", @@ -933,6 +322,16 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/ws": { + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", + "integrity": 
"sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@vitest/expect": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", @@ -1048,6 +447,12 @@ "url": "https://opencollective.com/vitest" } }, + "node_modules/abstract-logging": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/abstract-logging/-/abstract-logging-2.0.1.tgz", + "integrity": "sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA==", + "license": "MIT" + }, "node_modules/acorn": { "version": "8.16.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", @@ -1061,6 +466,39 @@ "node": ">=0.4.0" } }, + "node_modules/ajv": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.18.0.tgz", + "integrity": "sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz", + "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==", + "license": "MIT", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, "node_modules/any-promise": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", @@ -1074,8 +512,37 @@ "integrity": 
"sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", "dev": true, "license": "MIT", - "engines": { - "node": ">=12" + "engines": { + "node": ">=12" + } + }, + "node_modules/atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==", + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/avvio": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/avvio/-/avvio-9.2.0.tgz", + "integrity": "sha512-2t/sy01ArdHHE0vRH5Hsay+RtCZt3dLPji7W7/MMOCEgze5b7SNDC4j5H6FnVgPkI1MTNFGzHdHrVXDDl7QSSQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "@fastify/error": "^4.0.0", + "fastq": "^1.17.1" } }, "node_modules/bundle-require": { @@ -1173,6 +640,19 @@ "node": "^14.18.0 || >=16.10.0" } }, + "node_modules/cookie": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, "node_modules/cron-parser": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-5.5.0.tgz", @@ -1227,6 +707,15 @@ "node": ">=6" } }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/es-module-lexer": { 
"version": "1.7.0", "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", @@ -1322,6 +811,125 @@ "node": ">=12.0.0" } }, + "node_modules/fast-decode-uri-component": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/fast-decode-uri-component/-/fast-decode-uri-component-1.0.1.tgz", + "integrity": "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg==", + "license": "MIT" + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-json-stringify": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/fast-json-stringify/-/fast-json-stringify-6.3.0.tgz", + "integrity": "sha512-oRCntNDY/329HJPlmdNLIdogNtt6Vyjb1WuT01Soss3slIdyUp8kAcDU3saQTOquEK8KFVfwIIF7FebxUAu+yA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "@fastify/merge-json-schemas": "^0.2.0", + "ajv": "^8.12.0", + "ajv-formats": "^3.0.1", + "fast-uri": "^3.0.0", + "json-schema-ref-resolver": "^3.0.0", + "rfdc": "^1.2.0" + } + }, + "node_modules/fast-querystring": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.1.2.tgz", + "integrity": "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg==", + "license": "MIT", + "dependencies": { + "fast-decode-uri-component": "^1.0.1" + } + }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": 
"sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/fastify": { + "version": "5.7.4", + "resolved": "https://registry.npmjs.org/fastify/-/fastify-5.7.4.tgz", + "integrity": "sha512-e6l5NsRdaEP8rdD8VR0ErJASeyaRbzXYpmkrpr2SuvuMq6Si3lvsaVy5C+7gLanEkvjpMDzBXWE5HPeb/hgTxA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "@fastify/ajv-compiler": "^4.0.5", + "@fastify/error": "^4.0.0", + "@fastify/fast-json-stringify-compiler": "^5.0.0", + "@fastify/proxy-addr": "^5.0.0", + "abstract-logging": "^2.0.1", + "avvio": "^9.0.0", + "fast-json-stringify": "^6.0.0", + "find-my-way": "^9.0.0", + "light-my-request": "^6.0.0", + "pino": "^10.1.0", + "process-warning": "^5.0.0", + "rfdc": "^1.3.1", + "secure-json-parse": "^4.0.0", + "semver": "^7.6.0", + "toad-cache": "^3.7.0" + } + }, + "node_modules/fastify-plugin": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-5.1.0.tgz", + "integrity": "sha512-FAIDA8eovSt5qcDgcBvDuX/v0Cjz0ohGhENZ/wpc3y+oZCY2afZ9Baqql3g/lC+OHRnciQol4ww7tuthOb9idw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/fastq": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", + "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, 
"node_modules/fdir": { "version": "6.5.0", "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", @@ -1355,6 +963,20 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/find-my-way": { + "version": "9.5.0", + "resolved": "https://registry.npmjs.org/find-my-way/-/find-my-way-9.5.0.tgz", + "integrity": "sha512-VW2RfnmscZO5KgBY5XVyKREMW5nMZcxDy+buTOsL+zIPnBlbKm+00sgzoQzq1EVh4aALZLfKdwv6atBGcjvjrQ==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-querystring": "^1.0.0", + "safe-regex2": "^5.0.0" + }, + "engines": { + "node": ">=20" + } + }, "node_modules/fix-dts-default-cjs-exports": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/fix-dts-default-cjs-exports/-/fix-dts-default-cjs-exports-1.0.1.tgz", @@ -1435,6 +1057,15 @@ "node": ">=0.8.19" } }, + "node_modules/ipaddr.js": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.3.0.tgz", + "integrity": "sha512-Zv/pA+ciVFbCSBBjGfaKUya/CcGmUHzTydLMaTwrUUEM2DIEO3iZvueGxmacvmN50fGpGVKeTXpb2LcYQxeVdg==", + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, "node_modules/is-plain-obj": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", @@ -1494,6 +1125,68 @@ "dev": true, "license": "MIT" }, + "node_modules/json-schema-ref-resolver": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/json-schema-ref-resolver/-/json-schema-ref-resolver-3.0.0.tgz", + "integrity": "sha512-hOrZIVL5jyYFjzk7+y7n5JDzGlU8rfWDuYyHwGa2WA8/pcmMHezp2xsVwxrebD/Q9t8Nc5DboieySDpCp4WG4A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "license": "MIT" + }, + "node_modules/light-my-request": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/light-my-request/-/light-my-request-6.6.0.tgz", + "integrity": "sha512-CHYbu8RtboSIoVsHZ6Ye4cj4Aw/yg2oAFimlF7mNvfDV192LR7nDiKtSIfCuLT7KokPSTn/9kfVLm5OGN0A28A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause", + "dependencies": { + "cookie": "^1.0.1", + "process-warning": "^4.0.0", + "set-cookie-parser": "^2.6.0" + } + }, + "node_modules/light-my-request/node_modules/process-warning": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-4.0.1.tgz", + "integrity": "sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, "node_modules/lilconfig": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", @@ -1638,6 +1331,15 @@ "node": ">=0.10.0" } }, + "node_modules/on-exit-leak-free": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz", + "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/parse-ms": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-4.0.0.tgz", @@ -1696,6 +1398,43 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + 
"node_modules/pino": { + "version": "10.3.1", + "resolved": "https://registry.npmjs.org/pino/-/pino-10.3.1.tgz", + "integrity": "sha512-r34yH/GlQpKZbU1BvFFqOjhISRo1MNx1tWYsYvmj6KIRHSPMT2+yHOEb1SG6NMvRoHRF0a07kCOox/9yakl1vg==", + "license": "MIT", + "dependencies": { + "@pinojs/redact": "^0.4.0", + "atomic-sleep": "^1.0.0", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^3.0.0", + "pino-std-serializers": "^7.0.0", + "process-warning": "^5.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "sonic-boom": "^4.0.1", + "thread-stream": "^4.0.0" + }, + "bin": { + "pino": "bin.js" + } + }, + "node_modules/pino-abstract-transport": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-3.0.0.tgz", + "integrity": "sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg==", + "license": "MIT", + "dependencies": { + "split2": "^4.0.0" + } + }, + "node_modules/pino-std-serializers": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-7.1.0.tgz", + "integrity": "sha512-BndPH67/JxGExRgiX1dX0w1FvZck5Wa4aal9198SrRhZjH3GxKQUKIBnYJTdj2HDN3UQAS06HlfcSbQj2OHmaw==", + "license": "MIT" + }, "node_modules/pirates": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", @@ -1824,6 +1563,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/process-warning": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-5.0.0.tgz", + "integrity": "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, 
"node_modules/proper-lockfile": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/proper-lockfile/-/proper-lockfile-4.1.2.tgz", @@ -1841,6 +1596,20 @@ "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "license": "ISC" }, + "node_modules/qrcode-terminal": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/qrcode-terminal/-/qrcode-terminal-0.12.0.tgz", + "integrity": "sha512-EXtzRZmC+YGmGlDFbXKxQiMZNwCLEO6BANKXG4iCtSIM0yqc/pappSx3RIKr4r0uh5JsBckOXeKrB3Iz7mdQpQ==", + "bin": { + "qrcode-terminal": "bin/qrcode-terminal.js" + } + }, + "node_modules/quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==", + "license": "MIT" + }, "node_modules/readdirp": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", @@ -1855,6 +1624,24 @@ "url": "https://paulmillr.com/funding/" } }, + "node_modules/real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==", + "license": "MIT", + "engines": { + "node": ">= 12.13.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/resolve-from": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", @@ -1875,6 +1662,15 @@ "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" } }, + 
"node_modules/ret": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/ret/-/ret-0.5.0.tgz", + "integrity": "sha512-I1XxrZSQ+oErkRR4jYbAyEEu2I0avBvvMM5JN+6EBprOGRCs63ENqZ3vjavq8fBw2+62G5LF5XelKwuJpcvcxw==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/retry": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", @@ -1884,6 +1680,22 @@ "node": ">= 4" } }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rfdc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", + "license": "MIT" + }, "node_modules/rollup": { "version": "4.58.0", "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.58.0.tgz", @@ -1929,6 +1741,68 @@ "fsevents": "~2.3.2" } }, + "node_modules/safe-regex2": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/safe-regex2/-/safe-regex2-5.0.0.tgz", + "integrity": "sha512-YwJwe5a51WlK7KbOJREPdjNrpViQBI3p4T50lfwPuDhZnE3XGVTlGvi+aolc5+RvxDD6bnUmjVsU9n1eboLUYw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT", + "dependencies": { + "ret": "~0.5.0" + } + }, + "node_modules/safe-stable-stringify": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", + "license": "MIT", + "engines": { + "node": ">=10" + 
} + }, + "node_modules/secure-json-parse": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-4.1.0.tgz", + "integrity": "sha512-l4KnYfEyqYJxDwlNVyRfO2E4NTHfMKAWdUuA8J0yve2Dz/E/PdBepY03RvyJpssIpRFwJoCD55wA+mEDs6ByWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-cookie-parser": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", + "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", + "license": "MIT" + }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -1969,6 +1843,15 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/sonic-boom": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.1.tgz", + "integrity": "sha512-w6AxtubXa2wTXAUsZMMWERrsIRAdrK0Sc+FUytWvYAhBJLyuI4llrMIC1DtlNSdI99EI86KZum2MMq3EAZlF9Q==", + "license": "MIT", + "dependencies": { + "atomic-sleep": "^1.0.0" + } + }, "node_modules/source-map": { "version": "0.7.6", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.6.tgz", @@ -1989,6 +1872,15 @@ "node": ">=0.10.0" } }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": 
"sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, "node_modules/stackback": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", @@ -2084,6 +1976,18 @@ "node": ">=0.8" } }, + "node_modules/thread-stream": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-4.0.0.tgz", + "integrity": "sha512-4iMVL6HAINXWf1ZKZjIPcz5wYaOdPhtO8ATvZ+Xqp3BTdaqtAwQkNmKORqcIo5YkQqGXq5cwfswDwMqqQNrpJA==", + "license": "MIT", + "dependencies": { + "real-require": "^0.2.0" + }, + "engines": { + "node": ">=20" + } + }, "node_modules/tinybench": { "version": "2.9.0", "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", @@ -2145,6 +2049,15 @@ "node": ">=14.0.0" } }, + "node_modules/toad-cache": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/toad-cache/-/toad-cache-3.7.0.tgz", + "integrity": "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw==", + "license": "MIT", + "engines": { + "node": ">=12" + } + }, "node_modules/tree-kill": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz", @@ -2491,6 +2404,27 @@ "node": "^20.17.0 || >=22.9.0" } }, + "node_modules/ws": { + "version": "8.19.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz", + "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/yaml": { "version": "2.8.2", "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", diff --git a/package.json b/package.json index 
b4cd8bf..229265e 100644 --- a/package.json +++ b/package.json @@ -45,17 +45,23 @@ ], "license": "MIT", "dependencies": { + "@fastify/cors": "^11.2.0", "commander": "^14.0.0", "cron-parser": "^5.5.0", "execa": "^9.5.2", + "fastify": "^5.7.4", "nanoid": "^5.1.5", "proper-lockfile": "^4.1.2", + "qrcode-terminal": "^0.12.0", "write-file-atomic": "^7.0.0", + "ws": "^8.19.0", "yaml": "^2.7.1" }, "devDependencies": { - "@types/node": "^22.13.4", + "@types/node": "^22.19.13", "@types/proper-lockfile": "^4.1.4", + "@types/qrcode-terminal": "^0.12.2", + "@types/ws": "^8.18.1", "tsup": "^8.4.0", "tsx": "^4.19.3", "typescript": "^5.7.3", diff --git a/src/cli.ts b/src/cli.ts index bfb207a..74a3ecd 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -263,6 +263,50 @@ worktreeCmd await worktreeCreateCommand(options); }); +const serveCmd = program.command('serve').description('Manage the ppg API server'); + +serveCmd + .command('start') + .description('Start the serve daemon in a tmux window') + .option('-p, --port ', 'Port to listen on', parsePort, 3100) + .option('-H, --host ', 'Host to bind to', '127.0.0.1') + .option('--token ', 'Bearer token for authentication') + .option('--json', 'Output as JSON') + .action(async (options) => { + const { serveStartCommand } = await import('./commands/serve.js'); + await serveStartCommand(options); + }); + +serveCmd + .command('stop') + .description('Stop the serve daemon') + .option('--json', 'Output as JSON') + .action(async (options) => { + const { serveStopCommand } = await import('./commands/serve.js'); + await serveStopCommand(options); + }); + +serveCmd + .command('status') + .description('Show serve daemon status and recent log') + .option('-l, --lines ', 'Number of recent log lines to show', (v: string) => Number(v), 20) + .option('--json', 'Output as JSON') + .action(async (options) => { + const { serveStatusCommand } = await import('./commands/serve.js'); + await serveStatusCommand(options); + }); + +serveCmd + .command('_daemon', { 
hidden: true }) + .description('Internal: run the serve daemon (called by ppg serve start)') + .option('-p, --port ', 'Port to listen on', parsePort, 3100) + .option('-H, --host ', 'Host to bind to', '127.0.0.1') + .option('--token ', 'Bearer token for authentication') + .action(async (options) => { + const { serveDaemonCommand } = await import('./commands/serve.js'); + await serveDaemonCommand(options); + }); + program .command('ui') .alias('dashboard') @@ -372,6 +416,14 @@ function parsePositiveInt(optionName: string) { }; } +function parsePort(v: string): number { + const n = Number(v); + if (!Number.isInteger(n) || n < 1 || n > 65535) { + throw new Error('--port must be an integer between 1 and 65535'); + } + return n; +} + async function main() { try { await program.parseAsync(process.argv); diff --git a/src/commands/init.ts b/src/commands/init.ts index 2cfc672..962fc94 100644 --- a/src/commands/init.ts +++ b/src/commands/init.ts @@ -28,7 +28,6 @@ Never run \`claude\`, \`codex\`, or \`opencode\` directly as bash commands — t - \`ppg spawn --name --prompt "" --json\` — Spawn worktree + agent - \`ppg spawn --name --agent codex --prompt "" --json\` — Use Codex agent - \`ppg spawn --name --agent opencode --prompt "" --json\` — Use OpenCode agent -- \`ppg spawn --branch --prompt "" --json\` — Attach to existing branch (e.g. 
from a PR) - \`ppg spawn --worktree --agent codex --prompt "review --base main" --json\` — Codex review - \`ppg status --json\` — Check statuses - \`ppg aggregate --all --json\` — Collect results (includes PR URLs) diff --git a/src/commands/kill.ts b/src/commands/kill.ts index 294b6ee..b703d5f 100644 --- a/src/commands/kill.ts +++ b/src/commands/kill.ts @@ -1,13 +1,9 @@ -import { readManifest, updateManifest, findAgent, resolveWorktree } from '../core/manifest.js'; -import { killAgent, killAgents } from '../core/agent.js'; -import { checkPrState } from '../core/pr.js'; +import { performKill, type KillResult } from '../core/operations/kill.js'; +import { getCurrentPaneId } from '../core/self.js'; +import { readManifest } from '../core/manifest.js'; import { getRepoRoot } from '../core/worktree.js'; -import { cleanupWorktree } from '../core/cleanup.js'; -import { getCurrentPaneId, excludeSelf } from '../core/self.js'; -import { listSessionPanes, type PaneInfo } from '../core/tmux.js'; -import { PpgError, NotInitializedError, AgentNotFoundError, WorktreeNotFoundError } from '../lib/errors.js'; +import { listSessionPanes } from '../core/tmux.js'; import { output, success, info, warn } from '../lib/output.js'; -import type { AgentEntry } from '../types/manifest.js'; export interface KillOptions { agent?: string; @@ -22,314 +18,87 @@ export interface KillOptions { export async function killCommand(options: KillOptions): Promise { const projectRoot = await getRepoRoot(); - if (!options.agent && !options.worktree && !options.all) { - throw new PpgError('One of --agent, --worktree, or --all is required', 'INVALID_ARGS'); - } - // Capture self-identification once at the start const selfPaneId = getCurrentPaneId(); - let paneMap: Map | undefined; + let paneMap: Map | undefined; if (selfPaneId) { const manifest = await readManifest(projectRoot); paneMap = await listSessionPanes(manifest.sessionName); } - if (options.agent) { - await killSingleAgent(projectRoot, options.agent, 
options, selfPaneId, paneMap); - } else if (options.worktree) { - await killWorktreeAgents(projectRoot, options.worktree, options, selfPaneId, paneMap); - } else if (options.all) { - await killAllAgents(projectRoot, options, selfPaneId, paneMap); - } -} - -async function killSingleAgent( - projectRoot: string, - agentId: string, - options: KillOptions, - selfPaneId: string | null, - paneMap?: Map, -): Promise { - const manifest = await readManifest(projectRoot); - const found = findAgent(manifest, agentId); - if (!found) throw new AgentNotFoundError(agentId); - - const { agent } = found; - const isTerminal = agent.status !== 'running'; - - // Self-protection check - if (selfPaneId && paneMap) { - const { skipped } = excludeSelf([agent], selfPaneId, paneMap); - if (skipped.length > 0) { - warn(`Cannot kill agent ${agentId} — it contains the current ppg process`); - if (options.json) { - output({ success: false, skipped: [agentId], reason: 'self-protection' }, true); - } - return; - } - } - - if (options.delete) { - // For --delete: skip kill if already in terminal state, just clean up - if (!isTerminal) { - info(`Killing agent ${agentId}`); - await killAgent(agent); - } - // Kill the tmux pane explicitly (handles already-dead) - await import('../core/tmux.js').then((tmux) => tmux.killPane(agent.tmuxTarget)); - - await updateManifest(projectRoot, (m) => { - const f = findAgent(m, agentId); - if (f) { - delete f.worktree.agents[agentId]; - } - return m; - }); - - if (options.json) { - output({ success: true, killed: [agentId], deleted: [agentId] }, true); - } else { - success(`Deleted agent ${agentId}`); - } - } else { - if (isTerminal) { - if (options.json) { - output({ success: true, killed: [], message: `Agent ${agentId} already ${agent.status}` }, true); - } else { - info(`Agent ${agentId} already ${agent.status}, skipping kill`); - } - return; - } - - info(`Killing agent ${agentId}`); - await killAgent(agent); - - await updateManifest(projectRoot, (m) => { - 
const f = findAgent(m, agentId); - if (f) { - f.agent.status = 'gone'; - } - return m; - }); - - if (options.json) { - output({ success: true, killed: [agentId] }, true); - } else { - success(`Killed agent ${agentId}`); - } - } -} - -async function killWorktreeAgents( - projectRoot: string, - worktreeRef: string, - options: KillOptions, - selfPaneId: string | null, - paneMap?: Map, -): Promise { - const manifest = await readManifest(projectRoot); - const wt = resolveWorktree(manifest, worktreeRef); - - if (!wt) throw new WorktreeNotFoundError(worktreeRef); - - let toKill = Object.values(wt.agents) - .filter((a) => a.status === 'running'); - - // Self-protection: filter out agents that would kill the current process - const skippedIds: string[] = []; - if (selfPaneId && paneMap) { - const { safe, skipped } = excludeSelf(toKill, selfPaneId, paneMap); - toKill = safe; - for (const a of skipped) { - skippedIds.push(a.id); - warn(`Skipping agent ${a.id} — contains current ppg process`); - } - } - - const killedIds = toKill.map((a) => a.id); - - for (const a of toKill) info(`Killing agent ${a.id}`); - await killAgents(toKill); - - await updateManifest(projectRoot, (m) => { - const mWt = m.worktrees[wt.id]; - if (mWt) { - for (const agent of Object.values(mWt.agents)) { - if (killedIds.includes(agent.id)) { - agent.status = 'gone'; - } - } - } - return m; + const result = await performKill({ + projectRoot, + agent: options.agent, + worktree: options.worktree, + all: options.all, + remove: options.remove, + delete: options.delete, + includeOpenPrs: options.includeOpenPrs, + selfPaneId, + paneMap, }); - // Check for open PR before deleting worktree - let skippedOpenPr = false; - if (options.delete && !options.includeOpenPrs) { - const prState = await checkPrState(wt.branch); - if (prState === 'OPEN') { - skippedOpenPr = true; - warn(`Skipping deletion of worktree ${wt.id} (${wt.name}) — has open PR on branch ${wt.branch}. 
Use --include-open-prs to override.`); - } - } + formatOutput(result, options); +} - // --delete implies --remove (always clean up worktree) - const shouldRemove = (options.remove || options.delete) && !skippedOpenPr; - if (shouldRemove) { - await removeWorktreeCleanup(projectRoot, wt.id, selfPaneId, paneMap); +function formatOutput(result: KillResult, options: KillOptions): void { + if (options.json) { + output(result, true); + return; } - // --delete also removes the worktree entry from manifest - if (options.delete && !skippedOpenPr) { - await updateManifest(projectRoot, (m) => { - delete m.worktrees[wt.id]; - return m; - }); + // Emit per-agent progress for killed agents + for (const id of result.killed) { + info(`Killing agent ${id}`); } - if (options.json) { - output({ - success: true, - killed: killedIds, - skipped: skippedIds.length > 0 ? skippedIds : undefined, - removed: shouldRemove ? [wt.id] : [], - deleted: (options.delete && !skippedOpenPr) ? [wt.id] : [], - skippedOpenPrs: skippedOpenPr ? 
[wt.id] : undefined, - }, true); - } else { - success(`Killed ${killedIds.length} agent(s) in worktree ${wt.id}`); - if (skippedIds.length > 0) { - warn(`Skipped ${skippedIds.length} agent(s) due to self-protection`); - } - if (options.delete && !skippedOpenPr) { - success(`Deleted worktree ${wt.id}`); - } else if (options.remove && !skippedOpenPr) { - success(`Removed worktree ${wt.id}`); + if (result.skipped?.length) { + for (const id of result.skipped) { + warn(`Skipping agent ${id} — contains current ppg process`); } } -} -async function killAllAgents( - projectRoot: string, - options: KillOptions, - selfPaneId: string | null, - paneMap?: Map, -): Promise { - const manifest = await readManifest(projectRoot); - let toKill: AgentEntry[] = []; - - for (const wt of Object.values(manifest.worktrees)) { - for (const agent of Object.values(wt.agents)) { - if (agent.status === 'running') { - toKill.push(agent); - } + if (result.skippedOpenPrs?.length) { + for (const id of result.skippedOpenPrs) { + warn(`Skipping deletion of worktree ${id} — has open PR`); } } - // Self-protection: filter out agents that would kill the current process - const skippedIds: string[] = []; - if (selfPaneId && paneMap) { - const { safe, skipped } = excludeSelf(toKill, selfPaneId, paneMap); - toKill = safe; - for (const a of skipped) { - skippedIds.push(a.id); - warn(`Skipping agent ${a.id} — contains current ppg process`); + if (options.agent) { + if (result.deleted?.length) { + success(`Deleted agent ${options.agent}`); + } else if (result.killed.length > 0) { + success(`Killed agent ${options.agent}`); + } else if (result.message) { + info(result.message); } - } - - const killedIds = toKill.map((a) => a.id); - for (const a of toKill) info(`Killing agent ${a.id}`); - await killAgents(toKill); - - // Only track active worktrees for removal (not already merged/cleaned) - const activeWorktreeIds = Object.values(manifest.worktrees) - .filter((wt) => wt.status === 'active') - .map((wt) => 
wt.id); - - await updateManifest(projectRoot, (m) => { - for (const wt of Object.values(m.worktrees)) { - for (const agent of Object.values(wt.agents)) { - if (killedIds.includes(agent.id)) { - agent.status = 'gone'; - } - } + } else if (options.worktree) { + if (result.killed.length > 0 || !result.skipped?.length) { + success(`Killed ${result.killed.length} agent(s) in worktree ${options.worktree}`); } - return m; - }); - - // Filter out worktrees with open PRs - let worktreesToRemove = activeWorktreeIds; - const openPrWorktreeIds: string[] = []; - if (options.delete && !options.includeOpenPrs) { - worktreesToRemove = []; - for (const wtId of activeWorktreeIds) { - const wt = manifest.worktrees[wtId]; - if (wt) { - const prState = await checkPrState(wt.branch); - if (prState === 'OPEN') { - openPrWorktreeIds.push(wtId); - warn(`Skipping deletion of worktree ${wtId} (${wt.name}) — has open PR`); - } else { - worktreesToRemove.push(wtId); - } - } + if (result.skipped?.length) { + warn(`Skipped ${result.skipped.length} agent(s) due to self-protection`); } - } - - // --delete implies --remove - const shouldRemove = options.remove || options.delete; - if (shouldRemove) { - for (const wtId of worktreesToRemove) { - await removeWorktreeCleanup(projectRoot, wtId, selfPaneId, paneMap); + if (result.deleted?.length) { + success(`Deleted worktree ${options.worktree}`); + } else if (result.removed?.length) { + success(`Removed worktree ${options.worktree}`); } - } - - // --delete also removes worktree entries from manifest - if (options.delete) { - await updateManifest(projectRoot, (m) => { - for (const wtId of worktreesToRemove) { - delete m.worktrees[wtId]; - } - return m; - }); - } - - if (options.json) { - output({ - success: true, - killed: killedIds, - skipped: skippedIds.length > 0 ? skippedIds : undefined, - removed: shouldRemove ? worktreesToRemove : [], - deleted: options.delete ? worktreesToRemove : [], - skippedOpenPrs: openPrWorktreeIds.length > 0 ? 
openPrWorktreeIds : undefined, - }, true); - } else { - success(`Killed ${killedIds.length} agent(s) across ${activeWorktreeIds.length} worktree(s)`); - if (skippedIds.length > 0) { - warn(`Skipped ${skippedIds.length} agent(s) due to self-protection`); + } else if (options.all) { + const wtMsg = result.worktreeCount !== undefined + ? ` across ${result.worktreeCount} worktree(s)` + : ''; + success(`Killed ${result.killed.length} agent(s)${wtMsg}`); + if (result.skipped?.length) { + warn(`Skipped ${result.skipped.length} agent(s) due to self-protection`); } - if (openPrWorktreeIds.length > 0) { - warn(`Skipped deletion of ${openPrWorktreeIds.length} worktree(s) with open PRs`); + if (result.skippedOpenPrs?.length) { + warn(`Skipped deletion of ${result.skippedOpenPrs.length} worktree(s) with open PRs`); } - if (options.delete) { - success(`Deleted ${worktreesToRemove.length} worktree(s)`); - } else if (options.remove) { - success(`Removed ${worktreesToRemove.length} worktree(s)`); + if (result.deleted?.length) { + success(`Deleted ${result.deleted.length} worktree(s)`); + } else if (result.removed?.length) { + success(`Removed ${result.removed.length} worktree(s)`); } } } - -async function removeWorktreeCleanup( - projectRoot: string, - wtId: string, - selfPaneId: string | null, - paneMap?: Map, -): Promise { - const manifest = await readManifest(projectRoot); - const wt = resolveWorktree(manifest, wtId); - if (!wt) return; - await cleanupWorktree(projectRoot, wt, { - selfPaneId, - paneMap, - }); -} diff --git a/src/commands/list.ts b/src/commands/list.ts index 866e0c3..f1c1e9c 100644 --- a/src/commands/list.ts +++ b/src/commands/list.ts @@ -1,7 +1,6 @@ -import fs from 'node:fs/promises'; -import path from 'node:path'; import { getRepoRoot } from '../core/worktree.js'; import { listTemplatesWithSource } from '../core/template.js'; +import { listPromptsWithSource, enrichEntryMetadata } from '../core/prompt.js'; import { listSwarmsWithSource, loadSwarm } from 
'../core/swarm.js'; import { templatesDir, promptsDir, globalTemplatesDir, globalPromptsDir } from '../lib/paths.js'; import { PpgError } from '../lib/errors.js'; @@ -34,18 +33,9 @@ async function listTemplatesCommand(options: ListOptions): Promise { } const templates = await Promise.all( - entries.map(async ({ name, source }) => { - const dir = source === 'local' ? templatesDir(projectRoot) : globalTemplatesDir(); - const filePath = path.join(dir, `${name}.md`); - const content = await fs.readFile(filePath, 'utf-8'); - const firstLine = content.split('\n').find((l) => l.trim().length > 0) ?? ''; - const description = firstLine.replace(/^#+\s*/, '').trim(); - - const vars = [...content.matchAll(/\{\{(\w+)\}\}/g)].map((m) => m[1]); - const uniqueVars = [...new Set(vars)]; - - return { name, description, variables: uniqueVars, source }; - }), + entries.map(({ name, source }) => + enrichEntryMetadata(name, source, templatesDir(projectRoot), globalTemplatesDir()), + ), ); if (options.json) { @@ -111,52 +101,10 @@ async function listSwarmsCommand(options: ListOptions): Promise { console.log(formatTable(swarms, columns)); } -interface PromptEntry { - name: string; - source: 'local' | 'global'; -} - -async function listPromptEntries(projectRoot: string): Promise { - const localDir = promptsDir(projectRoot); - const globalDir = globalPromptsDir(); - - let localFiles: string[] = []; - try { - localFiles = (await fs.readdir(localDir)).filter((f) => f.endsWith('.md')).sort(); - } catch { - // directory doesn't exist - } - - let globalFiles: string[] = []; - try { - globalFiles = (await fs.readdir(globalDir)).filter((f) => f.endsWith('.md')).sort(); - } catch { - // directory doesn't exist - } - - const seen = new Set(); - const result: PromptEntry[] = []; - - for (const file of localFiles) { - const name = file.replace(/\.md$/, ''); - seen.add(name); - result.push({ name, source: 'local' }); - } - - for (const file of globalFiles) { - const name = file.replace(/\.md$/, ''); - 
if (!seen.has(name)) { - result.push({ name, source: 'global' }); - } - } - - return result; -} - async function listPromptsCommand(options: ListOptions): Promise { const projectRoot = await getRepoRoot(); - const entries = await listPromptEntries(projectRoot); + const entries = await listPromptsWithSource(projectRoot); if (entries.length === 0) { if (options.json) { @@ -168,18 +116,9 @@ async function listPromptsCommand(options: ListOptions): Promise { } const prompts = await Promise.all( - entries.map(async ({ name, source }) => { - const dir = source === 'local' ? promptsDir(projectRoot) : globalPromptsDir(); - const filePath = path.join(dir, `${name}.md`); - const content = await fs.readFile(filePath, 'utf-8'); - const firstLine = content.split('\n').find((l) => l.trim().length > 0) ?? ''; - const description = firstLine.replace(/^#+\s*/, '').trim(); - - const vars = [...content.matchAll(/\{\{(\w+)\}\}/g)].map((m) => m[1]); - const uniqueVars = [...new Set(vars)]; - - return { name, description, variables: uniqueVars, source }; - }), + entries.map(({ name, source }) => + enrichEntryMetadata(name, source, promptsDir(projectRoot), globalPromptsDir()), + ), ); if (options.json) { diff --git a/src/commands/merge.ts b/src/commands/merge.ts index 5dca227..53694dc 100644 --- a/src/commands/merge.ts +++ b/src/commands/merge.ts @@ -1,15 +1,8 @@ -import { execa } from 'execa'; -import { requireManifest, updateManifest, resolveWorktree } from '../core/manifest.js'; -import { refreshAllAgentStatuses } from '../core/agent.js'; -import { getRepoRoot, getCurrentBranch } from '../core/worktree.js'; -import { cleanupWorktree } from '../core/cleanup.js'; -import { getCurrentPaneId } from '../core/self.js'; -import { listSessionPanes, type PaneInfo } from '../core/tmux.js'; -import { PpgError, WorktreeNotFoundError, MergeFailedError } from '../lib/errors.js'; +import { performMerge } from '../core/operations/merge.js'; +import { getRepoRoot } from '../core/worktree.js'; import { 
output, success, info, warn } from '../lib/output.js'; -import { execaEnv } from '../lib/env.js'; -export interface MergeOptions { +export interface MergeCommandOptions { strategy?: 'squash' | 'no-ff'; cleanup?: boolean; dryRun?: boolean; @@ -17,122 +10,48 @@ export interface MergeOptions { json?: boolean; } -export async function mergeCommand(worktreeId: string, options: MergeOptions): Promise { +export async function mergeCommand(worktreeId: string, options: MergeCommandOptions): Promise { const projectRoot = await getRepoRoot(); - await requireManifest(projectRoot); - const manifest = await updateManifest(projectRoot, async (m) => { - return refreshAllAgentStatuses(m, projectRoot); - }); - - const wt = resolveWorktree(manifest, worktreeId); - - if (!wt) throw new WorktreeNotFoundError(worktreeId); - - // Check all agents finished - const agents = Object.values(wt.agents); - const incomplete = agents.filter((a) => a.status === 'running'); - - if (incomplete.length > 0 && !options.force) { - const ids = incomplete.map((a) => a.id).join(', '); - throw new PpgError( - `${incomplete.length} agent(s) still running: ${ids}. Use --force to merge anyway.`, - 'AGENTS_RUNNING', - ); - } - if (options.dryRun) { info('Dry run — no changes will be made'); - info(`Would merge branch ${wt.branch} into ${wt.baseBranch} using ${options.strategy ?? 'squash'} strategy`); - if (options.cleanup !== false) { - info(`Would remove worktree ${wt.id} and delete branch ${wt.branch}`); - } - return; } - // Set worktree status to merging - await updateManifest(projectRoot, (m) => { - if (m.worktrees[wt.id]) { - m.worktrees[wt.id].status = 'merging'; - } - return m; + const result = await performMerge({ + projectRoot, + worktreeRef: worktreeId, + strategy: options.strategy, + cleanup: options.cleanup, + dryRun: options.dryRun, + force: options.force, }); - const strategy = options.strategy ?? 
'squash'; - - try { - const currentBranch = await getCurrentBranch(projectRoot); - if (currentBranch !== wt.baseBranch) { - info(`Switching to base branch ${wt.baseBranch}`); - await execa('git', ['checkout', wt.baseBranch], { ...execaEnv, cwd: projectRoot }); - } - - info(`Merging ${wt.branch} into ${wt.baseBranch} (${strategy})`); - - if (strategy === 'squash') { - await execa('git', ['merge', '--squash', wt.branch], { ...execaEnv, cwd: projectRoot }); - await execa('git', ['commit', '-m', `ppg: merge ${wt.name} (${wt.branch})`], { - ...execaEnv, - cwd: projectRoot, - }); - } else { - await execa('git', ['merge', '--no-ff', wt.branch, '-m', `ppg: merge ${wt.name} (${wt.branch})`], { - ...execaEnv, - cwd: projectRoot, - }); + if (result.dryRun) { + info(`Would merge branch ${result.branch} into ${result.baseBranch} using ${result.strategy} strategy`); + if (options.cleanup !== false) { + info(`Would remove worktree ${result.worktreeId} and delete branch ${result.branch}`); } - - success(`Merged ${wt.branch} into ${wt.baseBranch}`); - } catch (err) { - await updateManifest(projectRoot, (m) => { - if (m.worktrees[wt.id]) { - m.worktrees[wt.id].status = 'failed'; - } - return m; - }); - throw new MergeFailedError( - `Merge failed: ${err instanceof Error ? 
err.message : err}`, - ); + return; } - // Mark as merged - await updateManifest(projectRoot, (m) => { - if (m.worktrees[wt.id]) { - m.worktrees[wt.id].status = 'merged'; - m.worktrees[wt.id].mergedAt = new Date().toISOString(); - } - return m; - }); - - // Cleanup with self-protection - let selfProtected = false; - if (options.cleanup !== false) { - info('Cleaning up...'); - - const selfPaneId = getCurrentPaneId(); - let paneMap: Map | undefined; - if (selfPaneId) { - paneMap = await listSessionPanes(manifest.sessionName); - } - - const cleanupResult = await cleanupWorktree(projectRoot, wt, { selfPaneId, paneMap }); - selfProtected = cleanupResult.selfProtected; + success(`Merged ${result.branch} into ${result.baseBranch}`); - if (selfProtected) { - warn(`Some tmux targets skipped during cleanup — contains current ppg process`); + if (result.cleaned) { + if (result.selfProtected) { + warn('Some tmux targets skipped during cleanup — contains current ppg process'); } - success(`Cleaned up worktree ${wt.id}`); + success(`Cleaned up worktree ${result.worktreeId}`); } if (options.json) { output({ success: true, - worktreeId: wt.id, - branch: wt.branch, - baseBranch: wt.baseBranch, - strategy, - cleaned: options.cleanup !== false, - selfProtected: selfProtected || undefined, + worktreeId: result.worktreeId, + branch: result.branch, + baseBranch: result.baseBranch, + strategy: result.strategy, + cleaned: result.cleaned, + selfProtected: result.selfProtected || undefined, }, true); } } diff --git a/src/commands/pr.ts b/src/commands/pr.ts index aeb559d..534b471 100644 --- a/src/commands/pr.ts +++ b/src/commands/pr.ts @@ -1,13 +1,12 @@ -import { execa } from 'execa'; import { updateManifest, resolveWorktree } from '../core/manifest.js'; import { refreshAllAgentStatuses } from '../core/agent.js'; import { getRepoRoot } from '../core/worktree.js'; -import { PpgError, NotInitializedError, WorktreeNotFoundError, GhNotFoundError } from '../lib/errors.js'; +import { 
createWorktreePr } from '../core/pr.js'; +import { NotInitializedError, WorktreeNotFoundError } from '../lib/errors.js'; import { output, success, info } from '../lib/output.js'; -import { execaEnv } from '../lib/env.js'; -// GitHub PR body limit is 65536 chars; leave room for truncation notice -const MAX_BODY_LENGTH = 60_000; +// Re-export for backwards compatibility with existing tests/consumers +export { buildBodyFromResults, truncateBody } from '../core/pr.js'; export interface PrOptions { title?: string; @@ -31,82 +30,16 @@ export async function prCommand(worktreeRef: string, options: PrOptions): Promis const wt = resolveWorktree(manifest, worktreeRef); if (!wt) throw new WorktreeNotFoundError(worktreeRef); - // Verify gh is available - try { - await execa('gh', ['--version'], execaEnv); - } catch { - throw new GhNotFoundError(); - } - - // Push the worktree branch - info(`Pushing branch ${wt.branch} to origin`); - try { - await execa('git', ['push', '-u', 'origin', wt.branch], { ...execaEnv, cwd: projectRoot }); - } catch (err) { - throw new PpgError( - `Failed to push branch ${wt.branch}: ${err instanceof Error ? err.message : err}`, - 'INVALID_ARGS', - ); - } - - // Build PR title and body - const title = options.title ?? wt.name; - const body = options.body ?? await buildBodyFromResults(Object.values(wt.agents)); - - // Build gh pr create args - const ghArgs = [ - 'pr', 'create', - '--head', wt.branch, - '--base', wt.baseBranch, - '--title', title, - '--body', body, - ]; - if (options.draft) { - ghArgs.push('--draft'); - } - - info(`Creating PR: ${title}`); - let prUrl: string; - try { - const result = await execa('gh', ghArgs, { ...execaEnv, cwd: projectRoot }); - prUrl = result.stdout.trim(); - } catch (err) { - throw new PpgError( - `Failed to create PR: ${err instanceof Error ? 
err.message : err}`, - 'INVALID_ARGS', - ); - } - - // Store PR URL in manifest - await updateManifest(projectRoot, (m) => { - if (m.worktrees[wt.id]) { - m.worktrees[wt.id].prUrl = prUrl; - } - return m; + info(`Creating PR for ${wt.branch}`); + const result = await createWorktreePr(projectRoot, wt, { + title: options.title, + body: options.body, + draft: options.draft, }); if (options.json) { - output({ - success: true, - worktreeId: wt.id, - branch: wt.branch, - baseBranch: wt.baseBranch, - prUrl, - }, true); + output({ success: true, ...result }, true); } else { - success(`PR created: ${prUrl}`); + success(`PR created: ${result.prUrl}`); } } - -/** Build PR body from agent prompts, with truncation. */ -export async function buildBodyFromResults(agents: { id: string; prompt: string }[]): Promise { - if (agents.length === 0) return ''; - const sections = agents.map((a) => `## Agent: ${a.id}\n\n${a.prompt}`); - return truncateBody(sections.join('\n\n---\n\n')); -} - -/** Truncate body to stay within GitHub's PR body size limit. 
*/ -export function truncateBody(body: string): string { - if (body.length <= MAX_BODY_LENGTH) return body; - return body.slice(0, MAX_BODY_LENGTH) + '\n\n---\n\n*[Truncated — full results available in `.ppg/results/`]*'; -} diff --git a/src/commands/restart.ts b/src/commands/restart.ts index c2627e5..4411e3e 100644 --- a/src/commands/restart.ts +++ b/src/commands/restart.ts @@ -1,15 +1,6 @@ -import fs from 'node:fs/promises'; -import { requireManifest, updateManifest, findAgent } from '../core/manifest.js'; -import { loadConfig, resolveAgentConfig } from '../core/config.js'; -import { spawnAgent, killAgent } from '../core/agent.js'; -import { getRepoRoot } from '../core/worktree.js'; -import * as tmux from '../core/tmux.js'; +import { performRestart } from '../core/operations/restart.js'; import { openTerminalWindow } from '../core/terminal.js'; -import { agentId as genAgentId, sessionId as genSessionId } from '../lib/id.js'; -import { agentPromptFile } from '../lib/paths.js'; -import { PpgError, AgentNotFoundError } from '../lib/errors.js'; import { output, success, info } from '../lib/output.js'; -import { renderTemplate, type TemplateContext } from '../core/template.js'; export interface RestartOptions { prompt?: string; @@ -19,105 +10,28 @@ export interface RestartOptions { } export async function restartCommand(agentRef: string, options: RestartOptions): Promise { - const projectRoot = await getRepoRoot(); - const config = await loadConfig(projectRoot); - - const manifest = await requireManifest(projectRoot); - - const found = findAgent(manifest, agentRef); - if (!found) throw new AgentNotFoundError(agentRef); - - const { worktree: wt, agent: oldAgent } = found; - - // Kill old agent if still running - if (oldAgent.status === 'running') { - info(`Killing existing agent ${oldAgent.id}`); - await killAgent(oldAgent); - } - - // Read original prompt from prompt file, or use override - let promptText: string; - if (options.prompt) { - promptText = options.prompt; 
- } else { - const pFile = agentPromptFile(projectRoot, oldAgent.id); - try { - promptText = await fs.readFile(pFile, 'utf-8'); - } catch { - throw new PpgError( - `Could not read original prompt for agent ${oldAgent.id}. Use --prompt to provide one.`, - 'PROMPT_NOT_FOUND', - ); - } - } - - // Resolve agent config - const agentConfig = resolveAgentConfig(config, options.agent ?? oldAgent.agentType); - - // Ensure tmux session - await tmux.ensureSession(manifest.sessionName); - - // Create new tmux window in same worktree - const newAgentId = genAgentId(); - const windowTarget = await tmux.createWindow(manifest.sessionName, `${wt.name}-restart`, wt.path); - - // Render template vars - const ctx: TemplateContext = { - WORKTREE_PATH: wt.path, - BRANCH: wt.branch, - AGENT_ID: newAgentId, - PROJECT_ROOT: projectRoot, - TASK_NAME: wt.name, - PROMPT: promptText, - }; - const renderedPrompt = renderTemplate(promptText, ctx); - - const newSessionId = genSessionId(); - const agentEntry = await spawnAgent({ - agentId: newAgentId, - agentConfig, - prompt: renderedPrompt, - worktreePath: wt.path, - tmuxTarget: windowTarget, - projectRoot, - branch: wt.branch, - sessionId: newSessionId, - }); - - // Update manifest: mark old agent as gone, add new agent - await updateManifest(projectRoot, (m) => { - const mWt = m.worktrees[wt.id]; - if (mWt) { - const mOldAgent = mWt.agents[oldAgent.id]; - if (mOldAgent && mOldAgent.status === 'running') { - mOldAgent.status = 'gone'; - } - mWt.agents[newAgentId] = agentEntry; - } - return m; + const result = await performRestart({ + agentRef, + prompt: options.prompt, + agentType: options.agent, }); // Only open Terminal window when explicitly requested via --open (fire-and-forget) if (options.open === true) { - openTerminalWindow(manifest.sessionName, windowTarget, `${wt.name}-restart`).catch(() => {}); + openTerminalWindow(result.sessionName, result.newAgent.tmuxTarget, `${result.newAgent.worktreeName}-restart`).catch(() => {}); } if 
(options.json) { output({ success: true, - oldAgentId: oldAgent.id, - newAgent: { - id: newAgentId, - tmuxTarget: windowTarget, - sessionId: newSessionId, - worktreeId: wt.id, - worktreeName: wt.name, - branch: wt.branch, - path: wt.path, - }, + oldAgentId: result.oldAgentId, + newAgent: result.newAgent, }, true); } else { - success(`Restarted agent ${oldAgent.id} → ${newAgentId} in worktree ${wt.name}`); - info(` New agent ${newAgentId} → ${windowTarget}`); + if (result.killedOldAgent) { + info(`Killed existing agent ${result.oldAgentId}`); + } + success(`Restarted agent ${result.oldAgentId} → ${result.newAgent.id} in worktree ${result.newAgent.worktreeName}`); + info(` New agent ${result.newAgent.id} → ${result.newAgent.tmuxTarget}`); } } diff --git a/src/commands/serve.test.ts b/src/commands/serve.test.ts new file mode 100644 index 0000000..51d7b1f --- /dev/null +++ b/src/commands/serve.test.ts @@ -0,0 +1,370 @@ +import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest'; +import fs from 'node:fs/promises'; +import path from 'node:path'; + +// Mock dependencies +vi.mock('../core/worktree.js', () => ({ + getRepoRoot: vi.fn(() => '/fake/project'), +})); + +vi.mock('../core/manifest.js', () => ({ + requireManifest: vi.fn(() => ({ sessionName: 'ppg-test' })), + readManifest: vi.fn(() => ({ sessionName: 'ppg-test' })), +})); + +vi.mock('../core/tmux.js', () => ({ + ensureSession: vi.fn(), + createWindow: vi.fn(() => 'ppg-test:1'), + sendKeys: vi.fn(), + listSessionWindows: vi.fn(() => []), + killWindow: vi.fn(), +})); + +vi.mock('../lib/paths.js', async (importOriginal) => { + const actual = await importOriginal() as Record; + return { + ...actual, + manifestPath: vi.fn((root: string) => path.join(root, '.ppg', 'manifest.json')), + servePidPath: vi.fn((root: string) => path.join(root, '.ppg', 'serve.pid')), + serveJsonPath: vi.fn((root: string) => path.join(root, '.ppg', 'serve.json')), + serveLogPath: vi.fn((root: string) => path.join(root, '.ppg', 
'logs', 'serve.log')), + logsDir: vi.fn((root: string) => path.join(root, '.ppg', 'logs')), + }; +}); + +vi.mock('../core/serve.js', async (importOriginal) => { + const actual = await importOriginal() as Record; + return { + ...actual, + isServeRunning: vi.fn(() => false), + getServePid: vi.fn(() => null), + getServeInfo: vi.fn(() => null), + readServeLog: vi.fn(() => []), + runServeDaemon: vi.fn(), + }; +}); + +vi.mock('../lib/output.js', async (importOriginal) => { + const actual = await importOriginal() as Record; + return { + ...actual, + output: vi.fn(), + info: vi.fn(), + success: vi.fn(), + warn: vi.fn(), + }; +}); + +const { serveStartCommand, serveStopCommand, serveStatusCommand, serveDaemonCommand, buildPairingUrl, getLocalIp, verifyToken } = await import('./serve.js'); +const { output, success, warn, info } = await import('../lib/output.js'); +const { isServeRunning, getServePid, getServeInfo, readServeLog, runServeDaemon } = await import('../core/serve.js'); +const { requireManifest } = await import('../core/manifest.js'); +const tmux = await import('../core/tmux.js'); + +beforeEach(() => { + vi.clearAllMocks(); +}); + +afterEach(() => { + vi.restoreAllMocks(); +}); + +describe('serveStartCommand', () => { + test('given no server running, should start daemon in tmux window', async () => { + await serveStartCommand({ port: 3000, host: 'localhost' }); + + expect(requireManifest).toHaveBeenCalledWith('/fake/project'); + expect(tmux.ensureSession).toHaveBeenCalledWith('ppg-test'); + expect(tmux.createWindow).toHaveBeenCalledWith('ppg-test', 'ppg-serve', '/fake/project'); + expect(tmux.sendKeys).toHaveBeenCalledWith('ppg-test:1', 'ppg serve _daemon --port 3000 --host localhost'); + expect(success).toHaveBeenCalledWith('Serve daemon starting in tmux window: ppg-test:1'); + }); + + test('given custom port and host, should pass them to daemon command', async () => { + await serveStartCommand({ port: 8080, host: '0.0.0.0' }); + + 
expect(tmux.sendKeys).toHaveBeenCalledWith('ppg-test:1', 'ppg serve _daemon --port 8080 --host 0.0.0.0'); + }); + + test('given server already running, should warn and return', async () => { + vi.mocked(isServeRunning).mockResolvedValue(true); + vi.mocked(getServePid).mockResolvedValue(12345); + vi.mocked(getServeInfo).mockResolvedValue({ + pid: 12345, + port: 3000, + host: 'localhost', + startedAt: '2026-01-01T00:00:00.000Z', + }); + + await serveStartCommand({ port: 3000, host: 'localhost' }); + + expect(tmux.createWindow).not.toHaveBeenCalled(); + expect(warn).toHaveBeenCalledWith('Serve daemon is already running (PID: 12345)'); + expect(info).toHaveBeenCalledWith('Listening on localhost:3000'); + }); + + test('given json option, should output JSON on success', async () => { + await serveStartCommand({ port: 3000, host: 'localhost', json: true }); + + expect(output).toHaveBeenCalledWith( + expect.objectContaining({ success: true, port: 3000, host: 'localhost', tmuxWindow: 'ppg-test:1' }), + true, + ); + }); + + test('given json option and already running, should output JSON error', async () => { + vi.mocked(isServeRunning).mockResolvedValue(true); + vi.mocked(getServePid).mockResolvedValue(12345); + vi.mocked(getServeInfo).mockResolvedValue({ + pid: 12345, + port: 3000, + host: 'localhost', + startedAt: '2026-01-01T00:00:00.000Z', + }); + + await serveStartCommand({ port: 3000, host: 'localhost', json: true }); + + expect(output).toHaveBeenCalledWith( + expect.objectContaining({ success: false, error: 'Serve daemon is already running', pid: 12345 }), + true, + ); + }); + + test('given project not initialized, should throw NotInitializedError', async () => { + const err = Object.assign(new Error('Not initialized'), { code: 'NOT_INITIALIZED' }); + vi.mocked(requireManifest).mockRejectedValue(err); + + await expect(serveStartCommand({ port: 3000, host: 'localhost' })).rejects.toThrow('Not initialized'); + expect(tmux.createWindow).not.toHaveBeenCalled(); + }); + + 
test('given invalid host with shell metacharacters, should throw INVALID_ARGS', async () => { + await expect(serveStartCommand({ port: 3000, host: 'localhost; rm -rf /' })) + .rejects.toThrow('Invalid host'); + }); +}); + +describe('serveStopCommand', () => { + test('given running server, should kill process and clean up', async () => { + vi.mocked(getServePid).mockResolvedValue(99999); + const mockKill = vi.spyOn(process, 'kill').mockImplementation(() => true); + vi.spyOn(fs, 'unlink').mockResolvedValue(undefined); + + await serveStopCommand({}); + + expect(mockKill).toHaveBeenCalledWith(99999, 'SIGTERM'); + expect(fs.unlink).toHaveBeenCalledWith('/fake/project/.ppg/serve.pid'); + expect(fs.unlink).toHaveBeenCalledWith('/fake/project/.ppg/serve.json'); + expect(success).toHaveBeenCalledWith('Serve daemon stopped (PID: 99999)'); + + mockKill.mockRestore(); + }); + + test('given no server running, should warn', async () => { + await serveStopCommand({}); + + expect(warn).toHaveBeenCalledWith('Serve daemon is not running'); + }); + + test('given running server with tmux window, should kill the tmux window', async () => { + vi.mocked(getServePid).mockResolvedValue(99999); + vi.spyOn(process, 'kill').mockImplementation(() => true); + vi.spyOn(fs, 'unlink').mockResolvedValue(undefined); + vi.mocked(tmux.listSessionWindows).mockResolvedValue([ + { index: 0, name: 'bash' }, + { index: 1, name: 'ppg-serve' }, + ]); + + await serveStopCommand({}); + + expect(tmux.killWindow).toHaveBeenCalledWith('ppg-test:1'); + + vi.mocked(process.kill).mockRestore(); + }); + + test('given process already dead when killing, should still clean up files', async () => { + vi.mocked(getServePid).mockResolvedValue(99999); + vi.spyOn(process, 'kill').mockImplementation(() => { throw new Error('ESRCH'); }); + vi.spyOn(fs, 'unlink').mockResolvedValue(undefined); + + await serveStopCommand({}); + + expect(fs.unlink).toHaveBeenCalledWith('/fake/project/.ppg/serve.pid'); + 
expect(fs.unlink).toHaveBeenCalledWith('/fake/project/.ppg/serve.json'); + expect(success).toHaveBeenCalledWith('Serve daemon stopped (PID: 99999)'); + + vi.mocked(process.kill).mockRestore(); + }); + + test('given json option and not running, should output JSON', async () => { + await serveStopCommand({ json: true }); + + expect(output).toHaveBeenCalledWith( + expect.objectContaining({ success: false, error: 'Serve daemon is not running' }), + true, + ); + }); + + test('given json option and running, should output JSON on success', async () => { + vi.mocked(getServePid).mockResolvedValue(88888); + vi.spyOn(process, 'kill').mockImplementation(() => true); + vi.spyOn(fs, 'unlink').mockResolvedValue(undefined); + + await serveStopCommand({ json: true }); + + expect(output).toHaveBeenCalledWith( + expect.objectContaining({ success: true, pid: 88888 }), + true, + ); + + vi.mocked(process.kill).mockRestore(); + }); +}); + +describe('serveStatusCommand', () => { + test('given running server, should show status with connection info', async () => { + vi.mocked(isServeRunning).mockResolvedValue(true); + vi.mocked(getServePid).mockResolvedValue(12345); + vi.mocked(getServeInfo).mockResolvedValue({ + pid: 12345, + port: 3000, + host: 'localhost', + startedAt: '2026-01-01T00:00:00.000Z', + }); + vi.mocked(readServeLog).mockResolvedValue(['[2026-01-01T00:00:00.000Z] Started']); + + await serveStatusCommand({}); + + expect(success).toHaveBeenCalledWith('Serve daemon is running (PID: 12345)'); + expect(info).toHaveBeenCalledWith('Listening on localhost:3000'); + expect(info).toHaveBeenCalledWith('Started at 2026-01-01T00:00:00.000Z'); + }); + + test('given no server running, should warn', async () => { + await serveStatusCommand({}); + + expect(warn).toHaveBeenCalledWith('Serve daemon is not running'); + }); + + test('given json option and running, should output JSON with connection info', async () => { + vi.mocked(isServeRunning).mockResolvedValue(true); + 
vi.mocked(getServePid).mockResolvedValue(12345); + vi.mocked(getServeInfo).mockResolvedValue({ + pid: 12345, + port: 3000, + host: 'localhost', + startedAt: '2026-01-01T00:00:00.000Z', + }); + vi.mocked(readServeLog).mockResolvedValue([]); + + await serveStatusCommand({ json: true }); + + expect(output).toHaveBeenCalledWith( + expect.objectContaining({ + running: true, + pid: 12345, + host: 'localhost', + port: 3000, + startedAt: '2026-01-01T00:00:00.000Z', + recentLog: [], + }), + true, + ); + }); + + test('given json option and not running, should output JSON', async () => { + await serveStatusCommand({ json: true }); + + expect(output).toHaveBeenCalledWith( + expect.objectContaining({ running: false, pid: null, recentLog: [] }), + true, + ); + }); + + test('given custom lines option, should pass to readServeLog', async () => { + await serveStatusCommand({ lines: 50 }); + + expect(readServeLog).toHaveBeenCalledWith('/fake/project', 50); + }); +}); + +describe('serveDaemonCommand', () => { + test('given initialized project, should call runServeDaemon with correct args', async () => { + await serveDaemonCommand({ port: 4000, host: '0.0.0.0' }); + + expect(requireManifest).toHaveBeenCalledWith('/fake/project'); + expect(runServeDaemon).toHaveBeenCalledWith('/fake/project', 4000, '0.0.0.0'); + }); + + test('given project not initialized, should throw', async () => { + const err = Object.assign(new Error('Not initialized'), { code: 'NOT_INITIALIZED' }); + vi.mocked(requireManifest).mockRejectedValue(err); + + await expect(serveDaemonCommand({ port: 3000, host: 'localhost' })).rejects.toThrow('Not initialized'); + expect(runServeDaemon).not.toHaveBeenCalled(); + }); +}); + +describe('buildPairingUrl', () => { + test('given valid params, should encode all fields into ppg:// URL', () => { + const url = buildPairingUrl({ + host: '192.168.1.10', + port: 7700, + fingerprint: 'AA:BB:CC', + token: 'test-token-123', + }); + + expect(url).toContain('ppg://connect'); + 
expect(url).toContain('host=192.168.1.10'); + expect(url).toContain('port=7700'); + expect(url).toContain('ca=AA%3ABB%3ACC'); + expect(url).toContain('token=test-token-123'); + }); + + test('given special characters in token, should URL-encode them', () => { + const url = buildPairingUrl({ + host: '10.0.0.1', + port: 8080, + fingerprint: 'DE:AD:BE:EF', + token: 'a+b/c=d', + }); + + expect(url).toContain('token=a%2Bb%2Fc%3Dd'); + }); +}); + +describe('getLocalIp', () => { + test('should return a non-empty string', () => { + const ip = getLocalIp(); + expect(ip).toBeTruthy(); + expect(typeof ip).toBe('string'); + }); + + test('should return a valid IPv4 address', () => { + const ip = getLocalIp(); + const ipv4Pattern = /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/; + expect(ip).toMatch(ipv4Pattern); + }); +}); + +describe('verifyToken', () => { + test('given matching tokens, should return true', () => { + expect(verifyToken('correct-token', 'correct-token')).toBe(true); + }); + + test('given different tokens of same length, should return false', () => { + expect(verifyToken('aaaa-bbbb-cccc', 'xxxx-yyyy-zzzz')).toBe(false); + }); + + test('given different length tokens, should return false', () => { + expect(verifyToken('short', 'much-longer-token')).toBe(false); + }); + + test('given empty provided token, should return false', () => { + expect(verifyToken('', 'expected-token')).toBe(false); + }); + + test('given both empty, should return true', () => { + expect(verifyToken('', '')).toBe(true); + }); +}); diff --git a/src/commands/serve.ts b/src/commands/serve.ts new file mode 100644 index 0000000..320c87f --- /dev/null +++ b/src/commands/serve.ts @@ -0,0 +1,201 @@ +import fs from 'node:fs/promises'; +import os from 'node:os'; +import { randomBytes, timingSafeEqual } from 'node:crypto'; +import { getRepoRoot } from '../core/worktree.js'; +import { requireManifest, readManifest } from '../core/manifest.js'; +import { runServeDaemon, isServeRunning, getServePid, getServeInfo, 
readServeLog } from '../core/serve.js'; +import { startServer } from '../server/index.js'; +import * as tmux from '../core/tmux.js'; +import { servePidPath, serveJsonPath } from '../lib/paths.js'; +import { PpgError } from '../lib/errors.js'; +import { output, info, success, warn } from '../lib/output.js'; + +export interface ServeStartOptions { + port?: number; + host?: string; + token?: string; + json?: boolean; +} + +export interface ServeOptions { + json?: boolean; +} + +export interface ServeStatusOptions { + lines?: number; + json?: boolean; +} + +const SERVE_WINDOW_NAME = 'ppg-serve'; +const VALID_HOST = /^[\w.:-]+$/; + +export function buildPairingUrl(params: { + host: string; + port: number; + fingerprint: string; + token: string; +}): string { + const { host, port, fingerprint, token } = params; + const url = new URL('ppg://connect'); + url.searchParams.set('host', host); + url.searchParams.set('port', String(port)); + url.searchParams.set('ca', fingerprint); + url.searchParams.set('token', token); + return url.toString(); +} + +export function getLocalIp(): string { + const interfaces = os.networkInterfaces(); + for (const name of Object.keys(interfaces)) { + for (const iface of interfaces[name] ?? 
[]) { + if (iface.family === 'IPv4' && !iface.internal) { + return iface.address; + } + } + } + return '127.0.0.1'; +} + +export function verifyToken(provided: string, expected: string): boolean { + const a = Buffer.from(provided); + const b = Buffer.from(expected); + if (a.length !== b.length) return false; + return timingSafeEqual(a, b); +} + +export async function serveStartCommand(options: ServeStartOptions): Promise { + const projectRoot = await getRepoRoot(); + await requireManifest(projectRoot); + + const port = options.port!; + const host = options.host!; + + if (!VALID_HOST.test(host)) { + throw new PpgError(`Invalid host: "${host}"`, 'INVALID_ARGS'); + } + + // Check if already running + if (await isServeRunning(projectRoot)) { + const pid = await getServePid(projectRoot); + const serveInfo = await getServeInfo(projectRoot); + if (options.json) { + output({ success: false, error: 'Serve daemon is already running', pid, ...serveInfo }, true); + } else { + warn(`Serve daemon is already running (PID: ${pid})`); + if (serveInfo) { + info(`Listening on ${serveInfo.host}:${serveInfo.port}`); + } + } + return; + } + + // Start daemon in a tmux window + const manifest = await readManifest(projectRoot); + const sessionName = manifest.sessionName; + await tmux.ensureSession(sessionName); + + const windowTarget = await tmux.createWindow(sessionName, SERVE_WINDOW_NAME, projectRoot); + let command = `ppg serve _daemon --port ${port} --host ${host}`; + if (options.token) { + command += ` --token ${options.token}`; + } + await tmux.sendKeys(windowTarget, command); + + if (options.json) { + output({ + success: true, + tmuxWindow: windowTarget, + port, + host, + }, true); + } else { + success(`Serve daemon starting in tmux window: ${windowTarget}`); + info(`Configured for ${host}:${port}`); + info(`Attach: tmux select-window -t ${windowTarget}`); + } +} + +export async function serveStopCommand(options: ServeOptions): Promise { + const projectRoot = await getRepoRoot(); + 
+ const pid = await getServePid(projectRoot); + if (!pid) { + if (options.json) { + output({ success: false, error: 'Serve daemon is not running' }, true); + } else { + warn('Serve daemon is not running'); + } + return; + } + + // Kill the process + try { + process.kill(pid, 'SIGTERM'); + } catch { + // Already dead + } + + // Clean up PID and JSON files (daemon cleanup handler may not have run yet) + try { await fs.unlink(servePidPath(projectRoot)); } catch { /* already gone */ } + try { await fs.unlink(serveJsonPath(projectRoot)); } catch { /* already gone */ } + + // Try to kill the tmux window too + try { + const manifest = await readManifest(projectRoot); + const windows = await tmux.listSessionWindows(manifest.sessionName); + const serveWindow = windows.find((w) => w.name === SERVE_WINDOW_NAME); + if (serveWindow) { + await tmux.killWindow(`${manifest.sessionName}:${serveWindow.index}`); + } + } catch { /* best effort */ } + + if (options.json) { + output({ success: true, pid }, true); + } else { + success(`Serve daemon stopped (PID: ${pid})`); + } +} + +export async function serveStatusCommand(options: ServeStatusOptions): Promise { + const projectRoot = await getRepoRoot(); + + const running = await isServeRunning(projectRoot); + const pid = running ? await getServePid(projectRoot) : null; + const serveInfo = running ? await getServeInfo(projectRoot) : null; + const recentLines = await readServeLog(projectRoot, options.lines ?? 20); + + if (options.json) { + output({ + running, + pid, + ...(serveInfo ? 
{ host: serveInfo.host, port: serveInfo.port, startedAt: serveInfo.startedAt } : {}), + recentLog: recentLines, + }, true); + return; + } + + if (running) { + success(`Serve daemon is running (PID: ${pid})`); + if (serveInfo) { + info(`Listening on ${serveInfo.host}:${serveInfo.port}`); + info(`Started at ${serveInfo.startedAt}`); + } + } else { + warn('Serve daemon is not running'); + } + + if (recentLines.length > 0) { + console.log('\nRecent log:'); + for (const line of recentLines) { + console.log(` ${line}`); + } + } else { + info('No serve log entries yet'); + } +} + +export async function serveDaemonCommand(options: { port: number; host: string; token?: string }): Promise { + const projectRoot = await getRepoRoot(); + await requireManifest(projectRoot); + await startServer({ projectRoot, port: options.port, host: options.host, token: options.token }); +} diff --git a/src/commands/spawn.test.ts b/src/commands/spawn.test.ts index ee642c7..1efcc23 100644 --- a/src/commands/spawn.test.ts +++ b/src/commands/spawn.test.ts @@ -1,56 +1,11 @@ -import { access } from 'node:fs/promises'; -import { beforeEach, describe, expect, test, vi } from 'vitest'; +import { describe, expect, test, vi, beforeEach } from 'vitest'; import { spawnCommand } from './spawn.js'; -import { loadConfig, resolveAgentConfig } from '../core/config.js'; -import { readManifest, resolveWorktree, updateManifest } from '../core/manifest.js'; -import { spawnAgent } from '../core/agent.js'; -import { getRepoRoot } from '../core/worktree.js'; -import { agentId, sessionId } from '../lib/id.js'; -import * as tmux from '../core/tmux.js'; - -vi.mock('node:fs/promises', async () => { - const actual = await vi.importActual('node:fs/promises'); - const mockedAccess = vi.fn(); - return { - ...actual, - access: mockedAccess, - default: { - ...actual, - access: mockedAccess, - }, - }; -}); +import { performSpawn } from '../core/operations/spawn.js'; +import type { SpawnResult } from 
'../core/operations/spawn.js'; +import type { AgentEntry } from '../types/manifest.js'; -vi.mock('../core/config.js', () => ({ - loadConfig: vi.fn(), - resolveAgentConfig: vi.fn(), -})); - -vi.mock('../core/manifest.js', () => ({ - readManifest: vi.fn(), - updateManifest: vi.fn(), - resolveWorktree: vi.fn(), -})); - -vi.mock('../core/agent.js', () => ({ - spawnAgent: vi.fn(), -})); - -vi.mock('../core/worktree.js', () => ({ - getRepoRoot: vi.fn(), - getCurrentBranch: vi.fn(), - createWorktree: vi.fn(), - adoptWorktree: vi.fn(), -})); - -vi.mock('../core/tmux.js', () => ({ - ensureSession: vi.fn(), - createWindow: vi.fn(), - splitPane: vi.fn(), -})); - -vi.mock('../core/terminal.js', () => ({ - openTerminalWindow: vi.fn(), +vi.mock('../core/operations/spawn.js', () => ({ + performSpawn: vi.fn(), })); vi.mock('../lib/output.js', () => ({ @@ -59,137 +14,85 @@ vi.mock('../lib/output.js', () => ({ info: vi.fn(), })); -vi.mock('../lib/id.js', () => ({ - worktreeId: vi.fn(), - agentId: vi.fn(), - sessionId: vi.fn(), -})); +const mockedPerformSpawn = vi.mocked(performSpawn); +const { output, success, info } = await import('../lib/output.js'); + +function makeAgent(id: string, target: string): AgentEntry { + return { + id, + name: 'claude', + agentType: 'claude', + status: 'running', + tmuxTarget: target, + prompt: 'Do work', + startedAt: '2026-02-27T00:00:00.000Z', + sessionId: 'session-1', + }; +} -const mockedAccess = vi.mocked(access); -const mockedLoadConfig = vi.mocked(loadConfig); -const mockedResolveAgentConfig = vi.mocked(resolveAgentConfig); -const mockedReadManifest = vi.mocked(readManifest); -const mockedUpdateManifest = vi.mocked(updateManifest); -const mockedResolveWorktree = vi.mocked(resolveWorktree); -const mockedSpawnAgent = vi.mocked(spawnAgent); -const mockedGetRepoRoot = vi.mocked(getRepoRoot); -const mockedAgentId = vi.mocked(agentId); -const mockedSessionId = vi.mocked(sessionId); -const mockedEnsureSession = vi.mocked(tmux.ensureSession); -const 
mockedCreateWindow = vi.mocked(tmux.createWindow); -const mockedSplitPane = vi.mocked(tmux.splitPane); - -function createManifest(tmuxWindow = '') { +function makeResult(overrides?: Partial): SpawnResult { return { - version: 1 as const, - projectRoot: '/tmp/repo', - sessionName: 'ppg-test', - worktrees: { - wt1: { - id: 'wt1', - name: 'feature', - path: '/tmp/repo/.ppg/worktrees/wt1', - branch: 'ppg/feature', - baseBranch: 'main', - status: 'active' as const, - tmuxWindow, - agents: {} as Record, - createdAt: '2026-02-27T00:00:00.000Z', - }, + worktree: { + id: 'wt1', + name: 'feature', + branch: 'ppg/feature', + path: '/tmp/repo/.worktrees/wt1', + tmuxWindow: 'ppg-test:1', }, - createdAt: '2026-02-27T00:00:00.000Z', - updatedAt: '2026-02-27T00:00:00.000Z', + agents: [makeAgent('ag-1', 'ppg-test:1')], + ...overrides, }; } describe('spawnCommand', () => { - let manifestState = createManifest(); - let nextAgent = 1; - let nextSession = 1; - beforeEach(() => { vi.clearAllMocks(); - manifestState = createManifest(); - nextAgent = 1; - nextSession = 1; - - mockedAccess.mockResolvedValue(undefined); - mockedGetRepoRoot.mockResolvedValue('/tmp/repo'); - mockedLoadConfig.mockResolvedValue({ - sessionName: 'ppg-test', - defaultAgent: 'claude', - agents: { - claude: { - name: 'claude', - command: 'claude', - interactive: true, - }, - }, - envFiles: [], - symlinkNodeModules: false, - }); - mockedResolveAgentConfig.mockReturnValue({ - name: 'claude', - command: 'claude', - interactive: true, - }); - mockedReadManifest.mockImplementation(async () => structuredClone(manifestState)); - mockedResolveWorktree.mockImplementation((manifest, ref) => (manifest as any).worktrees[ref as string]); - mockedUpdateManifest.mockImplementation(async (_projectRoot, updater) => { - manifestState = await updater(structuredClone(manifestState)); - return manifestState as any; - }); - mockedAgentId.mockImplementation(() => `ag-${nextAgent++}`); - mockedSessionId.mockImplementation(() => 
`session-${nextSession++}`); - mockedSpawnAgent.mockImplementation(async (opts: any) => ({ - id: opts.agentId, - name: opts.agentConfig.name, - agentType: opts.agentConfig.name, - status: 'running', - tmuxTarget: opts.tmuxTarget, - prompt: opts.prompt, - startedAt: '2026-02-27T00:00:00.000Z', - sessionId: opts.sessionId, - })); - mockedSplitPane.mockResolvedValue({ target: 'ppg-test:1.1' } as any); + mockedPerformSpawn.mockResolvedValue(makeResult()); }); - test('given lazy tmux window and spawn failure, should persist tmux window before agent writes', async () => { - mockedCreateWindow - .mockResolvedValueOnce('ppg-test:7') - .mockResolvedValueOnce('ppg-test:8'); - mockedSpawnAgent.mockRejectedValueOnce(new Error('spawn failed')); + test('given basic options, should call performSpawn and output success', async () => { + await spawnCommand({ prompt: 'Do work', count: 1 }); + + expect(mockedPerformSpawn).toHaveBeenCalledWith({ prompt: 'Do work', count: 1 }); + expect(success).toHaveBeenCalledWith(expect.stringContaining('Spawned worktree wt1')); + expect(info).toHaveBeenCalledWith(expect.stringContaining('Agent ag-1')); + }); + + test('given json option, should output JSON', async () => { + await spawnCommand({ prompt: 'Do work', count: 1, json: true }); + + expect(output).toHaveBeenCalledWith( + expect.objectContaining({ success: true, worktree: expect.objectContaining({ id: 'wt1' }) }), + true, + ); + }); + + test('given worktree option, should show added message', async () => { + await spawnCommand({ worktree: 'wt1', prompt: 'Do work', count: 1 }); + + expect(success).toHaveBeenCalledWith(expect.stringContaining('Added 1 agent(s) to worktree')); + }); + + test('given performSpawn failure, should propagate error', async () => { + mockedPerformSpawn.mockRejectedValueOnce(new Error('spawn failed')); await expect( - spawnCommand({ - worktree: 'wt1', - prompt: 'Do work', - count: 1, - }), + spawnCommand({ prompt: 'Do work', count: 1 }), ).rejects.toThrow('spawn 
failed'); - - expect(manifestState.worktrees.wt1.tmuxWindow).toBe('ppg-test:7'); - expect(Object.keys(manifestState.worktrees.wt1.agents)).toHaveLength(0); - expect(mockedUpdateManifest).toHaveBeenCalledTimes(1); - expect(mockedEnsureSession).toHaveBeenCalledTimes(1); }); - test('given existing worktree, should update manifest after each spawned agent', async () => { - manifestState = createManifest('ppg-test:1'); - mockedCreateWindow - .mockResolvedValueOnce('ppg-test:2') - .mockResolvedValueOnce('ppg-test:3'); - - await spawnCommand({ - worktree: 'wt1', - prompt: 'Do work', - count: 2, - }); - - expect(mockedUpdateManifest).toHaveBeenCalledTimes(2); - expect(Object.keys(manifestState.worktrees.wt1.agents)).toEqual(['ag-1', 'ag-2']); - expect(manifestState.worktrees.wt1.agents['ag-1'].tmuxTarget).toBe('ppg-test:2'); - expect(manifestState.worktrees.wt1.agents['ag-2'].tmuxTarget).toBe('ppg-test:3'); - expect(mockedEnsureSession).not.toHaveBeenCalled(); + test('given multiple agents, should show all agents', async () => { + mockedPerformSpawn.mockResolvedValue(makeResult({ + agents: [ + makeAgent('ag-1', 'ppg-test:1'), + makeAgent('ag-2', 'ppg-test:2'), + ], + })); + + await spawnCommand({ prompt: 'Do work', count: 2 }); + + expect(success).toHaveBeenCalledWith(expect.stringContaining('2 agent(s)')); + expect(info).toHaveBeenCalledWith(expect.stringContaining('ag-1')); + expect(info).toHaveBeenCalledWith(expect.stringContaining('ag-2')); }); }); diff --git a/src/commands/spawn.ts b/src/commands/spawn.ts index 873aaa3..ec1453d 100644 --- a/src/commands/spawn.ts +++ b/src/commands/spawn.ts @@ -1,495 +1,44 @@ -import fs from 'node:fs/promises'; -import { loadConfig, resolveAgentConfig } from '../core/config.js'; -import { readManifest, updateManifest, resolveWorktree } from '../core/manifest.js'; -import { getRepoRoot, getCurrentBranch, createWorktree, adoptWorktree } from '../core/worktree.js'; -import { setupWorktreeEnv } from '../core/env.js'; -import { 
loadTemplate, renderTemplate, type TemplateContext } from '../core/template.js'; -import { spawnAgent } from '../core/agent.js'; -import * as tmux from '../core/tmux.js'; -import { openTerminalWindow } from '../core/terminal.js'; -import { worktreeId as genWorktreeId, agentId as genAgentId, sessionId as genSessionId } from '../lib/id.js'; -import { manifestPath } from '../lib/paths.js'; -import { PpgError, NotInitializedError, WorktreeNotFoundError } from '../lib/errors.js'; +import { performSpawn, type PerformSpawnOptions, type SpawnResult } from '../core/operations/spawn.js'; import { output, success, info } from '../lib/output.js'; -import { normalizeName } from '../lib/name.js'; -import { parseVars } from '../lib/vars.js'; -import type { WorktreeEntry, AgentEntry } from '../types/manifest.js'; -import type { Config, AgentConfig } from '../types/config.js'; -export interface SpawnOptions { - name?: string; - agent?: string; - prompt?: string; - promptFile?: string; - template?: string; - var?: string[]; - base?: string; - branch?: string; - worktree?: string; - count?: number; - split?: boolean; - open?: boolean; +export interface SpawnOptions extends PerformSpawnOptions { json?: boolean; } export async function spawnCommand(options: SpawnOptions): Promise { - const projectRoot = await getRepoRoot(); - const config = await loadConfig(projectRoot); + const { json, ...spawnOpts } = options; - // Verify initialized (lightweight file check instead of full manifest read) - try { - await fs.access(manifestPath(projectRoot)); - } catch { - throw new NotInitializedError(projectRoot); - } - - const agentConfig = resolveAgentConfig(config, options.agent); - const count = options.count ?? 1; - - // Validate vars early — before any side effects (worktree/tmux creation) - const userVars = parseVars(options.var ?? 
[]); - - // Resolve prompt - const promptText = await resolvePrompt(options, projectRoot); - - // Validate conflicting flags - if (options.branch && options.worktree) { - throw new PpgError('--branch and --worktree are mutually exclusive', 'INVALID_ARGS'); - } - if (options.branch && options.base) { - throw new PpgError('--branch and --base are mutually exclusive (--base is for new branches)', 'INVALID_ARGS'); - } - - if (options.worktree) { - // Add agent(s) to existing worktree - await spawnIntoExistingWorktree( - projectRoot, - agentConfig, - options.worktree, - promptText, - count, - options, - userVars, - ); - } else if (options.branch) { - // Create worktree from existing branch - await spawnOnExistingBranch( - projectRoot, - config, - agentConfig, - options.branch, - promptText, - count, - options, - userVars, - ); - } else { - // Create new worktree + agent(s) - await spawnNewWorktree( - projectRoot, - config, - agentConfig, - promptText, - count, - options, - userVars, - ); - } -} - -async function resolvePrompt(options: SpawnOptions, projectRoot: string): Promise { - if (options.prompt) return options.prompt; - - if (options.promptFile) { - return fs.readFile(options.promptFile, 'utf-8'); - } - - if (options.template) { - return loadTemplate(projectRoot, options.template); - } + const result = await performSpawn(spawnOpts); - throw new PpgError('One of --prompt, --prompt-file, or --template is required', 'INVALID_ARGS'); + emitSpawnResult(result, options); } -interface SpawnBatchOptions { - projectRoot: string; - agentConfig: AgentConfig; - promptText: string; - userVars: Record; - count: number; - split: boolean; - worktreePath: string; - branch: string; - taskName: string; - sessionName: string; - windowTarget: string; - windowNamePrefix: string; - reuseWindowForFirstAgent: boolean; - onAgentSpawned?: (agent: AgentEntry) => Promise; -} - -interface SpawnTargetOptions { - index: number; - split: boolean; - reuseWindowForFirstAgent: boolean; - 
windowTarget: string; - sessionName: string; - windowNamePrefix: string; - worktreePath: string; -} - -async function resolveAgentTarget(opts: SpawnTargetOptions): Promise { - if (opts.index === 0 && opts.reuseWindowForFirstAgent) { - return opts.windowTarget; - } - if (opts.split) { - const direction = opts.index % 2 === 1 ? 'horizontal' : 'vertical'; - const pane = await tmux.splitPane(opts.windowTarget, direction, opts.worktreePath); - return pane.target; - } - return tmux.createWindow(opts.sessionName, `${opts.windowNamePrefix}-${opts.index}`, opts.worktreePath); -} - -async function spawnAgentBatch(opts: SpawnBatchOptions): Promise { - const agents: AgentEntry[] = []; - for (let i = 0; i < opts.count; i++) { - const aId = genAgentId(); - const target = await resolveAgentTarget({ - index: i, - split: opts.split, - reuseWindowForFirstAgent: opts.reuseWindowForFirstAgent, - windowTarget: opts.windowTarget, - sessionName: opts.sessionName, - windowNamePrefix: opts.windowNamePrefix, - worktreePath: opts.worktreePath, - }); - - const ctx: TemplateContext = { - WORKTREE_PATH: opts.worktreePath, - BRANCH: opts.branch, - AGENT_ID: aId, - PROJECT_ROOT: opts.projectRoot, - TASK_NAME: opts.taskName, - PROMPT: opts.promptText, - ...opts.userVars, - }; - - const agentEntry = await spawnAgent({ - agentId: aId, - agentConfig: opts.agentConfig, - prompt: renderTemplate(opts.promptText, ctx), - worktreePath: opts.worktreePath, - tmuxTarget: target, - projectRoot: opts.projectRoot, - branch: opts.branch, - sessionId: genSessionId(), - }); - - agents.push(agentEntry); - if (opts.onAgentSpawned) { - await opts.onAgentSpawned(agentEntry); - } - } - - return agents; -} - -interface EmitSpawnResultOptions { - json: boolean | undefined; - successMessage: string; - worktree: { - id: string; - name: string; - branch: string; - path: string; - tmuxWindow: string; - }; - agents: AgentEntry[]; - attachRef?: string; -} - -function emitSpawnResult(opts: EmitSpawnResultOptions): void { - if 
(opts.json) { +function emitSpawnResult(result: SpawnResult, options: SpawnOptions): void { + if (options.json) { output({ success: true, - worktree: opts.worktree, - agents: opts.agents.map((a) => ({ - id: a.id, - tmuxTarget: a.tmuxTarget, - sessionId: a.sessionId, - })), + worktree: result.worktree, + agents: result.agents, }, true); return; } - success(opts.successMessage); - for (const a of opts.agents) { - info(` Agent ${a.id} → ${a.tmuxTarget}`); - } - if (opts.attachRef) { - info(`Attach: ppg attach ${opts.attachRef}`); - } -} - -async function spawnNewWorktree( - projectRoot: string, - config: Config, - agentConfig: AgentConfig, - promptText: string, - count: number, - options: SpawnOptions, - userVars: Record, -): Promise { - const baseBranch = options.base ?? await getCurrentBranch(projectRoot); - const wtId = genWorktreeId(); - const name = options.name ? normalizeName(options.name, wtId) : wtId; - const branchName = `ppg/${name}`; - - // Create git worktree - info(`Creating worktree ${wtId} on branch ${branchName}`); - const wtPath = await createWorktree(projectRoot, wtId, { - branch: branchName, - base: baseBranch, - }); - - // Setup env - await setupWorktreeEnv(projectRoot, wtPath, config); - - // Ensure tmux session (manifest is the source of truth for session name) - const manifest = await readManifest(projectRoot); - const sessionName = manifest.sessionName; - await tmux.ensureSession(sessionName); - - // Create tmux window - const windowTarget = await tmux.createWindow(sessionName, name, wtPath); - - // Register skeleton worktree in manifest before spawning agents - // so partial failures leave a record for cleanup - const worktreeEntry: WorktreeEntry = { - id: wtId, - name, - path: wtPath, - branch: branchName, - baseBranch, - status: 'active', - tmuxWindow: windowTarget, - agents: {}, - createdAt: new Date().toISOString(), - }; - - await updateManifest(projectRoot, (m) => { - m.worktrees[wtId] = worktreeEntry; - return m; - }); - - // Spawn 
agents — one tmux window per agent (default), or split panes (--split) - const agents = await spawnAgentBatch({ - projectRoot, - agentConfig, - promptText, - userVars, - count, - split: options.split === true, - worktreePath: wtPath, - branch: branchName, - taskName: name, - sessionName, - windowTarget, - windowNamePrefix: name, - reuseWindowForFirstAgent: true, - onAgentSpawned: async (agentEntry) => { - // Update manifest incrementally after each agent spawn. - await updateManifest(projectRoot, (m) => { - if (m.worktrees[wtId]) { - m.worktrees[wtId].agents[agentEntry.id] = agentEntry; - } - return m; - }); - }, - }); - - // Only open Terminal window when explicitly requested via --open (fire-and-forget) - if (options.open === true) { - openTerminalWindow(sessionName, windowTarget, name).catch(() => {}); - } - - emitSpawnResult({ - json: options.json, - successMessage: `Spawned worktree ${wtId} with ${agents.length} agent(s)`, - worktree: { - id: wtId, - name, - branch: branchName, - path: wtPath, - tmuxWindow: windowTarget, - }, - agents, - attachRef: wtId, - }); -} - -async function spawnOnExistingBranch( - projectRoot: string, - config: Config, - agentConfig: AgentConfig, - branch: string, - promptText: string, - count: number, - options: SpawnOptions, - userVars: Record, -): Promise { - const baseBranch = await getCurrentBranch(projectRoot); - const wtId = genWorktreeId(); - - // Derive name from branch if --name not provided (strip ppg/ prefix if present) - const derivedName = branch.startsWith('ppg/') ? branch.slice(4) : branch; - const name = options.name ? 
normalizeName(options.name, wtId) : normalizeName(derivedName, wtId); - - // Create git worktree from existing branch (no -b flag) - info(`Creating worktree ${wtId} from existing branch ${branch}`); - const wtPath = await adoptWorktree(projectRoot, wtId, branch); + const agentCount = result.agents.length; - // Setup env - await setupWorktreeEnv(projectRoot, wtPath, config); - - // Ensure tmux session - const manifest = await readManifest(projectRoot); - const sessionName = manifest.sessionName; - await tmux.ensureSession(sessionName); - - // Create tmux window - const windowTarget = await tmux.createWindow(sessionName, name, wtPath); - - // Register worktree in manifest - const worktreeEntry: WorktreeEntry = { - id: wtId, - name, - path: wtPath, - branch, - baseBranch, - status: 'active', - tmuxWindow: windowTarget, - agents: {}, - createdAt: new Date().toISOString(), - }; - - await updateManifest(projectRoot, (m) => { - m.worktrees[wtId] = worktreeEntry; - return m; - }); - - const agents = await spawnAgentBatch({ - projectRoot, - agentConfig, - promptText, - userVars, - count, - split: options.split === true, - worktreePath: wtPath, - branch, - taskName: name, - sessionName, - windowTarget, - windowNamePrefix: name, - reuseWindowForFirstAgent: true, - onAgentSpawned: async (agentEntry) => { - await updateManifest(projectRoot, (m) => { - if (m.worktrees[wtId]) { - m.worktrees[wtId].agents[agentEntry.id] = agentEntry; - } - return m; - }); - }, - }); - - if (options.open === true) { - openTerminalWindow(sessionName, windowTarget, name).catch(() => {}); + if (options.worktree) { + success(`Added ${agentCount} agent(s) to worktree ${result.worktree.id}`); + } else if (options.branch) { + success(`Spawned worktree ${result.worktree.id} from branch ${options.branch} with ${agentCount} agent(s)`); + } else { + success(`Spawned worktree ${result.worktree.id} with ${agentCount} agent(s)`); } - emitSpawnResult({ - json: options.json, - successMessage: `Spawned worktree 
${wtId} from branch ${branch} with ${agents.length} agent(s)`, - worktree: { - id: wtId, - name, - branch, - path: wtPath, - tmuxWindow: windowTarget, - }, - agents, - attachRef: wtId, - }); -} - -async function spawnIntoExistingWorktree( - projectRoot: string, - agentConfig: AgentConfig, - worktreeRef: string, - promptText: string, - count: number, - options: SpawnOptions, - userVars: Record, -): Promise { - const manifest = await readManifest(projectRoot); - const wt = resolveWorktree(manifest, worktreeRef); - - if (!wt) throw new WorktreeNotFoundError(worktreeRef); - - // Lazily create tmux window if worktree has none (standalone worktree) - let windowTarget = wt.tmuxWindow; - if (!windowTarget) { - await tmux.ensureSession(manifest.sessionName); - windowTarget = await tmux.createWindow(manifest.sessionName, wt.name, wt.path); - - // Persist tmux window before spawning agents so partial failures are tracked. - await updateManifest(projectRoot, (m) => { - const mWt = m.worktrees[wt.id]; - if (!mWt) return m; - mWt.tmuxWindow = windowTarget; - return m; - }); + for (const a of result.agents) { + info(` Agent ${a.id} → ${a.tmuxTarget}`); } - const agents = await spawnAgentBatch({ - projectRoot, - agentConfig, - promptText, - userVars, - count, - split: options.split === true, - worktreePath: wt.path, - branch: wt.branch, - taskName: wt.name, - sessionName: manifest.sessionName, - windowTarget, - windowNamePrefix: `${wt.name}-agent`, - // For existing worktrees, only reuse the primary pane when explicitly splitting. 
- reuseWindowForFirstAgent: options.split === true, - onAgentSpawned: async (agentEntry) => { - await updateManifest(projectRoot, (m) => { - const mWt = m.worktrees[wt.id]; - if (!mWt) return m; - mWt.agents[agentEntry.id] = agentEntry; - return m; - }); - }, - }); - - // Only open Terminal window when explicitly requested via --open (fire-and-forget) - if (options.open === true) { - openTerminalWindow(manifest.sessionName, windowTarget, wt.name).catch(() => {}); + // Only show attach hint for newly created worktrees, not when adding to existing + if (!options.worktree) { + info(`Attach: ppg attach ${result.worktree.id}`); } - - emitSpawnResult({ - json: options.json, - successMessage: `Added ${agents.length} agent(s) to worktree ${wt.id}`, - worktree: { - id: wt.id, - name: wt.name, - branch: wt.branch, - path: wt.path, - tmuxWindow: windowTarget, - }, - agents, - }); } diff --git a/src/commands/status.ts b/src/commands/status.ts index 326139d..ff66132 100644 --- a/src/commands/status.ts +++ b/src/commands/status.ts @@ -4,6 +4,8 @@ import { refreshAllAgentStatuses } from '../core/agent.js'; import { getRepoRoot } from '../core/worktree.js'; import { output, formatStatus, formatTable, type Column } from '../lib/output.js'; import type { AgentEntry, WorktreeEntry } from '../types/manifest.js'; +import { computeLifecycle } from '../core/lifecycle.js'; +export { computeLifecycle, type WorktreeLifecycle } from '../core/lifecycle.js'; export interface StatusOptions { json?: boolean; @@ -104,20 +106,6 @@ function printWorktreeStatus(wt: WorktreeEntry): void { console.log(table.split('\n').map((l) => ` ${l}`).join('\n')); } -export type WorktreeLifecycle = 'merged' | 'cleaned' | 'busy' | 'shipped' | 'idle'; - -export function computeLifecycle(wt: WorktreeEntry): WorktreeLifecycle { - if (wt.status === 'merged') return 'merged'; - if (wt.status === 'cleaned') return 'cleaned'; - - const agents = Object.values(wt.agents); - - if (agents.some((a) => a.status === 'running')) 
return 'busy'; - if (wt.prUrl) return 'shipped'; - - return 'idle'; -} - function formatTime(iso: string): string { if (!iso) return '—'; const d = new Date(iso); diff --git a/src/core/agent.ts b/src/core/agent.ts index be24e82..def1c23 100644 --- a/src/core/agent.ts +++ b/src/core/agent.ts @@ -3,7 +3,9 @@ import { agentPromptFile, agentPromptsDir } from '../lib/paths.js'; import { getPaneInfo, listSessionPanes, type PaneInfo } from './tmux.js'; import { updateManifest } from './manifest.js'; import { PpgError } from '../lib/errors.js'; -import type { AgentEntry, AgentStatus } from '../types/manifest.js'; +import { agentId as genAgentId, sessionId as genSessionId } from '../lib/id.js'; +import { renderTemplate, type TemplateContext } from './template.js'; +import type { AgentEntry, AgentStatus, WorktreeEntry } from '../types/manifest.js'; import type { AgentConfig } from '../types/config.js'; import * as tmux from './tmux.js'; @@ -242,6 +244,88 @@ export async function killAgents(agents: AgentEntry[]): Promise { })); } +export interface RestartAgentOptions { + projectRoot: string; + agentId: string; + worktree: WorktreeEntry; + oldAgent: AgentEntry; + sessionName: string; + agentConfig: AgentConfig; + promptText: string; +} + +export interface RestartAgentResult { + oldAgentId: string; + newAgentId: string; + tmuxTarget: string; + sessionId: string; + worktreeId: string; + worktreeName: string; + branch: string; + path: string; +} + +/** + * Restart an agent: kill old, spawn new in a fresh tmux window, update manifest. 
+ */ +export async function restartAgent(opts: RestartAgentOptions): Promise { + const { projectRoot, worktree: wt, oldAgent, sessionName, agentConfig, promptText } = opts; + + // Kill old agent if still running + if (oldAgent.status === 'running') { + await killAgent(oldAgent); + } + + await tmux.ensureSession(sessionName); + const newAgentId = genAgentId(); + const windowTarget = await tmux.createWindow(sessionName, `${wt.name}-restart`, wt.path); + + const ctx: TemplateContext = { + WORKTREE_PATH: wt.path, + BRANCH: wt.branch, + AGENT_ID: newAgentId, + PROJECT_ROOT: projectRoot, + TASK_NAME: wt.name, + PROMPT: promptText, + }; + const renderedPrompt = renderTemplate(promptText, ctx); + + const newSessionId = genSessionId(); + const agentEntry = await spawnAgent({ + agentId: newAgentId, + agentConfig, + prompt: renderedPrompt, + worktreePath: wt.path, + tmuxTarget: windowTarget, + projectRoot, + branch: wt.branch, + sessionId: newSessionId, + }); + + await updateManifest(projectRoot, (m) => { + const mWt = m.worktrees[wt.id]; + if (mWt) { + const mOldAgent = mWt.agents[oldAgent.id]; + if (mOldAgent && mOldAgent.status === 'running') { + mOldAgent.status = 'gone'; + } + mWt.agents[newAgentId] = agentEntry; + } + return m; + }); + + return { + oldAgentId: oldAgent.id, + newAgentId, + tmuxTarget: windowTarget, + sessionId: newSessionId, + worktreeId: wt.id, + worktreeName: wt.name, + branch: wt.branch, + path: wt.path, + }; +} + async function fileExists(filePath: string): Promise { try { await fs.access(filePath); diff --git a/src/core/kill.test.ts b/src/core/kill.test.ts new file mode 100644 index 0000000..a6db7d1 --- /dev/null +++ b/src/core/kill.test.ts @@ -0,0 +1,74 @@ +import { describe, test, expect, vi, beforeEach } from 'vitest'; +import { makeWorktree, makeAgent } from '../test-fixtures.js'; +import type { Manifest } from '../types/manifest.js'; + +// ---- Mocks ---- + +let manifestState: Manifest; + +vi.mock('./manifest.js', () => ({ + updateManifest: 
vi.fn(async (_root: string, updater: (m: Manifest) => Manifest | Promise) => { + manifestState = await updater(structuredClone(manifestState)); + return manifestState; + }), +})); + +vi.mock('./agent.js', () => ({ + killAgents: vi.fn(), +})); + +// ---- Imports (after mocks) ---- + +import { killWorktreeAgents } from './kill.js'; +import { killAgents } from './agent.js'; + +describe('killWorktreeAgents', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + test('given worktree with running agents, should kill running agents and set status to gone', async () => { + const agent1 = makeAgent({ id: 'ag-run00001', status: 'running' }); + const agent2 = makeAgent({ id: 'ag-idle0001', status: 'idle' }); + const wt = makeWorktree({ + id: 'wt-abc123', + agents: { 'ag-run00001': agent1, 'ag-idle0001': agent2 }, + }); + manifestState = { + version: 1, + projectRoot: '/tmp/project', + sessionName: 'ppg', + worktrees: { 'wt-abc123': structuredClone(wt) }, + createdAt: '2026-01-01T00:00:00.000Z', + updatedAt: '2026-01-01T00:00:00.000Z', + }; + + const result = await killWorktreeAgents('/tmp/project', wt); + + expect(result.killed).toEqual(['ag-run00001']); + expect(vi.mocked(killAgents)).toHaveBeenCalledWith([agent1]); + expect(manifestState.worktrees['wt-abc123'].agents['ag-run00001'].status).toBe('gone'); + expect(manifestState.worktrees['wt-abc123'].agents['ag-idle0001'].status).toBe('idle'); + }); + + test('given worktree with no running agents, should return empty killed list', async () => { + const agent = makeAgent({ id: 'ag-done0001', status: 'exited' }); + const wt = makeWorktree({ + id: 'wt-abc123', + agents: { 'ag-done0001': agent }, + }); + manifestState = { + version: 1, + projectRoot: '/tmp/project', + sessionName: 'ppg', + worktrees: { 'wt-abc123': structuredClone(wt) }, + createdAt: '2026-01-01T00:00:00.000Z', + updatedAt: '2026-01-01T00:00:00.000Z', + }; + + const result = await killWorktreeAgents('/tmp/project', wt); + + 
expect(result.killed).toEqual([]); + expect(vi.mocked(killAgents)).toHaveBeenCalledWith([]); + }); +}); diff --git a/src/core/kill.ts b/src/core/kill.ts new file mode 100644 index 0000000..ef26e67 --- /dev/null +++ b/src/core/kill.ts @@ -0,0 +1,36 @@ +import { updateManifest } from './manifest.js'; +import { killAgents } from './agent.js'; +import type { WorktreeEntry } from '../types/manifest.js'; + +export interface KillWorktreeResult { + worktreeId: string; + killed: string[]; +} + +/** Kill all running agents in a worktree and set their status to 'gone'. */ +export async function killWorktreeAgents( + projectRoot: string, + wt: WorktreeEntry, +): Promise { + const toKill = Object.values(wt.agents).filter((a) => a.status === 'running'); + const killedIds = toKill.map((a) => a.id); + + await killAgents(toKill); + + await updateManifest(projectRoot, (m) => { + const mWt = m.worktrees[wt.id]; + if (mWt) { + for (const agent of Object.values(mWt.agents)) { + if (killedIds.includes(agent.id)) { + agent.status = 'gone'; + } + } + } + return m; + }); + + return { + worktreeId: wt.id, + killed: killedIds, + }; +} diff --git a/src/core/lifecycle.ts b/src/core/lifecycle.ts new file mode 100644 index 0000000..5fa282d --- /dev/null +++ b/src/core/lifecycle.ts @@ -0,0 +1,15 @@ +import type { WorktreeEntry } from '../types/manifest.js'; + +export type WorktreeLifecycle = 'merged' | 'cleaned' | 'busy' | 'shipped' | 'idle'; + +export function computeLifecycle(wt: WorktreeEntry): WorktreeLifecycle { + if (wt.status === 'merged') return 'merged'; + if (wt.status === 'cleaned') return 'cleaned'; + + const agents = Object.values(wt.agents); + + if (agents.some((a) => a.status === 'running')) return 'busy'; + if (wt.prUrl) return 'shipped'; + + return 'idle'; +} diff --git a/src/core/merge.test.ts b/src/core/merge.test.ts new file mode 100644 index 0000000..99eb75a --- /dev/null +++ b/src/core/merge.test.ts @@ -0,0 +1,119 @@ +import { describe, test, expect, vi, beforeEach } from 
'vitest'; +import { makeWorktree, makeAgent } from '../test-fixtures.js'; +import type { Manifest } from '../types/manifest.js'; + +// ---- Mocks ---- + +let manifestState: Manifest; + +vi.mock('./manifest.js', () => ({ + updateManifest: vi.fn(async (_root: string, updater: (m: Manifest) => Manifest | Promise) => { + manifestState = await updater(structuredClone(manifestState)); + return manifestState; + }), +})); + +vi.mock('./worktree.js', () => ({ + getCurrentBranch: vi.fn(() => 'main'), +})); + +vi.mock('./cleanup.js', () => ({ + cleanupWorktree: vi.fn(async () => ({ selfProtected: false, selfProtectedTargets: [] })), +})); + +vi.mock('execa', () => ({ + execa: vi.fn(), +})); + +vi.mock('../lib/env.js', () => ({ + execaEnv: {}, +})); + +// ---- Imports (after mocks) ---- + +import { mergeWorktree } from './merge.js'; +import { getCurrentBranch } from './worktree.js'; +import { cleanupWorktree } from './cleanup.js'; +import { execa } from 'execa'; + +describe('mergeWorktree', () => { + beforeEach(() => { + vi.clearAllMocks(); + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + manifestState = { + version: 1, + projectRoot: '/tmp/project', + sessionName: 'ppg', + worktrees: { 'wt-abc123': wt }, + createdAt: '2026-01-01T00:00:00.000Z', + updatedAt: '2026-01-01T00:00:00.000Z', + }; + }); + + test('given valid worktree, should merge with squash and update manifest to merged', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + + const result = await mergeWorktree('/tmp/project', wt); + + expect(result.strategy).toBe('squash'); + expect(result.cleaned).toBe(true); + expect(manifestState.worktrees['wt-abc123'].status).toBe('merged'); + expect(manifestState.worktrees['wt-abc123'].mergedAt).toBeDefined(); + }); + + test('given no-ff strategy, should call git merge --no-ff', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + + await mergeWorktree('/tmp/project', wt, { strategy: 'no-ff' }); + + const calls = 
vi.mocked(execa).mock.calls; + const mergeCall = calls.find((c) => c[0] === 'git' && (c[1] as string[])?.[0] === 'merge'); + expect(mergeCall).toBeDefined(); + expect((mergeCall![1] as string[])).toContain('--no-ff'); + }); + + test('given different current branch, should checkout base branch first', async () => { + vi.mocked(getCurrentBranch).mockResolvedValueOnce('feature-x'); + const wt = makeWorktree({ id: 'wt-abc123', baseBranch: 'main', agents: {} }); + + await mergeWorktree('/tmp/project', wt); + + const calls = vi.mocked(execa).mock.calls; + const checkoutCall = calls.find((c) => c[0] === 'git' && (c[1] as string[])?.[0] === 'checkout'); + expect(checkoutCall).toBeDefined(); + expect((checkoutCall![1] as string[])).toContain('main'); + }); + + test('given running agents without force, should throw AGENTS_RUNNING', async () => { + const agent = makeAgent({ id: 'ag-running1', status: 'running' }); + const wt = makeWorktree({ id: 'wt-abc123', agents: { 'ag-running1': agent } }); + + await expect(mergeWorktree('/tmp/project', wt)).rejects.toThrow('agent(s) still running'); + }); + + test('given running agents with force, should merge anyway', async () => { + const agent = makeAgent({ id: 'ag-running1', status: 'running' }); + const wt = makeWorktree({ id: 'wt-abc123', agents: { 'ag-running1': agent } }); + + const result = await mergeWorktree('/tmp/project', wt, { force: true }); + + expect(result.worktreeId).toBe('wt-abc123'); + }); + + test('given cleanup false, should skip cleanup', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + + const result = await mergeWorktree('/tmp/project', wt, { cleanup: false }); + + expect(result.cleaned).toBe(false); + expect(vi.mocked(cleanupWorktree)).not.toHaveBeenCalled(); + }); + + test('given git merge failure, should set status to failed and throw', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + vi.mocked(execa).mockRejectedValueOnce(new Error('conflict')); + + await 
expect(mergeWorktree('/tmp/project', wt)).rejects.toThrow('Merge failed'); + expect(manifestState.worktrees['wt-abc123'].status).toBe('failed'); + }); +}); diff --git a/src/core/merge.ts b/src/core/merge.ts new file mode 100644 index 0000000..ad98701 --- /dev/null +++ b/src/core/merge.ts @@ -0,0 +1,105 @@ +import { execa } from 'execa'; +import { updateManifest } from './manifest.js'; +import { getCurrentBranch } from './worktree.js'; +import { cleanupWorktree, type CleanupOptions } from './cleanup.js'; +import { PpgError, MergeFailedError } from '../lib/errors.js'; +import { execaEnv } from '../lib/env.js'; +import type { WorktreeEntry } from '../types/manifest.js'; + +export interface MergeWorktreeOptions { + strategy?: 'squash' | 'no-ff'; + cleanup?: boolean; + force?: boolean; + cleanupOptions?: CleanupOptions; +} + +export interface MergeWorktreeResult { + worktreeId: string; + branch: string; + baseBranch: string; + strategy: 'squash' | 'no-ff'; + cleaned: boolean; + selfProtected: boolean; +} + +/** Merge a worktree branch into its base branch. Updates manifest status throughout. */ +export async function mergeWorktree( + projectRoot: string, + wt: WorktreeEntry, + options: MergeWorktreeOptions = {}, +): Promise { + const { strategy = 'squash', cleanup = true, force = false } = options; + + // Check all agents finished + const incomplete = Object.values(wt.agents).filter((a) => a.status === 'running'); + if (incomplete.length > 0 && !force) { + const ids = incomplete.map((a) => a.id).join(', '); + throw new PpgError( + `${incomplete.length} agent(s) still running: ${ids}. 
Use --force to merge anyway.`, + 'AGENTS_RUNNING', + ); + } + + // Set worktree status to merging + await updateManifest(projectRoot, (m) => { + if (m.worktrees[wt.id]) { + m.worktrees[wt.id].status = 'merging'; + } + return m; + }); + + try { + const currentBranch = await getCurrentBranch(projectRoot); + if (currentBranch !== wt.baseBranch) { + await execa('git', ['checkout', wt.baseBranch], { ...execaEnv, cwd: projectRoot }); + } + + if (strategy === 'squash') { + await execa('git', ['merge', '--squash', wt.branch], { ...execaEnv, cwd: projectRoot }); + await execa('git', ['commit', '-m', `ppg: merge ${wt.name} (${wt.branch})`], { + ...execaEnv, + cwd: projectRoot, + }); + } else { + await execa('git', ['merge', '--no-ff', wt.branch, '-m', `ppg: merge ${wt.name} (${wt.branch})`], { + ...execaEnv, + cwd: projectRoot, + }); + } + } catch (err) { + await updateManifest(projectRoot, (m) => { + if (m.worktrees[wt.id]) { + m.worktrees[wt.id].status = 'failed'; + } + return m; + }); + throw new MergeFailedError( + `Merge failed: ${err instanceof Error ? 
err.message : err}`, + ); + } + + // Mark as merged + await updateManifest(projectRoot, (m) => { + if (m.worktrees[wt.id]) { + m.worktrees[wt.id].status = 'merged'; + m.worktrees[wt.id].mergedAt = new Date().toISOString(); + } + return m; + }); + + // Cleanup + let selfProtected = false; + if (cleanup) { + const cleanupResult = await cleanupWorktree(projectRoot, wt, options.cleanupOptions); + selfProtected = cleanupResult.selfProtected; + } + + return { + worktreeId: wt.id, + branch: wt.branch, + baseBranch: wt.baseBranch, + strategy, + cleaned: cleanup, + selfProtected, + }; +} diff --git a/src/core/operations/kill.test.ts b/src/core/operations/kill.test.ts new file mode 100644 index 0000000..d09fb64 --- /dev/null +++ b/src/core/operations/kill.test.ts @@ -0,0 +1,478 @@ +import { describe, test, expect, vi, beforeEach } from 'vitest'; +import type { Manifest, AgentEntry, WorktreeEntry } from '../../types/manifest.js'; +import type { PaneInfo } from '../tmux.js'; + +// --- Mocks --- + +vi.mock('../manifest.js', () => ({ + readManifest: vi.fn(), + updateManifest: vi.fn(), + findAgent: vi.fn(), + resolveWorktree: vi.fn(), +})); + +vi.mock('../agent.js', () => ({ + killAgent: vi.fn(async () => {}), + killAgents: vi.fn(async () => {}), +})); + +vi.mock('../pr.js', () => ({ + checkPrState: vi.fn(async () => 'UNKNOWN'), +})); + +vi.mock('../cleanup.js', () => ({ + cleanupWorktree: vi.fn(async () => ({ + worktreeId: 'wt-abc123', + manifestUpdated: true, + tmuxKilled: 1, + tmuxSkipped: 0, + tmuxFailed: 0, + selfProtected: false, + selfProtectedTargets: [], + })), +})); + +vi.mock('../self.js', () => ({ + excludeSelf: vi.fn(), +})); + +vi.mock('../tmux.js', () => ({ + killPane: vi.fn(async () => {}), +})); + +import { performKill } from './kill.js'; +import { readManifest, updateManifest, findAgent, resolveWorktree } from '../manifest.js'; +import { killAgent, killAgents } from '../agent.js'; +import { checkPrState } from '../pr.js'; +import { cleanupWorktree } from 
'../cleanup.js'; +import { excludeSelf } from '../self.js'; +import { killPane } from '../tmux.js'; +import { PpgError } from '../../lib/errors.js'; + +// --- Helpers --- + +function makeAgent(id: string, overrides: Partial = {}): AgentEntry { + return { + id, + name: 'test', + agentType: 'claude', + status: 'running', + tmuxTarget: 'ppg:1.0', + prompt: 'do stuff', + startedAt: new Date().toISOString(), + ...overrides, + }; +} + +function makeWorktree(overrides: Partial = {}): WorktreeEntry { + return { + id: 'wt-abc123', + name: 'test-wt', + path: '/tmp/wt', + branch: 'ppg/test-wt', + baseBranch: 'main', + status: 'active', + tmuxWindow: 'ppg:1', + agents: {}, + createdAt: new Date().toISOString(), + ...overrides, + }; +} + +function makeManifest(worktrees: Record = {}): Manifest { + return { + version: 1, + projectRoot: '/project', + sessionName: 'ppg', + worktrees, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + }; +} + +// Shared manifest reference for updateManifest mock +let _manifest: Manifest; +function currentManifest(): Manifest { + return JSON.parse(JSON.stringify(_manifest)); +} + +// --- Tests --- + +describe('performKill', () => { + beforeEach(() => { + vi.clearAllMocks(); + _manifest = makeManifest(); + // Establish defaults for all mocks after clearing + vi.mocked(readManifest).mockResolvedValue(_manifest); + vi.mocked(updateManifest).mockImplementation(async (_root: string, updater: (m: any) => any) => { + return updater(currentManifest()); + }); + vi.mocked(findAgent).mockReturnValue(undefined); + vi.mocked(resolveWorktree).mockReturnValue(undefined); + vi.mocked(checkPrState).mockResolvedValue('UNKNOWN'); + vi.mocked(excludeSelf).mockImplementation((agents: AgentEntry[]) => ({ safe: agents, skipped: [] })); + }); + + test('throws INVALID_ARGS when no target specified', async () => { + const err = await performKill({ projectRoot: '/project' }).catch((e) => e); + expect(err).toBeInstanceOf(PpgError); + expect((err as 
PpgError).code).toBe('INVALID_ARGS'); + }); + + describe('single agent kill', () => { + beforeEach(() => { + const agent = makeAgent('ag-12345678'); + const wt = makeWorktree({ agents: { 'ag-12345678': agent } }); + _manifest = makeManifest({ 'wt-abc123': wt }); + vi.mocked(readManifest).mockResolvedValue(_manifest); + vi.mocked(findAgent).mockReturnValue({ worktree: wt, agent }); + }); + + test('kills a running agent and updates manifest', async () => { + const result = await performKill({ + projectRoot: '/project', + agent: 'ag-12345678', + }); + + expect(killAgent).toHaveBeenCalled(); + expect(updateManifest).toHaveBeenCalled(); + expect(result.success).toBe(true); + expect(result.killed).toEqual(['ag-12345678']); + }); + + test('manifest updater sets agent status to gone', async () => { + await performKill({ + projectRoot: '/project', + agent: 'ag-12345678', + }); + + // Verify the updater was called and inspect what it does + const updaterCall = vi.mocked(updateManifest).mock.calls[0]; + expect(updaterCall[0]).toBe('/project'); + const updater = updaterCall[1]; + + // Run the updater against a test manifest to verify the mutation + const testManifest = makeManifest({ + 'wt-abc123': makeWorktree({ + agents: { 'ag-12345678': makeAgent('ag-12345678') }, + }), + }); + // findAgent mock returns matching agent from test manifest + vi.mocked(findAgent).mockReturnValue({ + worktree: testManifest.worktrees['wt-abc123'], + agent: testManifest.worktrees['wt-abc123'].agents['ag-12345678'], + }); + const result = updater(testManifest) as Manifest; + expect(result.worktrees['wt-abc123'].agents['ag-12345678'].status).toBe('gone'); + }); + + test('skips kill for terminal-state agent', async () => { + const goneAgent = makeAgent('ag-12345678', { status: 'gone' }); + const wt = makeWorktree({ agents: { 'ag-12345678': goneAgent } }); + vi.mocked(findAgent).mockReturnValue({ worktree: wt, agent: goneAgent }); + + const result = await performKill({ + projectRoot: '/project', + 
agent: 'ag-12345678', + }); + + expect(killAgent).not.toHaveBeenCalled(); + expect(result.success).toBe(true); + expect(result.killed).toEqual([]); + expect(result.message).toContain('already gone'); + }); + + test('throws AgentNotFoundError for unknown agent', async () => { + vi.mocked(findAgent).mockReturnValue(undefined); + + const err = await performKill({ projectRoot: '/project', agent: 'ag-unknown' }).catch((e) => e); + expect(err).toBeInstanceOf(PpgError); + expect((err as PpgError).code).toBe('AGENT_NOT_FOUND'); + }); + + test('self-protection returns success:false with skipped', async () => { + const agent = makeAgent('ag-12345678'); + vi.mocked(excludeSelf).mockReturnValue({ + safe: [], + skipped: [agent], + }); + + const result = await performKill({ + projectRoot: '/project', + agent: 'ag-12345678', + selfPaneId: '%5', + paneMap: new Map(), + }); + + expect(killAgent).not.toHaveBeenCalled(); + expect(result.success).toBe(false); + expect(result.killed).toEqual([]); + expect(result.skipped).toEqual(['ag-12345678']); + }); + + test('--delete removes agent from manifest', async () => { + const result = await performKill({ + projectRoot: '/project', + agent: 'ag-12345678', + delete: true, + }); + + expect(killAgent).toHaveBeenCalled(); + expect(killPane).toHaveBeenCalled(); + expect(updateManifest).toHaveBeenCalled(); + expect(result.success).toBe(true); + expect(result.deleted).toEqual(['ag-12345678']); + }); + + test('--delete manifest updater removes agent entry', async () => { + await performKill({ + projectRoot: '/project', + agent: 'ag-12345678', + delete: true, + }); + + const updaterCall = vi.mocked(updateManifest).mock.calls[0]; + const updater = updaterCall[1]; + + const testManifest = makeManifest({ + 'wt-abc123': makeWorktree({ + agents: { 'ag-12345678': makeAgent('ag-12345678') }, + }), + }); + vi.mocked(findAgent).mockReturnValue({ + worktree: testManifest.worktrees['wt-abc123'], + agent: 
        testManifest.worktrees['wt-abc123'].agents['ag-12345678'],
      });
      const result = updater(testManifest) as Manifest;
      expect(result.worktrees['wt-abc123'].agents['ag-12345678']).toBeUndefined();
    });

    test('--delete on terminal agent skips kill but still deletes', async () => {
      const idleAgent = makeAgent('ag-12345678', { status: 'idle' });
      const wt = makeWorktree({ agents: { 'ag-12345678': idleAgent } });
      vi.mocked(findAgent).mockReturnValue({ worktree: wt, agent: idleAgent });

      const result = await performKill({
        projectRoot: '/project',
        agent: 'ag-12345678',
        delete: true,
      });

      // Terminal (non-running) agent: no process to kill, but the pane is
      // still removed and the manifest entry deleted.
      expect(killAgent).not.toHaveBeenCalled();
      expect(killPane).toHaveBeenCalled();
      expect(result.success).toBe(true);
      expect(result.deleted).toEqual(['ag-12345678']);
      expect(result.killed).toEqual([]);
    });

    test('propagates killAgent errors', async () => {
      vi.mocked(killAgent).mockRejectedValue(new Error('tmux crash'));

      const err = await performKill({
        projectRoot: '/project',
        agent: 'ag-12345678',
      }).catch((e) => e);

      expect(err).toBeInstanceOf(Error);
      expect(err.message).toBe('tmux crash');
    });
  });

  describe('worktree kill', () => {
    let agent1: AgentEntry;
    let agent2: AgentEntry;
    let wt: WorktreeEntry;

    beforeEach(() => {
      agent1 = makeAgent('ag-aaaaaaaa', { tmuxTarget: 'ppg:1.0' });
      agent2 = makeAgent('ag-bbbbbbbb', { tmuxTarget: 'ppg:1.1' });
      wt = makeWorktree({
        agents: {
          'ag-aaaaaaaa': agent1,
          'ag-bbbbbbbb': agent2,
        },
      });
      _manifest = makeManifest({ 'wt-abc123': wt });
      vi.mocked(readManifest).mockResolvedValue(_manifest);
      vi.mocked(resolveWorktree).mockReturnValue(wt);
    });

    test('kills all running agents in worktree', async () => {
      const result = await performKill({
        projectRoot: '/project',
        worktree: 'wt-abc123',
      });

      expect(killAgents).toHaveBeenCalledWith([agent1, agent2]);
      expect(result.success).toBe(true);
      expect(result.killed).toEqual(['ag-aaaaaaaa', 'ag-bbbbbbbb']);
    });

    test('throws WorktreeNotFoundError for unknown worktree', async () => {
      vi.mocked(resolveWorktree).mockReturnValue(undefined);

      const err = await performKill({ projectRoot: '/project', worktree: 'wt-unknown' }).catch((e) => e);
      expect(err).toBeInstanceOf(PpgError);
      expect((err as PpgError).code).toBe('WORKTREE_NOT_FOUND');
    });

    test('--remove triggers worktree cleanup', async () => {
      await performKill({
        projectRoot: '/project',
        worktree: 'wt-abc123',
        remove: true,
      });

      expect(cleanupWorktree).toHaveBeenCalled();
    });

    test('--delete removes worktree from manifest', async () => {
      const result = await performKill({
        projectRoot: '/project',
        worktree: 'wt-abc123',
        delete: true,
      });

      expect(cleanupWorktree).toHaveBeenCalled();
      expect(result.deleted).toEqual(['wt-abc123']);
    });

    test('--delete skips worktree with open PR', async () => {
      vi.mocked(checkPrState).mockResolvedValue('OPEN');

      const result = await performKill({
        projectRoot: '/project',
        worktree: 'wt-abc123',
        delete: true,
      });

      // An open PR protects the worktree from deletion by default.
      expect(cleanupWorktree).not.toHaveBeenCalled();
      expect(result.deleted).toEqual([]);
      expect(result.skippedOpenPrs).toEqual(['wt-abc123']);
    });

    test('--delete --include-open-prs overrides PR check', async () => {
      vi.mocked(checkPrState).mockResolvedValue('OPEN');

      const result = await performKill({
        projectRoot: '/project',
        worktree: 'wt-abc123',
        delete: true,
        includeOpenPrs: true,
      });

      expect(cleanupWorktree).toHaveBeenCalled();
      expect(result.deleted).toEqual(['wt-abc123']);
    });

    test('filters non-running agents', async () => {
      const idleAgent = makeAgent('ag-cccccccc', { status: 'idle' });
      const wtMixed = makeWorktree({
        agents: {
          'ag-aaaaaaaa': agent1,
          'ag-cccccccc': idleAgent,
        },
      });
      vi.mocked(resolveWorktree).mockReturnValue(wtMixed);

      const result = await performKill({
        projectRoot: '/project',
        worktree: 'wt-abc123',
      });

      expect(result.killed).toEqual(['ag-aaaaaaaa']);
    });
  });

  describe('kill all', () => {
    let agent1: AgentEntry;
    let agent2: AgentEntry;
    let wt1: WorktreeEntry;
    let wt2: WorktreeEntry;

    beforeEach(() => {
      agent1 = makeAgent('ag-aaaaaaaa');
      agent2 = makeAgent('ag-bbbbbbbb');
      wt1 = makeWorktree({
        id: 'wt-111111',
        agents: { 'ag-aaaaaaaa': agent1 },
      });
      wt2 = makeWorktree({
        id: 'wt-222222',
        name: 'other-wt',
        agents: { 'ag-bbbbbbbb': agent2 },
      });
      _manifest = makeManifest({ 'wt-111111': wt1, 'wt-222222': wt2 });
      vi.mocked(readManifest).mockResolvedValue(_manifest);
      // resolveWorktree is called inside removeWorktreeCleanup for --delete/--remove
      vi.mocked(resolveWorktree).mockImplementation((_m, ref) => {
        if (ref === 'wt-111111') return wt1;
        if (ref === 'wt-222222') return wt2;
        return undefined;
      });
    });

    test('kills agents across all worktrees', async () => {
      const result = await performKill({
        projectRoot: '/project',
        all: true,
      });

      expect(killAgents).toHaveBeenCalled();
      expect(result.success).toBe(true);
      expect(result.killed).toHaveLength(2);
      expect(result.killed).toContain('ag-aaaaaaaa');
      expect(result.killed).toContain('ag-bbbbbbbb');
    });

    test('includes worktreeCount in result', async () => {
      const result = await performKill({
        projectRoot: '/project',
        all: true,
      });

      expect(result.worktreeCount).toBe(2);
    });

    test('--delete removes all active worktrees', async () => {
      const result = await performKill({
        projectRoot: '/project',
        all: true,
        delete: true,
      });

      expect(cleanupWorktree).toHaveBeenCalledTimes(2);
      expect(result.deleted).toHaveLength(2);
    });

    test('--remove triggers cleanup without manifest deletion', async () => {
      const result = await performKill({
        projectRoot: '/project',
        all: true,
        remove: true,
      });

      expect(cleanupWorktree).toHaveBeenCalledTimes(2);
      expect(result.removed).toHaveLength(2);
      // --remove without --delete:
cleanup happens but entries stay in manifest + expect(result.deleted).toEqual([]); + }); + + test('self-protection filters agents', async () => { + vi.mocked(excludeSelf).mockReturnValue({ + safe: [agent2], + skipped: [agent1], + }); + + const result = await performKill({ + projectRoot: '/project', + all: true, + selfPaneId: '%5', + paneMap: new Map(), + }); + + expect(result.killed).toEqual(['ag-bbbbbbbb']); + expect(result.skipped).toEqual(['ag-aaaaaaaa']); + }); + }); +}); diff --git a/src/core/operations/kill.ts b/src/core/operations/kill.ts new file mode 100644 index 0000000..4ad9433 --- /dev/null +++ b/src/core/operations/kill.ts @@ -0,0 +1,262 @@ +import { readManifest, updateManifest, findAgent, resolveWorktree } from '../manifest.js'; +import { killAgent, killAgents } from '../agent.js'; +import { checkPrState } from '../pr.js'; +import { cleanupWorktree } from '../cleanup.js'; +import { excludeSelf } from '../self.js'; +import { killPane, type PaneInfo } from '../tmux.js'; +import { PpgError, AgentNotFoundError, WorktreeNotFoundError } from '../../lib/errors.js'; +import type { AgentEntry } from '../../types/manifest.js'; + +export interface KillInput { + projectRoot: string; + agent?: string; + worktree?: string; + all?: boolean; + remove?: boolean; + delete?: boolean; + includeOpenPrs?: boolean; + selfPaneId?: string | null; + paneMap?: Map; +} + +export interface KillResult { + success: boolean; + killed: string[]; + skipped?: string[]; + removed?: string[]; + deleted?: string[]; + skippedOpenPrs?: string[]; + worktreeCount?: number; + message?: string; +} + +export async function performKill(input: KillInput): Promise { + const { projectRoot } = input; + + if (!input.agent && !input.worktree && !input.all) { + throw new PpgError('One of --agent, --worktree, or --all is required', 'INVALID_ARGS'); + } + + if (input.agent) { + return killSingleAgent(projectRoot, input.agent, input); + } else if (input.worktree) { + return killWorktreeAgents(projectRoot, 
input.worktree, input); + } else { + return killAllAgents(projectRoot, input); + } +} + +async function killSingleAgent( + projectRoot: string, + agentId: string, + input: KillInput, +): Promise { + const manifest = await readManifest(projectRoot); + const found = findAgent(manifest, agentId); + if (!found) throw new AgentNotFoundError(agentId); + + const { agent } = found; + const isTerminal = agent.status !== 'running'; + + // Self-protection check + if (input.selfPaneId && input.paneMap) { + const { skipped } = excludeSelf([agent], input.selfPaneId, input.paneMap); + if (skipped.length > 0) { + return { success: false, killed: [], skipped: [agentId], message: 'self-protection' }; + } + } + + if (input.delete) { + if (!isTerminal) { + await killAgent(agent); + } + await killPane(agent.tmuxTarget); + + await updateManifest(projectRoot, (m) => { + const f = findAgent(m, agentId); + if (f) { + delete f.worktree.agents[agentId]; + } + return m; + }); + + return { success: true, killed: isTerminal ? 
[] : [agentId], deleted: [agentId] }; + } + + if (isTerminal) { + return { success: true, killed: [], message: `Agent ${agentId} already ${agent.status}` }; + } + + await killAgent(agent); + + await updateManifest(projectRoot, (m) => { + const f = findAgent(m, agentId); + if (f) { + f.agent.status = 'gone'; + } + return m; + }); + + return { success: true, killed: [agentId] }; +} + +async function killWorktreeAgents( + projectRoot: string, + worktreeRef: string, + input: KillInput, +): Promise { + const manifest = await readManifest(projectRoot); + const wt = resolveWorktree(manifest, worktreeRef); + if (!wt) throw new WorktreeNotFoundError(worktreeRef); + + let toKill = Object.values(wt.agents).filter((a) => a.status === 'running'); + + const skippedIds: string[] = []; + if (input.selfPaneId && input.paneMap) { + const { safe, skipped } = excludeSelf(toKill, input.selfPaneId, input.paneMap); + toKill = safe; + for (const a of skipped) skippedIds.push(a.id); + } + + const killedIds = toKill.map((a) => a.id); + await killAgents(toKill); + + await updateManifest(projectRoot, (m) => { + const mWt = m.worktrees[wt.id]; + if (mWt) { + for (const agent of Object.values(mWt.agents)) { + if (killedIds.includes(agent.id)) { + agent.status = 'gone'; + } + } + } + return m; + }); + + // Check for open PR before deleting worktree + let skippedOpenPr = false; + if (input.delete && !input.includeOpenPrs) { + const prState = await checkPrState(wt.branch); + if (prState === 'OPEN') { + skippedOpenPr = true; + } + } + + const shouldRemove = (input.remove || input.delete) && !skippedOpenPr; + if (shouldRemove) { + await removeWorktreeCleanup(projectRoot, wt.id, input.selfPaneId ?? null, input.paneMap); + } + + if (input.delete && !skippedOpenPr) { + await updateManifest(projectRoot, (m) => { + delete m.worktrees[wt.id]; + return m; + }); + } + + return { + success: true, + killed: killedIds, + skipped: skippedIds.length > 0 ? skippedIds : undefined, + removed: shouldRemove ? 
[wt.id] : [], + deleted: (input.delete && !skippedOpenPr) ? [wt.id] : [], + skippedOpenPrs: skippedOpenPr ? [wt.id] : undefined, + }; +} + +async function killAllAgents( + projectRoot: string, + input: KillInput, +): Promise { + const manifest = await readManifest(projectRoot); + let toKill: AgentEntry[] = []; + + for (const wt of Object.values(manifest.worktrees)) { + for (const agent of Object.values(wt.agents)) { + if (agent.status === 'running') { + toKill.push(agent); + } + } + } + + const skippedIds: string[] = []; + if (input.selfPaneId && input.paneMap) { + const { safe, skipped } = excludeSelf(toKill, input.selfPaneId, input.paneMap); + toKill = safe; + for (const a of skipped) skippedIds.push(a.id); + } + + const killedIds = toKill.map((a) => a.id); + await killAgents(toKill); + + const activeWorktreeIds = Object.values(manifest.worktrees) + .filter((wt) => wt.status === 'active') + .map((wt) => wt.id); + + await updateManifest(projectRoot, (m) => { + for (const wt of Object.values(m.worktrees)) { + for (const agent of Object.values(wt.agents)) { + if (killedIds.includes(agent.id)) { + agent.status = 'gone'; + } + } + } + return m; + }); + + // Filter out worktrees with open PRs + let worktreesToRemove = activeWorktreeIds; + const openPrWorktreeIds: string[] = []; + if (input.delete && !input.includeOpenPrs) { + worktreesToRemove = []; + for (const wtId of activeWorktreeIds) { + const wt = manifest.worktrees[wtId]; + if (wt) { + const prState = await checkPrState(wt.branch); + if (prState === 'OPEN') { + openPrWorktreeIds.push(wtId); + } else { + worktreesToRemove.push(wtId); + } + } + } + } + + const shouldRemove = input.remove || input.delete; + if (shouldRemove) { + for (const wtId of worktreesToRemove) { + await removeWorktreeCleanup(projectRoot, wtId, input.selfPaneId ?? 
null, input.paneMap); + } + } + + if (input.delete) { + await updateManifest(projectRoot, (m) => { + for (const wtId of worktreesToRemove) { + delete m.worktrees[wtId]; + } + return m; + }); + } + + return { + success: true, + killed: killedIds, + skipped: skippedIds.length > 0 ? skippedIds : undefined, + removed: shouldRemove ? worktreesToRemove : [], + deleted: input.delete ? worktreesToRemove : [], + skippedOpenPrs: openPrWorktreeIds.length > 0 ? openPrWorktreeIds : undefined, + worktreeCount: activeWorktreeIds.length, + }; +} + +async function removeWorktreeCleanup( + projectRoot: string, + wtId: string, + selfPaneId: string | null, + paneMap?: Map, +): Promise { + const manifest = await readManifest(projectRoot); + const wt = resolveWorktree(manifest, wtId); + if (!wt) return; + await cleanupWorktree(projectRoot, wt, { selfPaneId, paneMap }); +} diff --git a/src/core/operations/merge.test.ts b/src/core/operations/merge.test.ts new file mode 100644 index 0000000..d3c21e8 --- /dev/null +++ b/src/core/operations/merge.test.ts @@ -0,0 +1,330 @@ +import { describe, test, expect, vi, beforeEach } from 'vitest'; +import type { Manifest, WorktreeEntry } from '../../types/manifest.js'; + +// --- Mocks --- + +const mockExeca = vi.fn(async () => ({ stdout: 'main', stderr: '' })); +vi.mock('execa', () => ({ + execa: (...args: unknown[]) => (mockExeca as Function)(...args), +})); + +const mockManifest = (): Manifest => ({ + version: 1, + projectRoot: '/project', + sessionName: 'ppg', + worktrees: { + 'wt-abc123': makeWorktree(), + }, + createdAt: '2025-01-01T00:00:00.000Z', + updatedAt: '2025-01-01T00:00:00.000Z', +}); + +let latestManifest: Manifest; + +vi.mock('../manifest.js', () => ({ + requireManifest: vi.fn(async () => latestManifest), + updateManifest: vi.fn(async (_root: string, updater: (m: Manifest) => Manifest | Promise) => { + latestManifest = await updater(latestManifest); + return latestManifest; + }), + resolveWorktree: vi.fn((manifest: Manifest, ref: 
string) => {
    return manifest.worktrees[ref] ??
      Object.values(manifest.worktrees).find((wt) => wt.name === ref || wt.branch === ref);
  }),
}));

vi.mock('../agent.js', () => ({
  refreshAllAgentStatuses: vi.fn(async (m: Manifest) => m),
}));

vi.mock('../worktree.js', () => ({
  getCurrentBranch: vi.fn(async () => 'main'),
}));

vi.mock('../cleanup.js', () => ({
  cleanupWorktree: vi.fn(async () => ({
    worktreeId: 'wt-abc123',
    manifestUpdated: true,
    tmuxKilled: 1,
    tmuxSkipped: 0,
    tmuxFailed: 0,
    selfProtected: false,
    selfProtectedTargets: [],
  })),
}));

vi.mock('../self.js', () => ({
  getCurrentPaneId: vi.fn(() => null),
}));

vi.mock('../tmux.js', () => ({
  listSessionPanes: vi.fn(async () => new Map()),
}));

vi.mock('../../lib/env.js', () => ({
  execaEnv: { env: { PATH: '/usr/bin' } },
}));

import { performMerge } from './merge.js';
import { updateManifest } from '../manifest.js';
import { getCurrentBranch } from '../worktree.js';
import { cleanupWorktree } from '../cleanup.js';
import { getCurrentPaneId } from '../self.js';
import { listSessionPanes } from '../tmux.js';
import { PpgError, MergeFailedError, WorktreeNotFoundError } from '../../lib/errors.js';

// Fixture: a fully-populated worktree entry with one exited agent.
function makeWorktree(overrides: Partial<WorktreeEntry> = {}): WorktreeEntry {
  return {
    id: 'wt-abc123',
    name: 'test-feature',
    path: '/project/.worktrees/wt-abc123',
    branch: 'ppg/test-feature',
    baseBranch: 'main',
    status: 'active',
    tmuxWindow: 'ppg:1',
    agents: {
      'ag-00000001': {
        id: 'ag-00000001',
        name: 'claude-1',
        agentType: 'claude',
        status: 'exited',
        tmuxTarget: 'ppg:1.0',
        prompt: 'do stuff',
        startedAt: '2025-01-01T00:00:00.000Z',
      },
    },
    createdAt: '2025-01-01T00:00:00.000Z',
    ...overrides,
  };
}

describe('performMerge', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    latestManifest = mockManifest();
    mockExeca.mockResolvedValue({ stdout: 'main', stderr: '' });
  });

  test('performs squash merge and returns result', async () => {
    const result = await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
      strategy: 'squash',
    });

    expect(result.merged).toBe(true);
    expect(result.strategy).toBe('squash');
    expect(result.worktreeId).toBe('wt-abc123');
    expect(result.branch).toBe('ppg/test-feature');
    expect(result.baseBranch).toBe('main');
    expect(result.cleaned).toBe(true);
    expect(result.dryRun).toBe(false);
  });

  test('defaults to squash strategy', async () => {
    const result = await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
    });

    expect(result.strategy).toBe('squash');
    // Verify git merge --squash was called
    expect(mockExeca).toHaveBeenCalledWith(
      'git', ['merge', '--squash', 'ppg/test-feature'],
      expect.objectContaining({ cwd: '/project' }),
    );
  });

  test('supports no-ff merge strategy', async () => {
    await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
      strategy: 'no-ff',
    });

    expect(mockExeca).toHaveBeenCalledWith(
      'git', ['merge', '--no-ff', 'ppg/test-feature', '-m', 'ppg: merge test-feature (ppg/test-feature)'],
      expect.objectContaining({ cwd: '/project' }),
    );
  });

  test('state transitions: active → merging → merged → cleaned', async () => {
    const statusLog: string[] = [];
    vi.mocked(updateManifest).mockImplementation(async (_root, updater) => {
      latestManifest = await updater(latestManifest);
      const wt = latestManifest.worktrees['wt-abc123'];
      if (wt) statusLog.push(wt.status);
      return latestManifest;
    });

    await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
    });

    // Call order: refreshAllAgentStatuses (active) → set merging → set merged
    // Note: cleanup's manifest update is mocked, so 'cleaned' is not tracked here
    expect(statusLog).toContain('merging');
    expect(statusLog).toContain('merged');
    expect(statusLog.indexOf('merging')).toBeLessThan(statusLog.indexOf('merged'));
  });

  test('sets status to failed on git merge error', async () => {
    // Only the 'merge' subcommand fails; checkout/commit still succeed.
    mockExeca.mockImplementation(async (...args: unknown[]) => {
      const cmdArgs = args[1] as string[];
      if (cmdArgs[0] === 'merge') {
        throw new Error('CONFLICT (content): Merge conflict in file.ts');
      }
      return { stdout: 'main', stderr: '' };
    });

    await expect(
      performMerge({
        projectRoot: '/project',
        worktreeRef: 'wt-abc123',
      }),
    ).rejects.toThrow(MergeFailedError);

    expect(latestManifest.worktrees['wt-abc123'].status).toBe('failed');
  });

  test('throws AGENTS_RUNNING when agents still running', async () => {
    latestManifest.worktrees['wt-abc123'].agents['ag-00000001'].status = 'running';

    const err = await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
    }).catch((e) => e);

    expect(err).toBeInstanceOf(PpgError);
    expect(err.code).toBe('AGENTS_RUNNING');
  });

  test('force bypasses running agent check', async () => {
    latestManifest.worktrees['wt-abc123'].agents['ag-00000001'].status = 'running';

    const result = await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
      force: true,
    });

    expect(result.merged).toBe(true);
  });

  test('throws WorktreeNotFoundError for invalid ref', async () => {
    await expect(
      performMerge({
        projectRoot: '/project',
        worktreeRef: 'wt-nonexistent',
      }),
    ).rejects.toThrow(WorktreeNotFoundError);
  });

  test('dry run returns early without modifying state', async () => {
    const result = await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
      dryRun: true,
    });

    expect(result.dryRun).toBe(true);
    expect(result.merged).toBe(false);
    expect(result.cleaned).toBe(false);
    // Should not have called git merge
    expect(mockExeca).not.toHaveBeenCalledWith(
      'git', expect.arrayContaining(['merge']),
      expect.anything(),
    );
    // Worktree status unchanged
    expect(latestManifest.worktrees['wt-abc123'].status).toBe('active');
  });

  test('skips cleanup when cleanup=false', async () => {
    const result = await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
      cleanup: false,
    });

    expect(result.merged).toBe(true);
    expect(result.cleaned).toBe(false);
    expect(cleanupWorktree).not.toHaveBeenCalled();
  });

  test('switches to base branch if not already on it', async () => {
    vi.mocked(getCurrentBranch).mockResolvedValueOnce('some-other-branch');

    await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
    });

    expect(mockExeca).toHaveBeenCalledWith(
      'git', ['checkout', 'main'],
      expect.objectContaining({ cwd: '/project' }),
    );
  });

  test('skips checkout when already on base branch', async () => {
    vi.mocked(getCurrentBranch).mockResolvedValueOnce('main');

    await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
    });

    expect(mockExeca).not.toHaveBeenCalledWith(
      'git', ['checkout', 'main'],
      expect.anything(),
    );
  });

  test('passes self-protection context to cleanup', async () => {
    vi.mocked(getCurrentPaneId).mockReturnValueOnce('%5');
    const paneMap = new Map();
    vi.mocked(listSessionPanes).mockResolvedValueOnce(paneMap);

    await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
    });

    expect(listSessionPanes).toHaveBeenCalledWith('ppg');
    expect(cleanupWorktree).toHaveBeenCalledWith(
      '/project',
      expect.objectContaining({ id: 'wt-abc123' }),
      { selfPaneId: '%5', paneMap },
    );
  });

  test('reports selfProtected when cleanup skips targets', async () => {
    vi.mocked(cleanupWorktree).mockResolvedValueOnce({
      worktreeId: 'wt-abc123',
      manifestUpdated: true,
      tmuxKilled: 0,
      tmuxSkipped: 0,
      tmuxFailed: 0,
      selfProtected: true,
      selfProtectedTargets: ['ppg:1'],
    });

    const result = await performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
    });

    expect(result.selfProtected).toBe(true);
  });

  test('sets mergedAt timestamp on successful merge', async () => {
    await
performMerge({
      projectRoot: '/project',
      worktreeRef: 'wt-abc123',
    });

    expect(latestManifest.worktrees['wt-abc123'].mergedAt).toBeDefined();
    // Should be a valid ISO date
    expect(() => new Date(latestManifest.worktrees['wt-abc123'].mergedAt!)).not.toThrow();
  });
});
diff --git a/src/core/operations/merge.ts b/src/core/operations/merge.ts
new file mode 100644
index 0000000..4997833
--- /dev/null
+++ b/src/core/operations/merge.ts
import { execa } from 'execa';
import { requireManifest, updateManifest, resolveWorktree } from '../manifest.js';
import { refreshAllAgentStatuses } from '../agent.js';
import { getCurrentBranch } from '../worktree.js';
import { cleanupWorktree } from '../cleanup.js';
import { getCurrentPaneId } from '../self.js';
import { listSessionPanes, type PaneInfo } from '../tmux.js';
import { PpgError, WorktreeNotFoundError, MergeFailedError } from '../../lib/errors.js';
import { execaEnv } from '../../lib/env.js';

export type MergeStrategy = 'squash' | 'no-ff';

/** Options controlling a merge; `cleanup` defaults to true, others to false. */
export interface MergeOptions {
  projectRoot: string;
  worktreeRef: string;
  strategy?: MergeStrategy;
  cleanup?: boolean;
  dryRun?: boolean;
  force?: boolean;
}

export interface MergeResult {
  worktreeId: string;
  branch: string;
  baseBranch: string;
  strategy: MergeStrategy;
  dryRun: boolean;
  merged: boolean;
  cleaned: boolean;
  selfProtected: boolean;
}

/**
 * Perform a merge operation: resolve worktree, validate agents, run git merge,
 * and optionally clean up.
 *
 * State machine: active → merging → merged → cleaned
 * On failure: active → merging → failed
 *
 * @throws WorktreeNotFoundError when `worktreeRef` resolves to nothing.
 * @throws PpgError('AGENTS_RUNNING') when agents are running and !force.
 * @throws MergeFailedError when the git merge itself fails.
 */
export async function performMerge(options: MergeOptions): Promise<MergeResult> {
  const { projectRoot, worktreeRef, force = false, dryRun = false } = options;
  const strategy = options.strategy ?? 'squash';
  const shouldCleanup = options.cleanup !== false;

  // Load and refresh manifest
  await requireManifest(projectRoot);
  const manifest = await updateManifest(projectRoot, async (m) => {
    return refreshAllAgentStatuses(m, projectRoot);
  });

  const wt = resolveWorktree(manifest, worktreeRef);
  if (!wt) throw new WorktreeNotFoundError(worktreeRef);

  // Validate: no running agents unless forced
  const agents = Object.values(wt.agents);
  const incomplete = agents.filter((a) => a.status === 'running');

  if (incomplete.length > 0 && !force) {
    const ids = incomplete.map((a) => a.id).join(', ');
    throw new PpgError(
      `${incomplete.length} agent(s) still running: ${ids}. Use --force to merge anyway.`,
      'AGENTS_RUNNING',
    );
  }

  // Dry run: return early without changes
  if (dryRun) {
    return {
      worktreeId: wt.id,
      branch: wt.branch,
      baseBranch: wt.baseBranch,
      strategy,
      dryRun: true,
      merged: false,
      cleaned: false,
      selfProtected: false,
    };
  }

  // Transition: active → merging
  await updateManifest(projectRoot, (m) => {
    if (m.worktrees[wt.id]) {
      m.worktrees[wt.id].status = 'merging';
    }
    return m;
  });

  // Perform git merge
  try {
    // The merge must run from the base branch; switch only when needed.
    const currentBranch = await getCurrentBranch(projectRoot);
    if (currentBranch !== wt.baseBranch) {
      await execa('git', ['checkout', wt.baseBranch], { ...execaEnv, cwd: projectRoot });
    }

    if (strategy === 'squash') {
      // --squash stages the changes; a separate commit records them.
      await execa('git', ['merge', '--squash', wt.branch], { ...execaEnv, cwd: projectRoot });
      await execa('git', ['commit', '-m', `ppg: merge ${wt.name} (${wt.branch})`], {
        ...execaEnv,
        cwd: projectRoot,
      });
    } else {
      await execa('git', ['merge', '--no-ff', wt.branch, '-m', `ppg: merge ${wt.name} (${wt.branch})`], {
        ...execaEnv,
        cwd: projectRoot,
      });
    }
  } catch (err) {
    // Transition: merging → failed
    await updateManifest(projectRoot, (m) => {
      if (m.worktrees[wt.id]) {
        m.worktrees[wt.id].status = 'failed';
      }
      return m;
    });
    throw new MergeFailedError(
      `Merge failed: ${err instanceof Error ? err.message : err}`,
    );
  }

  // Transition: merging → merged
  await updateManifest(projectRoot, (m) => {
    if (m.worktrees[wt.id]) {
      m.worktrees[wt.id].status = 'merged';
      m.worktrees[wt.id].mergedAt = new Date().toISOString();
    }
    return m;
  });

  // Cleanup (merged → cleaned)
  let selfProtected = false;
  if (shouldCleanup) {
    const selfPaneId = getCurrentPaneId();
    let paneMap: Map<string, PaneInfo> | undefined;
    if (selfPaneId) {
      // Pane map is only needed when self-protection can apply.
      paneMap = await listSessionPanes(manifest.sessionName);
    }

    const cleanupResult = await cleanupWorktree(projectRoot, wt, { selfPaneId, paneMap });
    selfProtected = cleanupResult.selfProtected;
  }

  return {
    worktreeId: wt.id,
    branch: wt.branch,
    baseBranch: wt.baseBranch,
    strategy,
    dryRun: false,
    merged: true,
    cleaned: shouldCleanup,
    selfProtected,
  };
}
diff --git a/src/core/operations/restart.test.ts b/src/core/operations/restart.test.ts
new file mode 100644
index 0000000..43944a1
--- /dev/null
+++ b/src/core/operations/restart.test.ts
import { describe, test, expect, vi, beforeEach } from 'vitest';
import { makeAgent, makeWorktree } from '../../test-fixtures.js';
import type { Manifest } from '../../types/manifest.js';
import type { AgentStatus } from '../../types/manifest.js';

// Mock node:fs/promises
vi.mock('node:fs/promises', () => ({
  default: {
    readFile: vi.fn(),
    mkdir: vi.fn(),
    writeFile: vi.fn(),
  },
}));

// Mock core modules
vi.mock('../worktree.js', () => ({
  getRepoRoot: vi.fn().mockResolvedValue('/tmp/project'),
}));

vi.mock('../config.js', () => ({
  loadConfig: vi.fn().mockResolvedValue({
    sessionName: 'ppg',
    defaultAgent: 'claude',
    agents: {
      claude: { name: 'claude', command: 'claude --dangerously-skip-permissions', interactive: true },
    },
  }),
  resolveAgentConfig: vi.fn().mockReturnValue({
    name: 'claude',
    command: 'claude --dangerously-skip-permissions',
    interactive:
true,
  }),
}));

vi.mock('../manifest.js', () => ({
  requireManifest: vi.fn(),
  updateManifest: vi.fn(),
  findAgent: vi.fn(),
}));

vi.mock('../agent.js', () => ({
  spawnAgent: vi.fn(),
  killAgent: vi.fn(),
}));

vi.mock('../tmux.js', () => ({
  ensureSession: vi.fn(),
  createWindow: vi.fn(),
}));

vi.mock('../template.js', () => ({
  renderTemplate: vi.fn((content: string) => content),
}));

vi.mock('../../lib/id.js', () => ({
  agentId: vi.fn().mockReturnValue('ag-newagent'),
  sessionId: vi.fn().mockReturnValue('sess-new123'),
}));

vi.mock('../../lib/paths.js', () => ({
  agentPromptFile: vi.fn().mockReturnValue('/tmp/project/.ppg/agent-prompts/ag-test1234.md'),
}));

vi.mock('../../lib/errors.js', async () => {
  const actual = await vi.importActual('../../lib/errors.js');
  return actual;
});

import fs from 'node:fs/promises';
import { requireManifest, updateManifest, findAgent } from '../manifest.js';
import { spawnAgent, killAgent } from '../agent.js';
import * as tmux from '../tmux.js';
import { performRestart } from './restart.js';

const mockedFindAgent = vi.mocked(findAgent);
const mockedRequireManifest = vi.mocked(requireManifest);
const mockedUpdateManifest = vi.mocked(updateManifest);
const mockedSpawnAgent = vi.mocked(spawnAgent);
const mockedKillAgent = vi.mocked(killAgent);
const mockedEnsureSession = vi.mocked(tmux.ensureSession);
const mockedCreateWindow = vi.mocked(tmux.createWindow);
const mockedReadFile = vi.mocked(fs.readFile);

const PROJECT_ROOT = '/tmp/project';

function makeManifest(overrides?: Partial<Manifest>): Manifest {
  return {
    version: 1,
    projectRoot: PROJECT_ROOT,
    sessionName: 'ppg',
    worktrees: {},
    createdAt: '2026-01-01T00:00:00.000Z',
    updatedAt: '2026-01-01T00:00:00.000Z',
    ...overrides,
  };
}

beforeEach(() => {
  vi.clearAllMocks();
});

describe('performRestart', () => {
  // Shared arrange helper: one worktree with one old agent in a given status.
  function setupDefaults(agentOverrides?: { status?: AgentStatus }) {
    const status = agentOverrides?.status ?? 'running';
    const agent = makeAgent({ id: 'ag-oldagent', status });
    const wt = makeWorktree({
      id: 'wt-abc123',
      name: 'feature-auth',
      agents: { 'ag-oldagent': agent },
    });
    const manifest = makeManifest({ worktrees: { [wt.id]: wt } });
    mockedRequireManifest.mockResolvedValue(manifest);
    mockedFindAgent.mockReturnValue({ worktree: wt, agent });
    mockedCreateWindow.mockResolvedValue('ppg:2');
    mockedReadFile.mockResolvedValue('original prompt' as unknown as never);
    mockedSpawnAgent.mockResolvedValue(makeAgent({
      id: 'ag-newagent',
      tmuxTarget: 'ppg:2',
      sessionId: 'sess-new123',
    }));
    mockedUpdateManifest.mockImplementation(async (_root, updater) => {
      const m = JSON.parse(JSON.stringify(manifest)) as Manifest;
      return updater(m);
    });
    return { agent, wt, manifest };
  }

  test('given running agent, should kill old agent before restarting', async () => {
    const { agent } = setupDefaults({ status: 'running' });

    await performRestart({ agentRef: 'ag-oldagent' });

    expect(mockedKillAgent).toHaveBeenCalledWith(agent);
  });

  test('given running agent, should return killedOldAgent true', async () => {
    setupDefaults({ status: 'running' });

    const result = await performRestart({ agentRef: 'ag-oldagent' });

    expect(result.killedOldAgent).toBe(true);
  });

  test('given idle agent, should not kill old agent', async () => {
    setupDefaults({ status: 'idle' });

    await performRestart({ agentRef: 'ag-oldagent' });

    expect(mockedKillAgent).not.toHaveBeenCalled();
  });

  test('given exited agent, should not kill old agent', async () => {
    setupDefaults({ status: 'exited' });

    await performRestart({ agentRef: 'ag-oldagent' });

    expect(mockedKillAgent).not.toHaveBeenCalled();
  });

  test('given gone agent, should not kill old agent', async () => {
    setupDefaults({ status: 'gone' });

    await performRestart({ agentRef: 'ag-oldagent' });

    expect(mockedKillAgent).not.toHaveBeenCalled();
  });

  test('given non-running agent, should return killedOldAgent false', async () => {
    setupDefaults({ status: 'idle' });

    const result = await performRestart({ agentRef: 'ag-oldagent' });

    expect(result.killedOldAgent).toBe(false);
  });

  test('should create tmux window in same worktree', async () => {
    const { wt } = setupDefaults();

    await performRestart({ agentRef: 'ag-oldagent' });

    expect(mockedEnsureSession).toHaveBeenCalledWith('ppg');
    expect(mockedCreateWindow).toHaveBeenCalledWith('ppg', 'feature-auth-restart', wt.path);
  });

  test('should spawn agent with correct options', async () => {
    const { wt } = setupDefaults();

    await performRestart({ agentRef: 'ag-oldagent' });

    expect(mockedSpawnAgent).toHaveBeenCalledWith({
      agentId: 'ag-newagent',
      agentConfig: {
        name: 'claude',
        command: 'claude --dangerously-skip-permissions',
        interactive: true,
      },
      prompt: 'original prompt',
      worktreePath: wt.path,
      tmuxTarget: 'ppg:2',
      projectRoot: PROJECT_ROOT,
      branch: wt.branch,
      sessionId: 'sess-new123',
    });
  });

  test('should update manifest with new agent and mark old as gone', async () => {
    const { wt } = setupDefaults();

    await performRestart({ agentRef: 'ag-oldagent' });

    expect(mockedUpdateManifest).toHaveBeenCalledWith(PROJECT_ROOT, expect.any(Function));

    // Verify the updater function marks old agent gone and adds new agent
    const updater = mockedUpdateManifest.mock.calls[0][1];
    const testManifest = makeManifest({
      worktrees: {
        [wt.id]: {
          ...wt,
          agents: {
            'ag-oldagent': makeAgent({ id: 'ag-oldagent', status: 'running' }),
          },
        },
      },
    });
    const updated = await updater(testManifest);
    const updatedWt = updated.worktrees[wt.id];

    expect(updatedWt.agents['ag-oldagent'].status).toBe('gone');
    expect(updatedWt.agents['ag-newagent']).toBeDefined();
  });

  test('should return old and new agent info', async () => {
    setupDefaults();

    const result = await
performRestart({ agentRef: 'ag-oldagent' }); + + expect(result.oldAgentId).toBe('ag-oldagent'); + expect(result.newAgent.id).toBe('ag-newagent'); + expect(result.newAgent.tmuxTarget).toBe('ppg:2'); + expect(result.newAgent.sessionId).toBe('sess-new123'); + expect(result.newAgent.worktreeId).toBe('wt-abc123'); + expect(result.newAgent.worktreeName).toBe('feature-auth'); + }); + + test('given prompt override, should use it instead of reading file', async () => { + setupDefaults(); + + await performRestart({ agentRef: 'ag-oldagent', prompt: 'custom prompt' }); + + expect(mockedReadFile).not.toHaveBeenCalled(); + }); + + test('given no prompt and missing prompt file, should throw PromptNotFoundError', async () => { + setupDefaults(); + mockedReadFile.mockRejectedValue(new Error('ENOENT')); + + await expect(performRestart({ agentRef: 'ag-oldagent' })).rejects.toThrow('Could not read original prompt'); + }); + + test('given unknown agent ref, should throw AgentNotFoundError', async () => { + const manifest = makeManifest(); + mockedRequireManifest.mockResolvedValue(manifest); + mockedFindAgent.mockReturnValue(undefined); + + await expect(performRestart({ agentRef: 'ag-nonexist' })).rejects.toThrow('Agent not found'); + }); + + test('given explicit projectRoot, should use it instead of getRepoRoot', async () => { + setupDefaults(); + + await performRestart({ agentRef: 'ag-oldagent', projectRoot: PROJECT_ROOT }); + + // getRepoRoot is mocked — if projectRoot is passed, the operation still works + // (verifiable because requireManifest receives the correct root) + expect(mockedUpdateManifest).toHaveBeenCalledWith(PROJECT_ROOT, expect.any(Function)); + }); +}); diff --git a/src/core/operations/restart.ts b/src/core/operations/restart.ts new file mode 100644 index 0000000..50ebcc8 --- /dev/null +++ b/src/core/operations/restart.ts @@ -0,0 +1,126 @@ +import fs from 'node:fs/promises'; +import { requireManifest, updateManifest, findAgent } from '../manifest.js'; +import { 
loadConfig, resolveAgentConfig } from '../config.js';
+import { spawnAgent, killAgent } from '../agent.js';
+import { getRepoRoot } from '../worktree.js';
+import * as tmux from '../tmux.js';
+import { agentId as genAgentId, sessionId as genSessionId } from '../../lib/id.js';
+import { agentPromptFile } from '../../lib/paths.js';
+import { AgentNotFoundError, PromptNotFoundError } from '../../lib/errors.js';
+import { renderTemplate, type TemplateContext } from '../template.js';
+
+export interface RestartParams {
+  agentRef: string;
+  prompt?: string;
+  agentType?: string;
+  projectRoot?: string;
+}
+
+export interface RestartResult {
+  oldAgentId: string;
+  killedOldAgent: boolean;
+  newAgent: {
+    id: string;
+    tmuxTarget: string;
+    sessionId: string;
+    worktreeId: string;
+    worktreeName: string;
+    branch: string;
+    path: string;
+  };
+  sessionName: string;
+}
+
+export async function performRestart(params: RestartParams): Promise<RestartResult> {
+  const { agentRef, prompt: promptOverride, agentType } = params;
+
+  const projectRoot = params.projectRoot ?? await getRepoRoot();
+  const config = await loadConfig(projectRoot);
+  const manifest = await requireManifest(projectRoot);
+
+  const found = findAgent(manifest, agentRef);
+  if (!found) throw new AgentNotFoundError(agentRef);
+
+  const { worktree: wt, agent: oldAgent } = found;
+
+  // Kill old agent if still running
+  let killedOldAgent = false;
+  if (oldAgent.status === 'running') {
+    await killAgent(oldAgent);
+    killedOldAgent = true;
+  }
+
+  // Read original prompt from prompt file, or use override
+  let promptText: string;
+  if (promptOverride) {
+    promptText = promptOverride;
+  } else {
+    const pFile = agentPromptFile(projectRoot, oldAgent.id);
+    try {
+      promptText = await fs.readFile(pFile, 'utf-8');
+    } catch {
+      throw new PromptNotFoundError(oldAgent.id);
+    }
+  }
+
+  // Resolve agent config
+  const agentConfig = resolveAgentConfig(config, agentType ??
oldAgent.agentType); + + // Ensure tmux session + await tmux.ensureSession(manifest.sessionName); + + // Create new tmux window in same worktree + const newAgentId = genAgentId(); + const windowTarget = await tmux.createWindow(manifest.sessionName, `${wt.name}-restart`, wt.path); + + // Render template vars + const ctx: TemplateContext = { + WORKTREE_PATH: wt.path, + BRANCH: wt.branch, + AGENT_ID: newAgentId, + PROJECT_ROOT: projectRoot, + TASK_NAME: wt.name, + PROMPT: promptText, + }; + const renderedPrompt = renderTemplate(promptText, ctx); + + const newSessionId = genSessionId(); + const agentEntry = await spawnAgent({ + agentId: newAgentId, + agentConfig, + prompt: renderedPrompt, + worktreePath: wt.path, + tmuxTarget: windowTarget, + projectRoot, + branch: wt.branch, + sessionId: newSessionId, + }); + + // Update manifest: mark old agent as gone, add new agent + await updateManifest(projectRoot, (m) => { + const mWt = m.worktrees[wt.id]; + if (mWt) { + const mOldAgent = mWt.agents[oldAgent.id]; + if (mOldAgent && mOldAgent.status === 'running') { + mOldAgent.status = 'gone'; + } + mWt.agents[newAgentId] = agentEntry; + } + return m; + }); + + return { + oldAgentId: oldAgent.id, + killedOldAgent, + newAgent: { + id: newAgentId, + tmuxTarget: windowTarget, + sessionId: newSessionId, + worktreeId: wt.id, + worktreeName: wt.name, + branch: wt.branch, + path: wt.path, + }, + sessionName: manifest.sessionName, + }; +} diff --git a/src/core/operations/spawn.test.ts b/src/core/operations/spawn.test.ts new file mode 100644 index 0000000..1c81e33 --- /dev/null +++ b/src/core/operations/spawn.test.ts @@ -0,0 +1,446 @@ +import { describe, test, expect, vi, beforeEach } from 'vitest'; +import type { Manifest, WorktreeEntry } from '../../types/manifest.js'; +import type { Config } from '../../types/config.js'; + +// --- Mocks --- + +vi.mock('node:fs/promises', () => ({ + default: { + access: vi.fn(), + readFile: vi.fn(), + mkdir: vi.fn(), + writeFile: vi.fn(), + }, +})); + 
+vi.mock('../config.js', () => ({ + loadConfig: vi.fn(), + resolveAgentConfig: vi.fn(), +})); + +vi.mock('../manifest.js', () => ({ + readManifest: vi.fn(), + updateManifest: vi.fn(), + resolveWorktree: vi.fn(), +})); + +vi.mock('../worktree.js', () => ({ + getRepoRoot: vi.fn(), + getCurrentBranch: vi.fn(), + createWorktree: vi.fn(), + adoptWorktree: vi.fn(), +})); + +vi.mock('../env.js', () => ({ + setupWorktreeEnv: vi.fn(), +})); + +vi.mock('../template.js', () => ({ + loadTemplate: vi.fn(), + renderTemplate: vi.fn((content: string) => content), +})); + +vi.mock('../agent.js', () => ({ + spawnAgent: vi.fn(), +})); + +vi.mock('../tmux.js', () => ({ + ensureSession: vi.fn(), + createWindow: vi.fn(), + splitPane: vi.fn(), + sendKeys: vi.fn(), +})); + +vi.mock('../terminal.js', () => ({ + openTerminalWindow: vi.fn(), +})); + +vi.mock('../../lib/id.js', () => ({ + worktreeId: vi.fn(), + agentId: vi.fn(), + sessionId: vi.fn(), +})); + +vi.mock('../../lib/paths.js', () => ({ + manifestPath: vi.fn((root: string) => `${root}/.ppg/manifest.json`), +})); + +vi.mock('../../lib/name.js', () => ({ + normalizeName: vi.fn((name: string) => name), +})); + +vi.mock('../../lib/vars.js', () => ({ + parseVars: vi.fn(() => ({})), +})); + +// --- Imports (after mocks) --- + +import fs from 'node:fs/promises'; +import { loadConfig, resolveAgentConfig } from '../config.js'; +import { readManifest, updateManifest, resolveWorktree } from '../manifest.js'; +import { getRepoRoot, getCurrentBranch, createWorktree, adoptWorktree } from '../worktree.js'; +import { setupWorktreeEnv } from '../env.js'; +import { loadTemplate } from '../template.js'; +import { spawnAgent } from '../agent.js'; +import * as tmux from '../tmux.js'; +import { openTerminalWindow } from '../terminal.js'; +import { worktreeId as genWorktreeId, agentId as genAgentId, sessionId as genSessionId } from '../../lib/id.js'; +import { performSpawn } from './spawn.js'; + +const mockedFs = vi.mocked(fs); +const mockedLoadConfig = 
vi.mocked(loadConfig); +const mockedResolveAgentConfig = vi.mocked(resolveAgentConfig); +const mockedReadManifest = vi.mocked(readManifest); +const mockedUpdateManifest = vi.mocked(updateManifest); +const mockedResolveWorktree = vi.mocked(resolveWorktree); +const mockedCreateWorktree = vi.mocked(createWorktree); +const mockedSpawnAgent = vi.mocked(spawnAgent); +const mockedEnsureSession = vi.mocked(tmux.ensureSession); +const mockedCreateWindow = vi.mocked(tmux.createWindow); +const mockedSplitPane = vi.mocked(tmux.splitPane); +const mockedLoadTemplate = vi.mocked(loadTemplate); + +const PROJECT_ROOT = '/tmp/project'; +const SESSION_NAME = 'ppg-test'; + +const DEFAULT_CONFIG: Config = { + sessionName: SESSION_NAME, + defaultAgent: 'claude', + agents: { + claude: { name: 'claude', command: 'claude', interactive: true }, + }, + envFiles: ['.env'], + symlinkNodeModules: true, +}; + +const AGENT_CONFIG = { name: 'claude', command: 'claude', interactive: true }; + +const DEFAULT_MANIFEST: Manifest = { + version: 1, + projectRoot: PROJECT_ROOT, + sessionName: SESSION_NAME, + worktrees: {}, + createdAt: '2026-01-01T00:00:00.000Z', + updatedAt: '2026-01-01T00:00:00.000Z', +}; + +function makeManifestState(): Manifest { + return structuredClone(DEFAULT_MANIFEST); +} + +function setupDefaultMocks() { + vi.mocked(getRepoRoot).mockResolvedValue(PROJECT_ROOT); + mockedLoadConfig.mockResolvedValue(DEFAULT_CONFIG); + mockedResolveAgentConfig.mockReturnValue(AGENT_CONFIG); + mockedFs.access.mockResolvedValue(undefined); + mockedReadManifest.mockResolvedValue(makeManifestState()); + mockedUpdateManifest.mockImplementation(async (_root, updater) => { + return updater(makeManifestState()); + }); + vi.mocked(getCurrentBranch).mockResolvedValue('main'); + vi.mocked(genWorktreeId).mockReturnValue('wt-abc123'); + vi.mocked(genAgentId).mockReturnValue('ag-test0001'); + vi.mocked(genSessionId).mockReturnValue('session-uuid-1'); + 
mockedCreateWorktree.mockResolvedValue(`${PROJECT_ROOT}/.worktrees/wt-abc123`); + vi.mocked(adoptWorktree).mockResolvedValue(`${PROJECT_ROOT}/.worktrees/wt-abc123`); + mockedEnsureSession.mockResolvedValue(undefined); + mockedCreateWindow.mockResolvedValue(`${SESSION_NAME}:1`); + vi.mocked(setupWorktreeEnv).mockResolvedValue(undefined); + mockedSpawnAgent.mockResolvedValue({ + id: 'ag-test0001', + name: 'claude', + agentType: 'claude', + status: 'running', + tmuxTarget: `${SESSION_NAME}:1`, + prompt: 'Do the task', + startedAt: '2026-01-01T00:00:00.000Z', + sessionId: 'session-uuid-1', + }); +} + +beforeEach(() => { + vi.clearAllMocks(); + setupDefaultMocks(); +}); + +describe('performSpawn', () => { + describe('new worktree (default path)', () => { + test('given prompt option, should create worktree, setup env, create tmux, spawn agent, return result', async () => { + const result = await performSpawn({ prompt: 'Do the task', name: 'feature-x' }); + + expect(mockedCreateWorktree).toHaveBeenCalledWith(PROJECT_ROOT, 'wt-abc123', { + branch: 'ppg/feature-x', + base: 'main', + }); + expect(vi.mocked(setupWorktreeEnv)).toHaveBeenCalledWith( + PROJECT_ROOT, + `${PROJECT_ROOT}/.worktrees/wt-abc123`, + DEFAULT_CONFIG, + ); + expect(mockedEnsureSession).toHaveBeenCalledWith(SESSION_NAME); + expect(mockedCreateWindow).toHaveBeenCalledWith( + SESSION_NAME, + 'feature-x', + `${PROJECT_ROOT}/.worktrees/wt-abc123`, + ); + expect(mockedSpawnAgent).toHaveBeenCalledWith(expect.objectContaining({ + agentId: 'ag-test0001', + agentConfig: AGENT_CONFIG, + projectRoot: PROJECT_ROOT, + })); + + expect(result).toEqual({ + worktree: { + id: 'wt-abc123', + name: 'feature-x', + branch: 'ppg/feature-x', + path: `${PROJECT_ROOT}/.worktrees/wt-abc123`, + tmuxWindow: `${SESSION_NAME}:1`, + }, + agents: [{ + id: 'ag-test0001', + tmuxTarget: `${SESSION_NAME}:1`, + sessionId: 'session-uuid-1', + }], + }); + }); + + test('given no name, should use worktree ID as name', async () => { + await 
performSpawn({ prompt: 'Do the task' }); + + expect(mockedCreateWorktree).toHaveBeenCalledWith(PROJECT_ROOT, 'wt-abc123', { + branch: 'ppg/wt-abc123', + base: 'main', + }); + }); + + test('given --base option, should use it instead of current branch', async () => { + await performSpawn({ prompt: 'Do the task', base: 'develop' }); + + expect(mockedCreateWorktree).toHaveBeenCalledWith(PROJECT_ROOT, 'wt-abc123', { + branch: 'ppg/wt-abc123', + base: 'develop', + }); + expect(vi.mocked(getCurrentBranch)).not.toHaveBeenCalled(); + }); + + test('given --open, should call openTerminalWindow', async () => { + vi.mocked(openTerminalWindow).mockResolvedValue(undefined); + + await performSpawn({ prompt: 'Do the task', open: true }); + + expect(vi.mocked(openTerminalWindow)).toHaveBeenCalledWith( + SESSION_NAME, + `${SESSION_NAME}:1`, + 'wt-abc123', + ); + }); + + test('given count=2 with --split, should split pane for second agent', async () => { + let agentCallCount = 0; + vi.mocked(genAgentId).mockImplementation(() => { + agentCallCount++; + return `ag-test000${agentCallCount}`; + }); + mockedSplitPane.mockResolvedValue({ paneId: '%2', target: `${SESSION_NAME}:1.1` }); + mockedSpawnAgent + .mockResolvedValueOnce({ + id: 'ag-test0001', name: 'claude', agentType: 'claude', status: 'running', + tmuxTarget: `${SESSION_NAME}:1`, prompt: 'Do the task', startedAt: '2026-01-01T00:00:00.000Z', + sessionId: 'session-uuid-1', + }) + .mockResolvedValueOnce({ + id: 'ag-test0002', name: 'claude', agentType: 'claude', status: 'running', + tmuxTarget: `${SESSION_NAME}:1.1`, prompt: 'Do the task', startedAt: '2026-01-01T00:00:00.000Z', + sessionId: 'session-uuid-1', + }); + + const result = await performSpawn({ prompt: 'Do the task', count: 2, split: true }); + + expect(mockedSplitPane).toHaveBeenCalledWith(`${SESSION_NAME}:1`, 'horizontal', expect.any(String)); + expect(result.agents).toHaveLength(2); + }); + + test('given new worktree, should register skeleton in manifest before spawning 
agents', async () => { + // Capture the updater functions to inspect what each one does in isolation + const updaters: Array<(m: Manifest) => Manifest | Promise> = []; + mockedUpdateManifest.mockImplementation(async (_root, updater) => { + updaters.push(updater); + const m = makeManifestState(); + return updater(m); + }); + + await performSpawn({ prompt: 'Do the task', name: 'feature-x' }); + + // First updater should register the skeleton worktree (no agents yet) + const skeletonResult = await updaters[0](makeManifestState()); + expect(skeletonResult.worktrees['wt-abc123']).toBeDefined(); + expect(Object.keys(skeletonResult.worktrees['wt-abc123'].agents)).toHaveLength(0); + + // Second updater should add agent to an existing worktree entry + const withWorktree = makeManifestState(); + withWorktree.worktrees['wt-abc123'] = structuredClone(skeletonResult.worktrees['wt-abc123']); + const agentResult = await updaters[1](withWorktree); + expect(agentResult.worktrees['wt-abc123'].agents['ag-test0001']).toBeDefined(); + }); + }); + + describe('existing branch (--branch)', () => { + test('given --branch, should adopt worktree from existing branch', async () => { + const result = await performSpawn({ prompt: 'Do the task', branch: 'ppg/fix-bug' }); + + expect(vi.mocked(adoptWorktree)).toHaveBeenCalledWith(PROJECT_ROOT, 'wt-abc123', 'ppg/fix-bug'); + expect(mockedCreateWorktree).not.toHaveBeenCalled(); + expect(result.worktree.branch).toBe('ppg/fix-bug'); + }); + }); + + describe('existing worktree (--worktree)', () => { + test('given --worktree, should add agent to existing worktree', async () => { + const existingWt: WorktreeEntry = { + id: 'wt-exist1', + name: 'existing', + path: `${PROJECT_ROOT}/.worktrees/wt-exist1`, + branch: 'ppg/existing', + baseBranch: 'main', + status: 'active', + tmuxWindow: `${SESSION_NAME}:2`, + agents: {}, + createdAt: '2026-01-01T00:00:00.000Z', + }; + mockedResolveWorktree.mockReturnValue(existingWt); + + 
mockedCreateWindow.mockResolvedValue(`${SESSION_NAME}:3`); + mockedSpawnAgent.mockResolvedValue({ + id: 'ag-test0001', name: 'claude', agentType: 'claude', status: 'running', + tmuxTarget: `${SESSION_NAME}:3`, prompt: 'Do the task', startedAt: '2026-01-01T00:00:00.000Z', + sessionId: 'session-uuid-1', + }); + + const result = await performSpawn({ prompt: 'Do the task', worktree: 'wt-exist1' }); + + expect(mockedCreateWorktree).not.toHaveBeenCalled(); + expect(vi.mocked(adoptWorktree)).not.toHaveBeenCalled(); + expect(result.worktree.id).toBe('wt-exist1'); + expect(result.agents).toHaveLength(1); + }); + + test('given --worktree with no tmux window, should lazily create one and persist before spawning', async () => { + const existingWt: WorktreeEntry = { + id: 'wt-exist1', + name: 'existing', + path: `${PROJECT_ROOT}/.worktrees/wt-exist1`, + branch: 'ppg/existing', + baseBranch: 'main', + status: 'active', + tmuxWindow: '', + agents: {}, + createdAt: '2026-01-01T00:00:00.000Z', + }; + mockedResolveWorktree.mockReturnValue(existingWt); + mockedCreateWindow.mockResolvedValue(`${SESSION_NAME}:5`); + + // Capture updater functions to verify ordering + const updaters: Array<(m: Manifest) => Manifest | Promise> = []; + mockedUpdateManifest.mockImplementation(async (_root, updater) => { + updaters.push(updater); + const m = makeManifestState(); + m.worktrees['wt-exist1'] = structuredClone(existingWt); + return updater(m); + }); + + mockedSpawnAgent.mockResolvedValue({ + id: 'ag-test0001', name: 'claude', agentType: 'claude', status: 'running', + tmuxTarget: `${SESSION_NAME}:5`, prompt: 'Do the task', startedAt: '2026-01-01T00:00:00.000Z', + sessionId: 'session-uuid-1', + }); + + const result = await performSpawn({ prompt: 'Do the task', worktree: 'wt-exist1' }); + + expect(mockedEnsureSession).toHaveBeenCalledWith(SESSION_NAME); + expect(mockedCreateWindow).toHaveBeenCalledWith(SESSION_NAME, 'existing', existingWt.path); + 
expect(result.worktree.tmuxWindow).toBe(`${SESSION_NAME}:5`); + + // First updater should persist the tmux window (before agent spawn) + const windowInput = makeManifestState(); + windowInput.worktrees['wt-exist1'] = structuredClone(existingWt); + const windowResult = await updaters[0](windowInput); + expect(windowResult.worktrees['wt-exist1'].tmuxWindow).toBe(`${SESSION_NAME}:5`); + expect(Object.keys(windowResult.worktrees['wt-exist1'].agents)).toHaveLength(0); + }); + + test('given spawn failure on existing worktree with lazy window, should persist tmux window but no agents', async () => { + const existingWt: WorktreeEntry = { + id: 'wt-exist1', + name: 'existing', + path: `${PROJECT_ROOT}/.worktrees/wt-exist1`, + branch: 'ppg/existing', + baseBranch: 'main', + status: 'active', + tmuxWindow: '', + agents: {}, + createdAt: '2026-01-01T00:00:00.000Z', + }; + mockedResolveWorktree.mockReturnValue(existingWt); + mockedCreateWindow.mockResolvedValue(`${SESSION_NAME}:7`); + + let persistedTmuxWindow = ''; + mockedUpdateManifest.mockImplementation(async (_root, updater) => { + const m = makeManifestState(); + m.worktrees['wt-exist1'] = structuredClone(existingWt); + const result = await updater(m); + persistedTmuxWindow = result.worktrees['wt-exist1']?.tmuxWindow ?? 
''; + return result; + }); + + mockedSpawnAgent.mockRejectedValueOnce(new Error('spawn failed')); + + await expect(performSpawn({ prompt: 'Do work', worktree: 'wt-exist1' })) + .rejects.toThrow('spawn failed'); + + // tmux window should have been persisted before the spawn failure + expect(persistedTmuxWindow).toBe(`${SESSION_NAME}:7`); + expect(mockedUpdateManifest).toHaveBeenCalledTimes(1); + }); + + test('given unknown worktree ref, should throw WorktreeNotFoundError', async () => { + mockedResolveWorktree.mockReturnValue(undefined); + + await expect(performSpawn({ prompt: 'Do the task', worktree: 'nonexistent' })) + .rejects.toThrow('Worktree not found: nonexistent'); + }); + }); + + describe('prompt resolution', () => { + test('given --branch and --worktree, should throw INVALID_ARGS', async () => { + await expect(performSpawn({ prompt: 'Do the task', branch: 'foo', worktree: 'bar' })) + .rejects.toThrow('--branch and --worktree are mutually exclusive'); + }); + + test('given --branch and --base, should throw INVALID_ARGS', async () => { + await expect(performSpawn({ prompt: 'Do the task', branch: 'foo', base: 'bar' })) + .rejects.toThrow('--branch and --base are mutually exclusive'); + }); + + test('given no prompt/promptFile/template, should throw INVALID_ARGS', async () => { + await expect(performSpawn({})) + .rejects.toThrow('One of --prompt, --prompt-file, or --template is required'); + }); + + test('given --prompt-file, should read prompt from file', async () => { + mockedFs.readFile.mockResolvedValue('File prompt content'); + + await performSpawn({ promptFile: '/tmp/prompt.md' }); + + expect(mockedFs.readFile).toHaveBeenCalledWith('/tmp/prompt.md', 'utf-8'); + }); + + test('given --template, should load template by name', async () => { + mockedLoadTemplate.mockResolvedValue('Template content with {{BRANCH}}'); + + await performSpawn({ template: 'my-template' }); + + expect(mockedLoadTemplate).toHaveBeenCalledWith(PROJECT_ROOT, 'my-template'); + }); + 
}); +}); diff --git a/src/core/operations/spawn.ts b/src/core/operations/spawn.ts new file mode 100644 index 0000000..c4a3225 --- /dev/null +++ b/src/core/operations/spawn.ts @@ -0,0 +1,453 @@ +import fs from 'node:fs/promises'; +import { loadConfig, resolveAgentConfig } from '../config.js'; +import { readManifest, updateManifest, resolveWorktree } from '../manifest.js'; +import { getRepoRoot, getCurrentBranch, createWorktree, adoptWorktree } from '../worktree.js'; +import { setupWorktreeEnv } from '../env.js'; +import { loadTemplate, renderTemplate, type TemplateContext } from '../template.js'; +import { spawnAgent } from '../agent.js'; +import * as tmux from '../tmux.js'; +import { openTerminalWindow } from '../terminal.js'; +import { worktreeId as genWorktreeId, agentId as genAgentId, sessionId as genSessionId } from '../../lib/id.js'; +import { manifestPath } from '../../lib/paths.js'; +import { PpgError, NotInitializedError, WorktreeNotFoundError } from '../../lib/errors.js'; +import { normalizeName } from '../../lib/name.js'; +import { parseVars } from '../../lib/vars.js'; +import type { WorktreeEntry, AgentEntry } from '../../types/manifest.js'; +import type { Config, AgentConfig } from '../../types/config.js'; + +export interface PerformSpawnOptions { + name?: string; + agent?: string; + prompt?: string; + promptFile?: string; + template?: string; + var?: string[]; + base?: string; + branch?: string; + worktree?: string; + count?: number; + split?: boolean; + open?: boolean; +} + +export interface SpawnResult { + worktree: { + id: string; + name: string; + branch: string; + path: string; + tmuxWindow: string; + }; + agents: Array<{ + id: string; + tmuxTarget: string; + sessionId?: string; + }>; +} + +export async function performSpawn(options: PerformSpawnOptions): Promise { + const projectRoot = await getRepoRoot(); + const config = await loadConfig(projectRoot); + + // Verify initialized (lightweight file check instead of full manifest read) + try { + 
await fs.access(manifestPath(projectRoot));
+  } catch {
+    throw new NotInitializedError(projectRoot);
+  }
+
+  const agentConfig = resolveAgentConfig(config, options.agent);
+  const count = options.count ?? 1;
+
+  // Validate vars early — before any side effects (worktree/tmux creation)
+  const userVars = parseVars(options.var ?? []);
+
+  // Resolve prompt
+  const promptText = await resolvePrompt(options, projectRoot);
+
+  // Validate conflicting flags
+  if (options.branch && options.worktree) {
+    throw new PpgError('--branch and --worktree are mutually exclusive', 'INVALID_ARGS');
+  }
+  if (options.branch && options.base) {
+    throw new PpgError('--branch and --base are mutually exclusive (--base is for new branches)', 'INVALID_ARGS');
+  }
+
+  if (options.worktree) {
+    return spawnIntoExistingWorktree(
+      projectRoot,
+      agentConfig,
+      options.worktree,
+      promptText,
+      count,
+      options,
+      userVars,
+    );
+  } else if (options.branch) {
+    return spawnOnExistingBranch(
+      projectRoot,
+      config,
+      agentConfig,
+      options.branch,
+      promptText,
+      count,
+      options,
+      userVars,
+    );
+  } else {
+    return spawnNewWorktree(
+      projectRoot,
+      config,
+      agentConfig,
+      promptText,
+      count,
+      options,
+      userVars,
+    );
+  }
+}
+
+async function resolvePrompt(options: PerformSpawnOptions, projectRoot: string): Promise<string> {
+  if (options.prompt) return options.prompt;
+
+  if (options.promptFile) {
+    return fs.readFile(options.promptFile, 'utf-8');
+  }
+
+  if (options.template) {
+    return loadTemplate(projectRoot, options.template);
+  }
+
+  throw new PpgError('One of --prompt, --prompt-file, or --template is required', 'INVALID_ARGS');
+}
+
+interface SpawnBatchOptions {
+  projectRoot: string;
+  agentConfig: AgentConfig;
+  promptText: string;
+  userVars: Record<string, string>;
+  count: number;
+  split: boolean;
+  worktreePath: string;
+  branch: string;
+  taskName: string;
+  sessionName: string;
+  windowTarget: string;
+  windowNamePrefix: string;
+  reuseWindowForFirstAgent: boolean;
+  onAgentSpawned?: (agent: 
AgentEntry) => Promise<void>;
+}
+
+interface SpawnTargetOptions {
+  index: number;
+  split: boolean;
+  reuseWindowForFirstAgent: boolean;
+  windowTarget: string;
+  sessionName: string;
+  windowNamePrefix: string;
+  worktreePath: string;
+}
+
+async function resolveAgentTarget(opts: SpawnTargetOptions): Promise<string> {
+  if (opts.index === 0 && opts.reuseWindowForFirstAgent) {
+    return opts.windowTarget;
+  }
+  if (opts.split) {
+    const direction = opts.index % 2 === 1 ? 'horizontal' : 'vertical';
+    const pane = await tmux.splitPane(opts.windowTarget, direction, opts.worktreePath);
+    return pane.target;
+  }
+  return tmux.createWindow(opts.sessionName, `${opts.windowNamePrefix}-${opts.index}`, opts.worktreePath);
+}
+
+async function spawnAgentBatch(opts: SpawnBatchOptions): Promise<AgentEntry[]> {
+  const agents: AgentEntry[] = [];
+  for (let i = 0; i < opts.count; i++) {
+    const aId = genAgentId();
+    const target = await resolveAgentTarget({
+      index: i,
+      split: opts.split,
+      reuseWindowForFirstAgent: opts.reuseWindowForFirstAgent,
+      windowTarget: opts.windowTarget,
+      sessionName: opts.sessionName,
+      windowNamePrefix: opts.windowNamePrefix,
+      worktreePath: opts.worktreePath,
+    });
+
+    const ctx: TemplateContext = {
+      WORKTREE_PATH: opts.worktreePath,
+      BRANCH: opts.branch,
+      AGENT_ID: aId,
+      PROJECT_ROOT: opts.projectRoot,
+      TASK_NAME: opts.taskName,
+      PROMPT: opts.promptText,
+      ...opts.userVars,
+    };
+
+    const agentEntry = await spawnAgent({
+      agentId: aId,
+      agentConfig: opts.agentConfig,
+      prompt: renderTemplate(opts.promptText, ctx),
+      worktreePath: opts.worktreePath,
+      tmuxTarget: target,
+      projectRoot: opts.projectRoot,
+      branch: opts.branch,
+      sessionId: genSessionId(),
+    });
+
+    agents.push(agentEntry);
+    if (opts.onAgentSpawned) {
+      await opts.onAgentSpawned(agentEntry);
+    }
+  }
+
+  return agents;
+}
+
+function toSpawnResult(
+  worktree: { id: string; name: string; branch: string; path: string; tmuxWindow: string },
+  agents: AgentEntry[],
+): SpawnResult {
+  return {
+    worktree,
+ agents: agents.map((a) => ({ + id: a.id, + tmuxTarget: a.tmuxTarget, + sessionId: a.sessionId, + })), + }; +} + +async function spawnNewWorktree( + projectRoot: string, + config: Config, + agentConfig: AgentConfig, + promptText: string, + count: number, + options: PerformSpawnOptions, + userVars: Record, +): Promise { + const baseBranch = options.base ?? await getCurrentBranch(projectRoot); + const wtId = genWorktreeId(); + const name = options.name ? normalizeName(options.name, wtId) : wtId; + const branchName = `ppg/${name}`; + + // Create git worktree + const wtPath = await createWorktree(projectRoot, wtId, { + branch: branchName, + base: baseBranch, + }); + + // Setup env + await setupWorktreeEnv(projectRoot, wtPath, config); + + // Ensure tmux session (manifest is the source of truth for session name) + const manifest = await readManifest(projectRoot); + const sessionName = manifest.sessionName; + await tmux.ensureSession(sessionName); + + // Create tmux window + const windowTarget = await tmux.createWindow(sessionName, name, wtPath); + + // Register skeleton worktree in manifest before spawning agents + // so partial failures leave a record for cleanup + const worktreeEntry: WorktreeEntry = { + id: wtId, + name, + path: wtPath, + branch: branchName, + baseBranch, + status: 'active', + tmuxWindow: windowTarget, + agents: {}, + createdAt: new Date().toISOString(), + }; + + await updateManifest(projectRoot, (m) => { + m.worktrees[wtId] = worktreeEntry; + return m; + }); + + // Spawn agents — one tmux window per agent (default), or split panes (--split) + const agents = await spawnAgentBatch({ + projectRoot, + agentConfig, + promptText, + userVars, + count, + split: options.split === true, + worktreePath: wtPath, + branch: branchName, + taskName: name, + sessionName, + windowTarget, + windowNamePrefix: name, + reuseWindowForFirstAgent: true, + onAgentSpawned: async (agentEntry) => { + await updateManifest(projectRoot, (m) => { + if (m.worktrees[wtId]) { + 
m.worktrees[wtId].agents[agentEntry.id] = agentEntry; + } + return m; + }); + }, + }); + + // Only open Terminal window when explicitly requested via --open (fire-and-forget) + if (options.open === true) { + openTerminalWindow(sessionName, windowTarget, name).catch(() => {}); + } + + return toSpawnResult( + { id: wtId, name, branch: branchName, path: wtPath, tmuxWindow: windowTarget }, + agents, + ); +} + +async function spawnOnExistingBranch( + projectRoot: string, + config: Config, + agentConfig: AgentConfig, + branch: string, + promptText: string, + count: number, + options: PerformSpawnOptions, + userVars: Record, +): Promise { + const baseBranch = await getCurrentBranch(projectRoot); + const wtId = genWorktreeId(); + + // Derive name from branch if --name not provided (strip ppg/ prefix if present) + const derivedName = branch.startsWith('ppg/') ? branch.slice(4) : branch; + const name = options.name ? normalizeName(options.name, wtId) : normalizeName(derivedName, wtId); + + // Create git worktree from existing branch (no -b flag) + const wtPath = await adoptWorktree(projectRoot, wtId, branch); + + // Setup env + await setupWorktreeEnv(projectRoot, wtPath, config); + + // Ensure tmux session + const manifest = await readManifest(projectRoot); + const sessionName = manifest.sessionName; + await tmux.ensureSession(sessionName); + + // Create tmux window + const windowTarget = await tmux.createWindow(sessionName, name, wtPath); + + // Register worktree in manifest + const worktreeEntry: WorktreeEntry = { + id: wtId, + name, + path: wtPath, + branch, + baseBranch, + status: 'active', + tmuxWindow: windowTarget, + agents: {}, + createdAt: new Date().toISOString(), + }; + + await updateManifest(projectRoot, (m) => { + m.worktrees[wtId] = worktreeEntry; + return m; + }); + + const agents = await spawnAgentBatch({ + projectRoot, + agentConfig, + promptText, + userVars, + count, + split: options.split === true, + worktreePath: wtPath, + branch, + taskName: name, + 
sessionName, + windowTarget, + windowNamePrefix: name, + reuseWindowForFirstAgent: true, + onAgentSpawned: async (agentEntry) => { + await updateManifest(projectRoot, (m) => { + if (m.worktrees[wtId]) { + m.worktrees[wtId].agents[agentEntry.id] = agentEntry; + } + return m; + }); + }, + }); + + if (options.open === true) { + openTerminalWindow(sessionName, windowTarget, name).catch(() => {}); + } + + return toSpawnResult( + { id: wtId, name, branch, path: wtPath, tmuxWindow: windowTarget }, + agents, + ); +} + +async function spawnIntoExistingWorktree( + projectRoot: string, + agentConfig: AgentConfig, + worktreeRef: string, + promptText: string, + count: number, + options: PerformSpawnOptions, + userVars: Record, +): Promise { + const manifest = await readManifest(projectRoot); + const wt = resolveWorktree(manifest, worktreeRef); + + if (!wt) throw new WorktreeNotFoundError(worktreeRef); + + // Lazily create tmux window if worktree has none (standalone worktree) + let windowTarget = wt.tmuxWindow; + if (!windowTarget) { + await tmux.ensureSession(manifest.sessionName); + windowTarget = await tmux.createWindow(manifest.sessionName, wt.name, wt.path); + + // Persist tmux window before spawning agents so partial failures are tracked. + await updateManifest(projectRoot, (m) => { + const mWt = m.worktrees[wt.id]; + if (!mWt) return m; + mWt.tmuxWindow = windowTarget; + return m; + }); + } + + const agents = await spawnAgentBatch({ + projectRoot, + agentConfig, + promptText, + userVars, + count, + split: options.split === true, + worktreePath: wt.path, + branch: wt.branch, + taskName: wt.name, + sessionName: manifest.sessionName, + windowTarget, + windowNamePrefix: `${wt.name}-agent`, + // For existing worktrees, only reuse the primary pane when explicitly splitting. 
+    reuseWindowForFirstAgent: options.split === true,
+    onAgentSpawned: async (agentEntry) => {
+      await updateManifest(projectRoot, (m) => {
+        const mWt = m.worktrees[wt.id];
+        if (!mWt) return m;
+        mWt.agents[agentEntry.id] = agentEntry;
+        return m;
+      });
+    },
+  });
+
+  // Only open Terminal window when explicitly requested via --open (fire-and-forget)
+  if (options.open === true) {
+    openTerminalWindow(manifest.sessionName, windowTarget, wt.name).catch(() => {});
+  }
+
+  return toSpawnResult(
+    { id: wt.id, name: wt.name, branch: wt.branch, path: wt.path, tmuxWindow: windowTarget },
+    agents,
+  );
+}
diff --git a/src/core/pr.ts b/src/core/pr.ts
index 2849401..1411c43 100644
--- a/src/core/pr.ts
+++ b/src/core/pr.ts
@@ -1,8 +1,106 @@
 import { execa } from 'execa';
 import { execaEnv } from '../lib/env.js';
+import { PpgError, GhNotFoundError } from '../lib/errors.js';
+import { updateManifest } from './manifest.js';
+import type { WorktreeEntry } from '../types/manifest.js';
 
 export type PrState = 'MERGED' | 'OPEN' | 'CLOSED' | 'UNKNOWN';
 
+// GitHub PR body limit is 65536 chars; leave room for truncation notice
+const MAX_BODY_LENGTH = 60_000;
+
+/** Build PR body from agent prompts, with truncation. */
+export async function buildBodyFromResults(agents: { id: string; prompt: string }[]): Promise<string> {
+  if (agents.length === 0) return '';
+  const sections = agents.map((a) => `## Agent: ${a.id}\n\n${a.prompt}`);
+  return truncateBody(sections.join('\n\n---\n\n'));
+}
+
+/** Truncate body to stay within GitHub's PR body size limit. 
*/ +export function truncateBody(body: string): string { + if (body.length <= MAX_BODY_LENGTH) return body; + return body.slice(0, MAX_BODY_LENGTH) + '\n\n---\n\n*[Truncated — full results available in `.ppg/results/`]*'; +} + +export interface CreatePrOptions { + title?: string; + body?: string; + draft?: boolean; +} + +export interface CreatePrResult { + worktreeId: string; + branch: string; + baseBranch: string; + prUrl: string; +} + +/** Push branch and create a GitHub PR for a worktree. Stores prUrl in manifest. */ +export async function createWorktreePr( + projectRoot: string, + wt: WorktreeEntry, + options: CreatePrOptions = {}, +): Promise { + // Verify gh is available + try { + await execa('gh', ['--version'], execaEnv); + } catch { + throw new GhNotFoundError(); + } + + // Push the worktree branch + try { + await execa('git', ['push', '-u', 'origin', wt.branch], { ...execaEnv, cwd: projectRoot }); + } catch (err) { + throw new PpgError( + `Failed to push branch ${wt.branch}: ${err instanceof Error ? err.message : err}`, + 'INVALID_ARGS', + ); + } + + // Build PR title and body + const prTitle = options.title ?? wt.name; + const prBody = options.body ?? await buildBodyFromResults(Object.values(wt.agents)); + + // Build gh pr create args + const ghArgs = [ + 'pr', 'create', + '--head', wt.branch, + '--base', wt.baseBranch, + '--title', prTitle, + '--body', prBody, + ]; + if (options.draft) { + ghArgs.push('--draft'); + } + + let prUrl: string; + try { + const result = await execa('gh', ghArgs, { ...execaEnv, cwd: projectRoot }); + prUrl = result.stdout.trim(); + } catch (err) { + throw new PpgError( + `Failed to create PR: ${err instanceof Error ? 
err.message : err}`, + 'INVALID_ARGS', + ); + } + + // Store PR URL in manifest + await updateManifest(projectRoot, (m) => { + if (m.worktrees[wt.id]) { + m.worktrees[wt.id].prUrl = prUrl; + } + return m; + }); + + return { + worktreeId: wt.id, + branch: wt.branch, + baseBranch: wt.baseBranch, + prUrl, + }; +} + /** * Check the GitHub PR state for a given branch. * Uses `gh pr view` to query the PR associated with the branch. diff --git a/src/core/prompt.test.ts b/src/core/prompt.test.ts new file mode 100644 index 0000000..4857088 --- /dev/null +++ b/src/core/prompt.test.ts @@ -0,0 +1,127 @@ +import fs from 'node:fs/promises'; +import os from 'node:os'; +import path from 'node:path'; +import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest'; + +let tmpDir: string; +let globalDir: string; + +vi.mock('../lib/paths.js', async () => { + const actual = await vi.importActual('../lib/paths.js'); + return { + ...actual, + globalPromptsDir: () => path.join(globalDir, 'prompts'), + }; +}); + +// Dynamic import after mock setup +const { listPromptsWithSource, enrichEntryMetadata } = await import('./prompt.js'); + +beforeEach(async () => { + tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'ppg-prompt-')); + globalDir = path.join(tmpDir, 'global'); + await fs.mkdir(path.join(globalDir, 'prompts'), { recursive: true }); +}); + +afterEach(async () => { + await fs.rm(tmpDir, { recursive: true, force: true }); +}); + +describe('listPromptsWithSource', () => { + test('given no directories, should return empty array', async () => { + const entries = await listPromptsWithSource(tmpDir); + expect(entries).toEqual([]); + }); + + test('given local prompts, should return with local source', async () => { + const localDir = path.join(tmpDir, '.ppg', 'prompts'); + await fs.mkdir(localDir, { recursive: true }); + await fs.writeFile(path.join(localDir, 'review.md'), '# Review\n'); + await fs.writeFile(path.join(localDir, 'fix.md'), '# Fix\n'); + + const entries = await 
listPromptsWithSource(tmpDir); + expect(entries).toEqual([ + { name: 'fix', source: 'local' }, + { name: 'review', source: 'local' }, + ]); + }); + + test('given global prompts, should return with global source', async () => { + await fs.writeFile(path.join(globalDir, 'prompts', 'shared.md'), '# Shared\n'); + + const entries = await listPromptsWithSource(tmpDir); + expect(entries).toEqual([{ name: 'shared', source: 'global' }]); + }); + + test('given same name in local and global, should prefer local', async () => { + const localDir = path.join(tmpDir, '.ppg', 'prompts'); + await fs.mkdir(localDir, { recursive: true }); + await fs.writeFile(path.join(localDir, 'shared.md'), '# Local\n'); + await fs.writeFile(path.join(globalDir, 'prompts', 'shared.md'), '# Global\n'); + + const entries = await listPromptsWithSource(tmpDir); + expect(entries).toEqual([{ name: 'shared', source: 'local' }]); + }); + + test('given non-.md files, should ignore them', async () => { + const localDir = path.join(tmpDir, '.ppg', 'prompts'); + await fs.mkdir(localDir, { recursive: true }); + await fs.writeFile(path.join(localDir, 'valid.md'), '# Valid\n'); + await fs.writeFile(path.join(localDir, 'readme.txt'), 'not a prompt'); + + const entries = await listPromptsWithSource(tmpDir); + expect(entries).toEqual([{ name: 'valid', source: 'local' }]); + }); +}); + +describe('enrichEntryMetadata', () => { + test('given markdown file, should extract description from first line', async () => { + const dir = path.join(tmpDir, 'md'); + await fs.mkdir(dir, { recursive: true }); + await fs.writeFile(path.join(dir, 'task.md'), '# My Task\n\nBody here\n'); + + const result = await enrichEntryMetadata('task', 'local', dir, dir); + expect(result.description).toBe('My Task'); + }); + + test('given template variables, should extract unique vars', async () => { + const dir = path.join(tmpDir, 'md'); + await fs.mkdir(dir, { recursive: true }); + await fs.writeFile( + path.join(dir, 'task.md'), + '{{NAME}} and 
{{NAME}} and {{OTHER}}\n', + ); + + const result = await enrichEntryMetadata('task', 'local', dir, dir); + expect(result.variables).toEqual(['NAME', 'OTHER']); + }); + + test('given no variables, should return empty array', async () => { + const dir = path.join(tmpDir, 'md'); + await fs.mkdir(dir, { recursive: true }); + await fs.writeFile(path.join(dir, 'plain.md'), '# Plain text\n'); + + const result = await enrichEntryMetadata('plain', 'local', dir, dir); + expect(result.variables).toEqual([]); + }); + + test('given global source, should read from global dir', async () => { + const localDir = path.join(tmpDir, 'local'); + const gDir = path.join(tmpDir, 'gbl'); + await fs.mkdir(gDir, { recursive: true }); + await fs.writeFile(path.join(gDir, 'task.md'), '# Global Task\n'); + + const result = await enrichEntryMetadata('task', 'global', localDir, gDir); + expect(result.description).toBe('Global Task'); + expect(result.source).toBe('global'); + }); + + test('given empty first line, should skip to first non-empty line', async () => { + const dir = path.join(tmpDir, 'md'); + await fs.mkdir(dir, { recursive: true }); + await fs.writeFile(path.join(dir, 'task.md'), '\n\n# Actual Title\n'); + + const result = await enrichEntryMetadata('task', 'local', dir, dir); + expect(result.description).toBe('Actual Title'); + }); +}); diff --git a/src/core/prompt.ts b/src/core/prompt.ts new file mode 100644 index 0000000..8371fb1 --- /dev/null +++ b/src/core/prompt.ts @@ -0,0 +1,63 @@ +import fs from 'node:fs/promises'; +import path from 'node:path'; +import { promptsDir, globalPromptsDir } from '../lib/paths.js'; + +export interface PromptEntry { + name: string; + source: 'local' | 'global'; +} + +export interface EnrichedEntry { + name: string; + description: string; + variables: string[]; + source: 'local' | 'global'; + [key: string]: unknown; +} + +async function readMdNames(dir: string): Promise { + try { + const files = await fs.readdir(dir); + return files.filter((f) => 
f.endsWith('.md')).map((f) => f.replace(/\.md$/, '')).sort(); + } catch { + return []; + } +} + +export async function listPromptsWithSource(projectRoot: string): Promise { + const localNames = await readMdNames(promptsDir(projectRoot)); + const globalNames = await readMdNames(globalPromptsDir()); + + const seen = new Set(); + const result: PromptEntry[] = []; + + for (const name of localNames) { + seen.add(name); + result.push({ name, source: 'local' }); + } + + for (const name of globalNames) { + if (!seen.has(name)) { + result.push({ name, source: 'global' }); + } + } + + return result; +} + +export async function enrichEntryMetadata( + name: string, + source: 'local' | 'global', + localDir: string, + globalDir: string, +): Promise { + const dir = source === 'local' ? localDir : globalDir; + const filePath = path.join(dir, `${name}.md`); + const content = await fs.readFile(filePath, 'utf-8'); + const firstLine = content.split('\n').find((l) => l.trim().length > 0) ?? ''; + const description = firstLine.replace(/^#+\s*/, '').trim(); + const vars = [...content.matchAll(/\{\{(\w+)\}\}/g)].map((m) => m[1]); + const uniqueVars = [...new Set(vars)]; + + return { name, description, variables: uniqueVars, source }; +} diff --git a/src/core/serve.test.ts b/src/core/serve.test.ts new file mode 100644 index 0000000..1cee2ce --- /dev/null +++ b/src/core/serve.test.ts @@ -0,0 +1,103 @@ +import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest'; +import fs from 'node:fs/promises'; +import path from 'node:path'; + +vi.mock('../lib/paths.js', async (importOriginal) => { + const actual = await importOriginal() as Record; + return { + ...actual, + servePidPath: vi.fn((root: string) => path.join(root, '.ppg', 'serve.pid')), + serveJsonPath: vi.fn((root: string) => path.join(root, '.ppg', 'serve.json')), + serveLogPath: vi.fn((root: string) => path.join(root, '.ppg', 'logs', 'serve.log')), + logsDir: vi.fn((root: string) => path.join(root, '.ppg', 'logs')), + }; 
+}); + +const { getServePid, isServeRunning, getServeInfo } = await import('./serve.js'); + +beforeEach(() => { + vi.clearAllMocks(); +}); + +afterEach(() => { + vi.restoreAllMocks(); +}); + +describe('getServePid', () => { + test('given no PID file, should return null', async () => { + vi.spyOn(fs, 'readFile').mockRejectedValue(Object.assign(new Error('ENOENT'), { code: 'ENOENT' })); + + const pid = await getServePid('/fake/project'); + expect(pid).toBeNull(); + }); + + test('given PID file with valid alive PID, should return the PID', async () => { + vi.spyOn(fs, 'readFile').mockResolvedValue(String(process.pid)); + + const pid = await getServePid('/fake/project'); + expect(pid).toBe(process.pid); + }); + + test('given PID file with dead process, should clean up and return null', async () => { + vi.spyOn(fs, 'readFile').mockResolvedValue('999999999'); + vi.spyOn(fs, 'unlink').mockResolvedValue(undefined); + + const pid = await getServePid('/fake/project'); + expect(pid).toBeNull(); + expect(fs.unlink).toHaveBeenCalledWith('/fake/project/.ppg/serve.pid'); + }); + + test('given PID file with non-numeric content, should clean up and return null', async () => { + vi.spyOn(fs, 'readFile').mockResolvedValue('not-a-number'); + vi.spyOn(fs, 'unlink').mockResolvedValue(undefined); + + const pid = await getServePid('/fake/project'); + expect(pid).toBeNull(); + expect(fs.unlink).toHaveBeenCalledWith('/fake/project/.ppg/serve.pid'); + }); +}); + +describe('isServeRunning', () => { + test('given no PID file, should return false', async () => { + vi.spyOn(fs, 'readFile').mockRejectedValue(Object.assign(new Error('ENOENT'), { code: 'ENOENT' })); + + const running = await isServeRunning('/fake/project'); + expect(running).toBe(false); + }); + + test('given valid alive PID, should return true', async () => { + vi.spyOn(fs, 'readFile').mockResolvedValue(String(process.pid)); + + const running = await isServeRunning('/fake/project'); + expect(running).toBe(true); + }); +}); + 
+describe('getServeInfo', () => { + test('given no serve.json, should return null', async () => { + vi.spyOn(fs, 'readFile').mockRejectedValue(Object.assign(new Error('ENOENT'), { code: 'ENOENT' })); + + const info = await getServeInfo('/fake/project'); + expect(info).toBeNull(); + }); + + test('given valid serve.json, should return parsed info', async () => { + const serveInfo = { + pid: 12345, + port: 3000, + host: 'localhost', + startedAt: '2026-01-01T00:00:00.000Z', + }; + vi.spyOn(fs, 'readFile').mockResolvedValue(JSON.stringify(serveInfo)); + + const info = await getServeInfo('/fake/project'); + expect(info).toEqual(serveInfo); + }); + + test('given malformed JSON in serve.json, should return null', async () => { + vi.spyOn(fs, 'readFile').mockResolvedValue('not valid json {{{'); + + const info = await getServeInfo('/fake/project'); + expect(info).toBeNull(); + }); +}); diff --git a/src/core/serve.ts b/src/core/serve.ts new file mode 100644 index 0000000..e167096 --- /dev/null +++ b/src/core/serve.ts @@ -0,0 +1,130 @@ +import fs from 'node:fs/promises'; +import { createReadStream } from 'node:fs'; +import path from 'node:path'; +import readline from 'node:readline'; +import { serveJsonPath, serveLogPath, servePidPath, logsDir } from '../lib/paths.js'; + +export interface ServeInfo { + pid: number; + port: number; + host: string; + startedAt: string; +} + +export async function runServeDaemon(projectRoot: string, port: number, host: string): Promise { + const pidPath = servePidPath(projectRoot); + const jsonPath = serveJsonPath(projectRoot); + + // Write PID file + await fs.mkdir(path.dirname(pidPath), { recursive: true }); + await fs.writeFile(pidPath, String(process.pid), 'utf-8'); + + // Write serve.json with connection info + const info: ServeInfo = { + pid: process.pid, + port, + host, + startedAt: new Date().toISOString(), + }; + await fs.writeFile(jsonPath, JSON.stringify(info, null, 2), 'utf-8'); + + // Ensure logs directory + await 
fs.mkdir(logsDir(projectRoot), { recursive: true }); + + await logServe(projectRoot, `Serve daemon starting (PID: ${process.pid})`); + await logServe(projectRoot, `Listening on ${host}:${port}`); + + // Clean shutdown on SIGTERM/SIGINT + const cleanup = async () => { + await logServe(projectRoot, 'Serve daemon stopping'); + try { await fs.unlink(pidPath); } catch { /* already gone */ } + try { await fs.unlink(jsonPath); } catch { /* already gone */ } + process.exit(0); + }; + process.on('SIGTERM', cleanup); + process.on('SIGINT', cleanup); + + // Placeholder: the actual HTTP server will be implemented by issue #63. + // For now, keep the daemon alive so the lifecycle works end-to-end. + await logServe(projectRoot, 'Serve daemon ready (waiting for server implementation)'); + + // Keep alive + await new Promise(() => {}); +} + +export async function isServeRunning(projectRoot: string): Promise { + return (await getServePid(projectRoot)) !== null; +} + +export async function getServePid(projectRoot: string): Promise { + const pidPath = servePidPath(projectRoot); + let raw: string; + try { + raw = await fs.readFile(pidPath, 'utf-8'); + } catch { + return null; + } + const pid = parseInt(raw, 10); + if (isNaN(pid)) { + await cleanupPidFile(pidPath); + return null; + } + try { + process.kill(pid, 0); + return pid; + } catch { + await cleanupPidFile(pidPath); + return null; + } +} + +export async function getServeInfo(projectRoot: string): Promise { + const jsonPath = serveJsonPath(projectRoot); + try { + const raw = await fs.readFile(jsonPath, 'utf-8'); + return JSON.parse(raw) as ServeInfo; + } catch { + return null; + } +} + +async function cleanupPidFile(pidPath: string): Promise { + try { await fs.unlink(pidPath); } catch { /* already gone */ } +} + +export async function logServe(projectRoot: string, message: string): Promise { + const logPath = serveLogPath(projectRoot); + const timestamp = new Date().toISOString(); + const line = `[${timestamp}] ${message}\n`; + + 
process.stdout.write(line); + + try { + await fs.appendFile(logPath, line, 'utf-8'); + } catch { + await fs.mkdir(logsDir(projectRoot), { recursive: true }); + await fs.appendFile(logPath, line, 'utf-8'); + } +} + +export async function readServeLog(projectRoot: string, lines: number = 20): Promise { + const logPath = serveLogPath(projectRoot); + try { + await fs.access(logPath); + } catch { + return []; + } + const result: string[] = []; + const rl = readline.createInterface({ + input: createReadStream(logPath, { encoding: 'utf-8' }), + crlfDelay: Infinity, + }); + for await (const line of rl) { + if (!line) continue; + result.push(line); + if (result.length > lines) { + result.shift(); + } + } + return result; +} diff --git a/src/core/spawn.ts b/src/core/spawn.ts new file mode 100644 index 0000000..16680b7 --- /dev/null +++ b/src/core/spawn.ts @@ -0,0 +1,227 @@ +import { loadConfig, resolveAgentConfig } from './config.js'; +import { requireManifest, updateManifest } from './manifest.js'; +import { getCurrentBranch, createWorktree } from './worktree.js'; +import { setupWorktreeEnv } from './env.js'; +import { loadTemplate, renderTemplate, type TemplateContext } from './template.js'; +import { spawnAgent } from './agent.js'; +import * as tmux from './tmux.js'; +import { worktreeId as genWorktreeId, agentId as genAgentId, sessionId as genSessionId } from '../lib/id.js'; +import { PpgError } from '../lib/errors.js'; +import { normalizeName } from '../lib/name.js'; +import type { WorktreeEntry, AgentEntry } from '../types/manifest.js'; +import type { AgentConfig } from '../types/config.js'; + +// ─── Agent Batch Spawning ──────────────────────────────────────────────────── + +export interface SpawnBatchOptions { + projectRoot: string; + agentConfig: AgentConfig; + promptText: string; + userVars: Record; + count: number; + split: boolean; + worktreePath: string; + branch: string; + taskName: string; + sessionName: string; + windowTarget: string; + windowNamePrefix: 
string;
  reuseWindowForFirstAgent: boolean;
  onAgentSpawned?: (agent: AgentEntry) => Promise<void>;
}

/** Inputs needed to decide where (pane or window) agent number `index` runs. */
interface SpawnTargetOptions {
  index: number;
  split: boolean;
  reuseWindowForFirstAgent: boolean;
  windowTarget: string;
  sessionName: string;
  windowNamePrefix: string;
  worktreePath: string;
}

/** Pick the tmux target for one agent: reuse, split, or a dedicated window. */
async function resolveAgentTarget(opts: SpawnTargetOptions): Promise<string> {
  const { index } = opts;

  // Agent #0 may take over the worktree's existing window instead of opening a new one.
  if (index === 0 && opts.reuseWindowForFirstAgent) {
    return opts.windowTarget;
  }

  // Split mode: alternate horizontal/vertical panes inside the same window.
  if (opts.split) {
    const orientation = index % 2 === 1 ? 'horizontal' : 'vertical';
    const pane = await tmux.splitPane(opts.windowTarget, orientation, opts.worktreePath);
    return pane.target;
  }

  // Default: one dedicated window per agent, suffixed by its index.
  return tmux.createWindow(opts.sessionName, `${opts.windowNamePrefix}-${index}`, opts.worktreePath);
}

/**
 * Spawn `count` agents sequentially into the given worktree/window.
 * Each successful spawn is reported via `onAgentSpawned` before the next begins,
 * so partial failures still leave earlier agents persisted by the caller.
 */
export async function spawnAgentBatch(opts: SpawnBatchOptions): Promise<AgentEntry[]> {
  const spawned: AgentEntry[] = [];

  for (let index = 0; index < opts.count; index++) {
    const id = genAgentId();
    const tmuxTarget = await resolveAgentTarget({
      index,
      split: opts.split,
      reuseWindowForFirstAgent: opts.reuseWindowForFirstAgent,
      windowTarget: opts.windowTarget,
      sessionName: opts.sessionName,
      windowNamePrefix: opts.windowNamePrefix,
      worktreePath: opts.worktreePath,
    });

    // NOTE(review): PROMPT is both the template being rendered and a context
    // variable, so a prompt containing {{PROMPT}} substitutes itself — confirm
    // this recursion is intended.
    const context: TemplateContext = {
      WORKTREE_PATH: opts.worktreePath,
      BRANCH: opts.branch,
      AGENT_ID: id,
      PROJECT_ROOT: opts.projectRoot,
      TASK_NAME: opts.taskName,
      PROMPT: opts.promptText,
      ...opts.userVars,
    };

    const entry = await spawnAgent({
      agentId: id,
      agentConfig: opts.agentConfig,
      prompt: renderTemplate(opts.promptText, context),
      worktreePath: opts.worktreePath,
      tmuxTarget,
      projectRoot: opts.projectRoot,
      branch: opts.branch,
      sessionId: genSessionId(),
    });

    spawned.push(entry);
    await opts.onAgentSpawned?.(entry);
  }

  return spawned;
}

// ─── New Worktree Spawn ────────────────────────────────────────────────────── 

export interface
SpawnNewWorktreeOptions { + projectRoot: string; + name: string; + promptText: string; + userVars?: Record; + agentName?: string; + baseBranch?: string; + count?: number; + split?: boolean; +} + +export interface SpawnNewWorktreeResult { + worktreeId: string; + name: string; + branch: string; + path: string; + tmuxWindow: string; + agents: AgentEntry[]; +} + +export async function spawnNewWorktree( + opts: SpawnNewWorktreeOptions, +): Promise { + const { projectRoot } = opts; + const config = await loadConfig(projectRoot); + const agentConfig = resolveAgentConfig(config, opts.agentName); + const count = opts.count ?? 1; + const userVars = opts.userVars ?? {}; + const manifest = await requireManifest(projectRoot); + const sessionName = manifest.sessionName; + + const baseBranch = opts.baseBranch ?? await getCurrentBranch(projectRoot); + const wtId = genWorktreeId(); + const name = normalizeName(opts.name, wtId); + const branchName = `ppg/${name}`; + + // Create git worktree + const wtPath = await createWorktree(projectRoot, wtId, { + branch: branchName, + base: baseBranch, + }); + + // Setup env (copy .env, symlink node_modules) + await setupWorktreeEnv(projectRoot, wtPath, config); + + // Ensure tmux session (manifest is the source of truth for session name) + await tmux.ensureSession(sessionName); + + // Create tmux window + const windowTarget = await tmux.createWindow(sessionName, name, wtPath); + + // Register skeleton worktree in manifest before spawning agents + // so partial failures leave a record for cleanup + const worktreeEntry: WorktreeEntry = { + id: wtId, + name, + path: wtPath, + branch: branchName, + baseBranch, + status: 'active', + tmuxWindow: windowTarget, + agents: {}, + createdAt: new Date().toISOString(), + }; + + await updateManifest(projectRoot, (m) => { + m.worktrees[wtId] = worktreeEntry; + return m; + }); + + // Spawn agents + const agents = await spawnAgentBatch({ + projectRoot, + agentConfig, + promptText: opts.promptText, + userVars, + 
count, + split: opts.split === true, + worktreePath: wtPath, + branch: branchName, + taskName: name, + sessionName, + windowTarget, + windowNamePrefix: name, + reuseWindowForFirstAgent: true, + onAgentSpawned: async (agentEntry) => { + await updateManifest(projectRoot, (m) => { + if (m.worktrees[wtId]) { + m.worktrees[wtId].agents[agentEntry.id] = agentEntry; + } + return m; + }); + }, + }); + + return { + worktreeId: wtId, + name, + branch: branchName, + path: wtPath, + tmuxWindow: windowTarget, + agents, + }; +} + +// ─── Prompt Resolution ─────────────────────────────────────────────────────── + +export interface PromptSource { + prompt?: string; + template?: string; +} + +export async function resolvePromptText( + source: PromptSource, + projectRoot: string, +): Promise { + if (source.prompt) return source.prompt; + + if (source.template) { + return loadTemplate(projectRoot, source.template); + } + + throw new PpgError( + 'Either "prompt" or "template" is required', + 'INVALID_ARGS', + ); +} diff --git a/src/core/tls.ts b/src/core/tls.ts new file mode 100644 index 0000000..e405d00 --- /dev/null +++ b/src/core/tls.ts @@ -0,0 +1,100 @@ +import fs from 'node:fs/promises'; +import os from 'node:os'; +import path from 'node:path'; +import { generateKeyPairSync, X509Certificate } from 'node:crypto'; +import { execa } from 'execa'; +import { ppgDir } from '../lib/paths.js'; +import { execaEnv } from '../lib/env.js'; + +export interface TlsCredentials { + key: string; + cert: string; + fingerprint: string; +} + +export async function ensureTlsCerts(projectRoot: string): Promise { + const certsDir = path.join(ppgDir(projectRoot), 'certs'); + const keyPath = path.join(certsDir, 'server.key'); + const certPath = path.join(certsDir, 'server.crt'); + + try { + const [key, cert] = await Promise.all([ + fs.readFile(keyPath, 'utf-8'), + fs.readFile(certPath, 'utf-8'), + ]); + const fingerprint = getCertFingerprint(cert); + return { key, cert, fingerprint }; + } catch (error) { 
+ if (!hasErrorCode(error, 'ENOENT')) { + throw error; + } + } + + await fs.mkdir(certsDir, { recursive: true }); + + const { privateKey } = generateKeyPairSync('ec', { + namedCurve: 'prime256v1', + }); + + const keyPem = privateKey.export({ type: 'sec1', format: 'pem' }) as string; + const certPem = await generateSelfSignedCert(keyPem, buildSubjectAltName()); + + await Promise.all([ + fs.writeFile(keyPath, keyPem, { mode: 0o600 }), + fs.writeFile(certPath, certPem), + ]); + + const fingerprint = getCertFingerprint(certPem); + return { key: keyPem, cert: certPem, fingerprint }; +} + +async function generateSelfSignedCert(keyPem: string, subjectAltName: string): Promise { + const tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'ppg-tls-')); + const tmpKey = path.join(tmpDir, 'server.key'); + const tmpCert = path.join(tmpDir, 'server.crt'); + + try { + await fs.writeFile(tmpKey, keyPem, { mode: 0o600 }); + await execa('openssl', [ + 'req', '-new', '-x509', + '-key', tmpKey, + '-out', tmpCert, + '-days', '365', + '-subj', '/CN=ppg-server', + '-addext', subjectAltName, + ], { ...execaEnv, stdio: 'pipe' }); + return await fs.readFile(tmpCert, 'utf-8'); + } finally { + await fs.rm(tmpDir, { recursive: true, force: true }); + } +} + +function buildSubjectAltName(): string { + const sanEntries = new Set([ + 'DNS:localhost', + 'IP:127.0.0.1', + 'IP:::1', + ]); + + for (const addresses of Object.values(os.networkInterfaces())) { + for (const iface of addresses ?? 
[]) { + if (iface.internal) continue; + if (iface.family !== 'IPv4' && iface.family !== 'IPv6') continue; + sanEntries.add(`IP:${iface.address}`); + } + } + + return `subjectAltName=${Array.from(sanEntries).join(',')}`; +} + +function hasErrorCode(error: unknown, code: string): boolean { + return typeof error === 'object' + && error !== null + && 'code' in error + && (error as { code?: unknown }).code === code; +} + +export function getCertFingerprint(certPem: string): string { + const x509 = new X509Certificate(certPem); + return x509.fingerprint256; +} diff --git a/src/lib/errors.ts b/src/lib/errors.ts index 0af4143..4cbf1dd 100644 --- a/src/lib/errors.ts +++ b/src/lib/errors.ts @@ -86,6 +86,36 @@ export class GhNotFoundError extends PpgError { } } +export class PromptNotFoundError extends PpgError { + constructor(agentId: string) { + super( + `Could not read original prompt for agent ${agentId}. Use --prompt to provide one.`, + 'PROMPT_NOT_FOUND', + ); + this.name = 'PromptNotFoundError'; + } +} + +export class DuplicateTokenError extends PpgError { + constructor(label: string) { + super( + `Token with label "${label}" already exists`, + 'DUPLICATE_TOKEN', + ); + this.name = 'DuplicateTokenError'; + } +} + +export class AuthCorruptError extends PpgError { + constructor(filePath: string) { + super( + `Auth data is corrupt or unreadable: ${filePath}`, + 'AUTH_CORRUPT', + ); + this.name = 'AuthCorruptError'; + } +} + export class UnmergedWorkError extends PpgError { constructor(names: string[]) { const list = names.map((n) => ` ${n}`).join('\n'); diff --git a/src/lib/paths.test.ts b/src/lib/paths.test.ts index 57a62b0..61ecf15 100644 --- a/src/lib/paths.test.ts +++ b/src/lib/paths.test.ts @@ -14,12 +14,20 @@ import { promptFile, agentPromptsDir, agentPromptFile, + serveDir, + tlsDir, + tlsCaKeyPath, + tlsCaCertPath, + tlsServerKeyPath, + tlsServerCertPath, worktreeBaseDir, worktreePath, globalPpgDir, globalPromptsDir, globalTemplatesDir, globalSwarmsDir, + 
serveStatePath, + servePidPath, } from './paths.js'; const ROOT = '/tmp/project'; @@ -79,6 +87,30 @@ describe('paths', () => { ); }); + test('serveDir', () => { + expect(serveDir(ROOT)).toBe(path.join(ROOT, '.ppg', 'serve')); + }); + + test('tlsDir', () => { + expect(tlsDir(ROOT)).toBe(path.join(ROOT, '.ppg', 'serve', 'tls')); + }); + + test('tlsCaKeyPath', () => { + expect(tlsCaKeyPath(ROOT)).toBe(path.join(ROOT, '.ppg', 'serve', 'tls', 'ca-key.pem')); + }); + + test('tlsCaCertPath', () => { + expect(tlsCaCertPath(ROOT)).toBe(path.join(ROOT, '.ppg', 'serve', 'tls', 'ca-cert.pem')); + }); + + test('tlsServerKeyPath', () => { + expect(tlsServerKeyPath(ROOT)).toBe(path.join(ROOT, '.ppg', 'serve', 'tls', 'server-key.pem')); + }); + + test('tlsServerCertPath', () => { + expect(tlsServerCertPath(ROOT)).toBe(path.join(ROOT, '.ppg', 'serve', 'tls', 'server-cert.pem')); + }); + test('worktreeBaseDir', () => { expect(worktreeBaseDir(ROOT)).toBe(path.join(ROOT, '.worktrees')); }); @@ -104,4 +136,12 @@ describe('paths', () => { test('globalSwarmsDir', () => { expect(globalSwarmsDir()).toBe(path.join(os.homedir(), '.ppg', 'swarms')); }); + + test('serveStatePath', () => { + expect(serveStatePath(ROOT)).toBe(path.join(ROOT, '.ppg', 'serve.json')); + }); + + test('servePidPath', () => { + expect(servePidPath(ROOT)).toBe(path.join(ROOT, '.ppg', 'serve.pid')); + }); }); diff --git a/src/lib/paths.ts b/src/lib/paths.ts index d456f5f..618a170 100644 --- a/src/lib/paths.ts +++ b/src/lib/paths.ts @@ -79,6 +79,42 @@ export function cronPidPath(projectRoot: string): string { return path.join(ppgDir(projectRoot), 'cron.pid'); } +export function serveDir(projectRoot: string): string { + return path.join(ppgDir(projectRoot), 'serve'); +} + +export function tlsDir(projectRoot: string): string { + return path.join(serveDir(projectRoot), 'tls'); +} + +export function tlsCaKeyPath(projectRoot: string): string { + return path.join(tlsDir(projectRoot), 'ca-key.pem'); +} + +export function 
tlsCaCertPath(projectRoot: string): string { + return path.join(tlsDir(projectRoot), 'ca-cert.pem'); +} + +export function tlsServerKeyPath(projectRoot: string): string { + return path.join(tlsDir(projectRoot), 'server-key.pem'); +} + +export function tlsServerCertPath(projectRoot: string): string { + return path.join(tlsDir(projectRoot), 'server-cert.pem'); +} + +export function servePidPath(projectRoot: string): string { + return path.join(ppgDir(projectRoot), 'serve.pid'); +} + +export function serveLogPath(projectRoot: string): string { + return path.join(logsDir(projectRoot), 'serve.log'); +} + +export function serveJsonPath(projectRoot: string): string { + return path.join(ppgDir(projectRoot), 'serve.json'); +} + export function worktreeBaseDir(projectRoot: string): string { return path.join(projectRoot, '.worktrees'); } @@ -86,3 +122,11 @@ export function worktreeBaseDir(projectRoot: string): string { export function worktreePath(projectRoot: string, id: string): string { return path.join(worktreeBaseDir(projectRoot), id); } + +export function serveStatePath(projectRoot: string): string { + return path.join(ppgDir(projectRoot), 'serve.json'); +} + +export function authPath(projectRoot: string): string { + return path.join(serveDir(projectRoot), 'auth.json'); +} diff --git a/src/server/auth.test.ts b/src/server/auth.test.ts new file mode 100644 index 0000000..325dfc7 --- /dev/null +++ b/src/server/auth.test.ts @@ -0,0 +1,544 @@ +import crypto from 'node:crypto'; +import fs from 'node:fs/promises'; +import os from 'node:os'; +import path from 'node:path'; +import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest'; +import { DuplicateTokenError } from '../lib/errors.js'; +import { authPath } from '../lib/paths.js'; +import { + type AuthStore, + type AuthenticatedRequest, + type RateLimiter, + createAuthHook, + createAuthStore, + createRateLimiter, + generateToken, + hashToken, +} from './auth.js'; + +// --- Test Helpers --- + +function 
makeReply() { + let sentStatus: number | null = null; + let sentBody: unknown = null; + return { + reply: { + code(status: number) { + sentStatus = status; + return { + send(body: unknown) { + sentBody = body; + }, + }; + }, + }, + status: () => sentStatus, + body: () => sentBody, + }; +} + +function makeRequest(overrides: Partial<{ headers: Record; ip: string }> = {}): AuthenticatedRequest { + return { + headers: {}, + ip: '127.0.0.1', + ...overrides, + }; +} + +// --- Token Generation --- + +describe('generateToken', () => { + test('returns string with tk_ prefix', () => { + const token = generateToken(); + expect(token.startsWith('tk_')).toBe(true); + }); + + test('body is valid base64url (32 chars from 24 bytes)', () => { + const token = generateToken(); + const body = token.slice(3); + expect(body).toMatch(/^[A-Za-z0-9_-]+$/); + expect(body.length).toBe(32); + }); + + test('generates unique tokens', () => { + const tokens = new Set(Array.from({ length: 50 }, () => generateToken())); + expect(tokens.size).toBe(50); + }); +}); + +// --- Token Hashing --- + +describe('hashToken', () => { + test('returns a 64-char hex SHA-256 digest', () => { + const hash = hashToken('tk_test'); + expect(hash).toMatch(/^[a-f0-9]{64}$/); + }); + + test('same input produces same hash', () => { + const a = hashToken('tk_abc123'); + const b = hashToken('tk_abc123'); + expect(a).toBe(b); + }); + + test('different inputs produce different hashes', () => { + const a = hashToken('tk_abc'); + const b = hashToken('tk_xyz'); + expect(a).not.toBe(b); + }); +}); + +// --- Rate Limiter --- + +describe('createRateLimiter', () => { + let clock: number; + let limiter: RateLimiter; + + beforeEach(() => { + clock = 1000000; + limiter = createRateLimiter(() => clock); + }); + + test('allows first request from new IP', () => { + expect(limiter.check('1.2.3.4')).toBe(true); + }); + + test('allows up to 5 failures', () => { + const ip = '1.2.3.4'; + for (let i = 0; i < 4; i++) { + limiter.record(ip); + 
expect(limiter.check(ip)).toBe(true); + } + limiter.record(ip); + expect(limiter.check(ip)).toBe(false); + }); + + test('blocks after 5 failures within window', () => { + const ip = '10.0.0.1'; + for (let i = 0; i < 5; i++) limiter.record(ip); + expect(limiter.check(ip)).toBe(false); + }); + + test('resets after window expires', () => { + const ip = '10.0.0.2'; + for (let i = 0; i < 5; i++) limiter.record(ip); + expect(limiter.check(ip)).toBe(false); + + clock += 5 * 60 * 1000; // advance 5 minutes + expect(limiter.check(ip)).toBe(true); + }); + + test('starts new window after expiry', () => { + const ip = '10.0.0.3'; + for (let i = 0; i < 5; i++) limiter.record(ip); + expect(limiter.check(ip)).toBe(false); + + clock += 5 * 60 * 1000; + limiter.record(ip); // new window, failure count = 1 + expect(limiter.check(ip)).toBe(true); + }); + + test('tracks IPs independently', () => { + for (let i = 0; i < 5; i++) limiter.record('a'); + expect(limiter.check('a')).toBe(false); + expect(limiter.check('b')).toBe(true); + }); + + test('reset clears failure count for IP', () => { + const ip = '10.0.0.4'; + for (let i = 0; i < 5; i++) limiter.record(ip); + expect(limiter.check(ip)).toBe(false); + limiter.reset(ip); + expect(limiter.check(ip)).toBe(true); + }); + + test('prunes stale entries when map exceeds max size', () => { + // Fill with 10001 stale entries + for (let i = 0; i <= 10_000; i++) { + limiter.record(`stale-${i}`); + } + // Advance past the window so all are stale + clock += 5 * 60 * 1000; + // One more record triggers prune + limiter.record('fresh'); + // The fresh one should be tracked; stale ones should allow through + expect(limiter.check('stale-0')).toBe(true); + expect(limiter.check('fresh')).toBe(true); + }); + + test('evicts oldest entries when max size is exceeded without stale IPs', () => { + for (let i = 0; i <= 10_000; i++) { + const ip = `ip-${i}`; + for (let j = 0; j < 5; j++) limiter.record(ip); + } + + // Oldest entry should be evicted once 
capacity is exceeded. + expect(limiter.check('ip-0')).toBe(true); + expect(limiter.check('ip-10')).toBe(false); + expect(limiter.check('ip-10000')).toBe(false); + }); +}); + +// --- Auth Store --- + +describe('createAuthStore', () => { + let tmpDir: string; + let store: AuthStore; + + beforeEach(async () => { + tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'ppg-auth-')); + store = await createAuthStore(tmpDir); + }); + + afterEach(async () => { + await fs.rm(tmpDir, { recursive: true, force: true }); + }); + + describe('addToken', () => { + test('returns a token with tk_ prefix', async () => { + const token = await store.addToken('iphone'); + expect(token.startsWith('tk_')).toBe(true); + }); + + test('stores hash, not plaintext', async () => { + const token = await store.addToken('iphone'); + const raw = await fs.readFile(authPath(tmpDir), 'utf-8'); + const data = JSON.parse(raw); + expect(data.tokens[0].hash).toBe(hashToken(token)); + expect(raw).not.toContain(token); + }); + + test('rejects duplicate labels with DuplicateTokenError', async () => { + await store.addToken('ipad'); + await expect(store.addToken('ipad')).rejects.toThrow(DuplicateTokenError); + await expect(store.addToken('ipad')).rejects.toThrow( + 'Token with label "ipad" already exists', + ); + }); + + test('supports multiple tokens with different labels', async () => { + await store.addToken('iphone'); + await store.addToken('ipad'); + await store.addToken('macbook'); + const tokens = await store.listTokens(); + expect(tokens.length).toBe(3); + }); + + test('sets createdAt and null lastUsedAt', async () => { + await store.addToken('device'); + const tokens = await store.listTokens(); + expect(tokens[0].createdAt).toBeTruthy(); + expect(tokens[0].lastUsedAt).toBeNull(); + }); + }); + + describe('validateToken', () => { + test('validates correct token', async () => { + const token = await store.addToken('iphone'); + const entry = await store.validateToken(token); + expect(entry).not.toBeNull(); + 
expect(entry!.label).toBe('iphone'); + }); + + test('rejects invalid token', async () => { + await store.addToken('iphone'); + const entry = await store.validateToken('tk_wrong'); + expect(entry).toBeNull(); + }); + + test('rejects empty token', async () => { + await store.addToken('iphone'); + const entry = await store.validateToken(''); + expect(entry).toBeNull(); + }); + + test('updates lastUsedAt on successful validation', async () => { + const token = await store.addToken('iphone'); + const before = await store.listTokens(); + expect(before[0].lastUsedAt).toBeNull(); + + await store.validateToken(token); + const after = await store.listTokens(); + expect(after[0].lastUsedAt).not.toBeNull(); + }); + + test('returns defensive copy of token entry', async () => { + const token = await store.addToken('iphone'); + const entry = await store.validateToken(token); + expect(entry).not.toBeNull(); + entry!.label = 'tampered'; + + const tokens = await store.listTokens(); + expect(tokens[0].label).toBe('iphone'); + }); + + test('uses timing-safe comparison', async () => { + const spy = vi.spyOn(crypto, 'timingSafeEqual'); + const token = await store.addToken('iphone'); + await store.validateToken(token); + expect(spy).toHaveBeenCalled(); + spy.mockRestore(); + }); + + test('validates correct token among multiple', async () => { + const token1 = await store.addToken('iphone'); + await store.addToken('ipad'); + const token3 = await store.addToken('macbook'); + + const entry1 = await store.validateToken(token1); + expect(entry1!.label).toBe('iphone'); + + const entry3 = await store.validateToken(token3); + expect(entry3!.label).toBe('macbook'); + }); + }); + + describe('revokeToken', () => { + test('removes token by label', async () => { + await store.addToken('iphone'); + const removed = await store.revokeToken('iphone'); + expect(removed).toBe(true); + const tokens = await store.listTokens(); + expect(tokens.length).toBe(0); + }); + + test('returns false for unknown label', 
async () => { + const removed = await store.revokeToken('nonexistent'); + expect(removed).toBe(false); + }); + + test('revoked token no longer validates', async () => { + const token = await store.addToken('iphone'); + await store.revokeToken('iphone'); + const entry = await store.validateToken(token); + expect(entry).toBeNull(); + }); + + test('does not affect other tokens', async () => { + const token1 = await store.addToken('iphone'); + await store.addToken('ipad'); + await store.revokeToken('ipad'); + + const entry = await store.validateToken(token1); + expect(entry!.label).toBe('iphone'); + const tokens = await store.listTokens(); + expect(tokens.length).toBe(1); + }); + }); + + describe('listTokens', () => { + test('returns empty array when no tokens', async () => { + const tokens = await store.listTokens(); + expect(tokens).toEqual([]); + }); + + test('returns all token entries', async () => { + await store.addToken('a'); + await store.addToken('b'); + const tokens = await store.listTokens(); + expect(tokens.map((t) => t.label)).toEqual(['a', 'b']); + }); + + test('returns defensive copies', async () => { + await store.addToken('a'); + const tokens = await store.listTokens(); + tokens[0].label = 'tampered'; + + const fresh = await store.listTokens(); + expect(fresh[0].label).toBe('a'); + }); + }); + + describe('persistence', () => { + test('auth.json has 0o600 permissions', async () => { + await store.addToken('iphone'); + const stat = await fs.stat(authPath(tmpDir)); + const mode = stat.mode & 0o777; + expect(mode).toBe(0o600); + }); + + test('survives store recreation', async () => { + const token = await store.addToken('iphone'); + const store2 = await createAuthStore(tmpDir); + const entry = await store2.validateToken(token); + expect(entry!.label).toBe('iphone'); + }); + + test('throws AuthCorruptError on corrupt auth.json', async () => { + await store.addToken('iphone'); + await fs.writeFile(authPath(tmpDir), '{{{invalid json'); + const store2 = await 
createAuthStore(tmpDir); + await expect(store2.listTokens()).rejects.toThrow('Auth data is corrupt'); + }); + + test('throws AuthCorruptError on invalid auth.json structure', async () => { + await fs.mkdir(path.dirname(authPath(tmpDir)), { recursive: true }); + await fs.writeFile( + authPath(tmpDir), + JSON.stringify({ tokens: [{ label: 'incomplete' }] }), + ); + const store2 = await createAuthStore(tmpDir); + await expect(store2.listTokens()).rejects.toThrow('Auth data is corrupt'); + }); + }); +}); + +// --- Fastify Auth Hook --- + +describe('createAuthHook', () => { + let store: AuthStore; + let limiter: RateLimiter; + let hook: ReturnType; + let tmpDir: string; + let token: string; + + beforeEach(async () => { + tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'ppg-auth-hook-')); + store = await createAuthStore(tmpDir); + limiter = createRateLimiter(); + hook = createAuthHook({ store, rateLimiter: limiter }); + token = await store.addToken('test-device'); + }); + + afterEach(async () => { + await fs.rm(tmpDir, { recursive: true, force: true }); + }); + + test('passes with valid Bearer token', async () => { + const { reply, status } = makeReply(); + await hook( + makeRequest({ headers: { authorization: `Bearer ${token}` } }), + reply, + ); + expect(status()).toBeNull(); + }); + + test('attaches tokenEntry to request on success', async () => { + const { reply } = makeReply(); + const request = makeRequest({ headers: { authorization: `Bearer ${token}` } }); + await hook(request, reply); + expect(request.tokenEntry).toBeDefined(); + expect(request.tokenEntry!.label).toBe('test-device'); + }); + + test('rejects missing Authorization header', async () => { + const { reply, status, body } = makeReply(); + await hook(makeRequest(), reply); + expect(status()).toBe(401); + expect(body()).toEqual({ error: 'Missing or malformed Authorization header' }); + }); + + test('rejects non-Bearer scheme', async () => { + const { reply, status } = makeReply(); + await hook( + 
makeRequest({ headers: { authorization: `Basic ${token}` } }), + reply, + ); + expect(status()).toBe(401); + }); + + test('rejects invalid token', async () => { + const { reply, status, body } = makeReply(); + await hook( + makeRequest({ headers: { authorization: 'Bearer tk_invalid' } }), + reply, + ); + expect(status()).toBe(401); + expect(body()).toEqual({ error: 'Invalid token' }); + }); + + test('returns 429 when rate limited', async () => { + for (let i = 0; i < 5; i++) { + limiter.record('127.0.0.1'); + } + const { reply, status, body } = makeReply(); + await hook( + makeRequest({ headers: { authorization: `Bearer ${token}` } }), + reply, + ); + expect(status()).toBe(429); + expect(body()).toEqual({ error: 'Too many failed attempts. Try again later.' }); + }); + + test('records failure on missing header', async () => { + for (let i = 0; i < 5; i++) { + await hook(makeRequest(), makeReply().reply); + } + const { reply, status } = makeReply(); + await hook(makeRequest(), reply); + expect(status()).toBe(429); + }); + + test('records failure on invalid token', async () => { + for (let i = 0; i < 5; i++) { + await hook( + makeRequest({ headers: { authorization: 'Bearer tk_bad' } }), + makeReply().reply, + ); + } + const { reply, status } = makeReply(); + await hook( + makeRequest({ headers: { authorization: `Bearer ${token}` } }), + reply, + ); + expect(status()).toBe(429); + }); + + test('resets rate limit on successful auth', async () => { + for (let i = 0; i < 4; i++) { + await hook( + makeRequest({ headers: { authorization: 'Bearer tk_bad' } }), + makeReply().reply, + ); + } + // Successful auth should reset + await hook( + makeRequest({ headers: { authorization: `Bearer ${token}` } }), + makeReply().reply, + ); + // Should not be rate limited now + const { reply, status } = makeReply(); + await hook( + makeRequest({ headers: { authorization: 'Bearer tk_bad' } }), + reply, + ); + expect(status()).toBe(401); // not 429 + }); + + test('rate limits per IP 
independently', async () => { + for (let i = 0; i < 5; i++) { + await hook( + makeRequest({ ip: '10.0.0.1', headers: { authorization: 'Bearer tk_bad' } }), + makeReply().reply, + ); + } + // Different IP should still work + const { reply, status } = makeReply(); + await hook( + makeRequest({ ip: '10.0.0.2', headers: { authorization: `Bearer ${token}` } }), + reply, + ); + expect(status()).toBeNull(); + }); + + test('returns 503 when token validation throws', async () => { + const brokenStore: AuthStore = { + addToken: async () => 'tk_unused', + validateToken: async () => { + throw new Error('disk error'); + }, + revokeToken: async () => false, + listTokens: async () => [], + }; + const brokenHook = createAuthHook({ + store: brokenStore, + rateLimiter: createRateLimiter(), + }); + const { reply, status, body } = makeReply(); + await brokenHook( + makeRequest({ headers: { authorization: 'Bearer tk_any' } }), + reply, + ); + expect(status()).toBe(503); + expect(body()).toEqual({ error: 'Authentication unavailable' }); + }); +}); diff --git a/src/server/auth.ts b/src/server/auth.ts new file mode 100644 index 0000000..0dc47cc --- /dev/null +++ b/src/server/auth.ts @@ -0,0 +1,286 @@ +import crypto from 'node:crypto'; +import fs from 'node:fs/promises'; +import { getWriteFileAtomic } from '../lib/cjs-compat.js'; +import { AuthCorruptError, DuplicateTokenError } from '../lib/errors.js'; +import { authPath, serveDir } from '../lib/paths.js'; + +// --- Types --- + +export interface TokenEntry { + label: string; + hash: string; + createdAt: string; + lastUsedAt: string | null; +} + +export interface AuthData { + tokens: TokenEntry[]; +} + +type UnknownRecord = Record; + +interface RateLimitEntry { + failures: number; + windowStart: number; +} + +// --- Constants --- + +const RATE_LIMIT_MAX_FAILURES = 5; +const RATE_LIMIT_WINDOW_MS = 5 * 60 * 1000; // 5 minutes +const RATE_LIMIT_MAX_ENTRIES = 10_000; + +// --- Token Generation & Hashing --- + +export function generateToken(): 
string {
  // 24 random bytes → 32 base64url chars; prefix makes tokens grep-able.
  const bytes = crypto.randomBytes(24);
  return `tk_${bytes.toString('base64url')}`;
}

/** Hashes a token with SHA-256 (hex digest); only this digest is persisted. */
export function hashToken(token: string): string {
  return crypto.createHash('sha256').update(token).digest('hex');
}

// --- Rate Limiter ---

export interface RateLimiter {
  /** Returns true when the IP is still allowed to attempt authentication. */
  check(ip: string): boolean;
  /** Records one failed attempt for the IP. */
  record(ip: string): void;
  /** Clears all recorded failures for the IP. */
  reset(ip: string): void;
}

/**
 * Creates a fixed-window failure counter keyed by IP address.
 *
 * @param now Clock source, injectable for tests (defaults to Date.now).
 */
export function createRateLimiter(
  now: () => number = Date.now,
): RateLimiter {
  // Restored stripped generics; bounded by RATE_LIMIT_MAX_ENTRIES via prune().
  const entries = new Map<string, RateLimitEntry>();

  // Drops expired windows first, then evicts oldest (insertion-order) entries
  // until the map is back under the cap.
  function prune(): void {
    if (entries.size <= RATE_LIMIT_MAX_ENTRIES) return;

    const currentTime = now();
    for (const [ip, entry] of entries.entries()) {
      if (currentTime - entry.windowStart >= RATE_LIMIT_WINDOW_MS) {
        entries.delete(ip);
      }
    }

    while (entries.size > RATE_LIMIT_MAX_ENTRIES) {
      const oldestIp = entries.keys().next().value;
      if (oldestIp === undefined) break;
      entries.delete(oldestIp);
    }
  }

  return {
    check(ip: string): boolean {
      const entry = entries.get(ip);
      if (!entry) return true;

      // Expired window: forget it so the next failure starts a fresh window.
      if (now() - entry.windowStart >= RATE_LIMIT_WINDOW_MS) {
        entries.delete(ip);
        return true;
      }

      return entry.failures < RATE_LIMIT_MAX_FAILURES;
    },

    record(ip: string): void {
      const entry = entries.get(ip);
      const currentTime = now();

      if (!entry || currentTime - entry.windowStart >= RATE_LIMIT_WINDOW_MS) {
        entries.set(ip, { failures: 1, windowStart: currentTime });
        prune(); // only new insertions can grow the map
        return;
      }

      entry.failures += 1;
    },

    reset(ip: string): void {
      entries.delete(ip);
    },
  };
}

// --- Auth Store ---

export interface AuthStore {
  addToken(label: string): Promise<string>;
  validateToken(token: string): Promise<TokenEntry | null>;
  revokeToken(label: string): Promise<boolean>;
  listTokens(): Promise<TokenEntry[]>;
}

/**
 * Creates a token store persisted at authPath(projectRoot).
 * Reads are cached in memory; only token hashes are ever written to disk.
 */
export async function createAuthStore(projectRoot: string): Promise<AuthStore> {
  const filePath = authPath(projectRoot);
  let cache: AuthData | null = null;

  function isTokenEntry(value: unknown): value is TokenEntry {
    if (!value || typeof value !== 'object')
return false;

    const record = value as UnknownRecord;
    return (
      typeof record.label === 'string' &&
      typeof record.hash === 'string' &&
      typeof record.createdAt === 'string' &&
      (record.lastUsedAt === null || typeof record.lastUsedAt === 'string')
    );
  }

  function isAuthData(value: unknown): value is AuthData {
    if (!value || typeof value !== 'object') return false;
    const record = value as UnknownRecord;
    return Array.isArray(record.tokens) && record.tokens.every(isTokenEntry);
  }

  // Defensive copy so callers cannot mutate the in-memory cache.
  function cloneTokenEntry(entry: TokenEntry): TokenEntry {
    return { ...entry };
  }

  // Loads and validates auth.json, memoizing the parsed result in `cache`.
  async function readData(): Promise<AuthData> {
    if (cache) return cache;

    let raw: string;
    try {
      raw = await fs.readFile(filePath, 'utf-8');
    } catch (err) {
      // A missing file means "no tokens yet"; any other read failure is
      // surfaced as corrupt auth data.
      if ((err as NodeJS.ErrnoException).code === 'ENOENT') {
        cache = { tokens: [] };
        return cache;
      }
      throw new AuthCorruptError(filePath);
    }

    let parsed: unknown;
    try {
      parsed = JSON.parse(raw);
    } catch {
      throw new AuthCorruptError(filePath);
    }

    if (!isAuthData(parsed)) {
      throw new AuthCorruptError(filePath);
    }

    cache = { tokens: parsed.tokens.map(cloneTokenEntry) };
    return cache;
  }

  // Atomically writes auth.json with owner-only (0o600) permissions and
  // refreshes the cache.
  async function writeData(data: AuthData): Promise<void> {
    const dir = serveDir(projectRoot);
    await fs.mkdir(dir, { recursive: true });
    const writeFileAtomic = await getWriteFileAtomic();
    await writeFileAtomic(filePath, JSON.stringify(data, null, 2), {
      mode: 0o600,
    });
    cache = data;
  }

  return {
    async addToken(label: string): Promise<string> {
      const data = await readData();
      const existing = data.tokens.find((t) => t.label === label);
      if (existing) {
        throw new DuplicateTokenError(label);
      }

      const token = generateToken();
      const entry: TokenEntry = {
        label,
        hash: hashToken(token),
        createdAt: new Date().toISOString(),
        lastUsedAt: null,
      };
      data.tokens.push(entry);
      await writeData(data);
      // The plaintext token is returned exactly once and never stored.
      return token;
    },

    async validateToken(token: string): Promise<TokenEntry | null> {
      if (!token) return
null; + + const data = await readData(); + const incomingBuf = Buffer.from(hashToken(token), 'hex'); + + for (const entry of data.tokens) { + const storedBuf = Buffer.from(entry.hash, 'hex'); + if (incomingBuf.length === storedBuf.length && crypto.timingSafeEqual(incomingBuf, storedBuf)) { + entry.lastUsedAt = new Date().toISOString(); + await writeData(data); + return cloneTokenEntry(entry); + } + } + + return null; + }, + + async revokeToken(label: string): Promise { + const data = await readData(); + const idx = data.tokens.findIndex((t) => t.label === label); + if (idx === -1) return false; + data.tokens.splice(idx, 1); + await writeData(data); + return true; + }, + + async listTokens(): Promise { + const data = await readData(); + return data.tokens.map(cloneTokenEntry); + }, + }; +} + +// --- Fastify Auth Hook --- + +export interface AuthHookDeps { + store: AuthStore; + rateLimiter: RateLimiter; +} + +export interface AuthenticatedRequest { + headers: Record; + ip: string; + tokenEntry?: TokenEntry; +} + +export function createAuthHook(deps: AuthHookDeps) { + const { store, rateLimiter } = deps; + + return async function authHook( + request: AuthenticatedRequest, + reply: { code(statusCode: number): { send(body: unknown): void } }, + ): Promise { + const ip = request.ip; + + if (!rateLimiter.check(ip)) { + reply.code(429).send({ error: 'Too many failed attempts. Try again later.' 
}); + return; + } + + const authHeader = request.headers['authorization']; + if (typeof authHeader !== 'string' || !authHeader.startsWith('Bearer ')) { + rateLimiter.record(ip); + reply.code(401).send({ error: 'Missing or malformed Authorization header' }); + return; + } + + const token = authHeader.slice(7).trim(); + let entry: TokenEntry | null = null; + try { + entry = await store.validateToken(token); + } catch { + reply.code(503).send({ error: 'Authentication unavailable' }); + return; + } + + if (!entry) { + rateLimiter.record(ip); + reply.code(401).send({ error: 'Invalid token' }); + return; + } + + request.tokenEntry = entry; + rateLimiter.reset(ip); + }; +} diff --git a/src/server/error-handler.test.ts b/src/server/error-handler.test.ts new file mode 100644 index 0000000..07b42f2 --- /dev/null +++ b/src/server/error-handler.test.ts @@ -0,0 +1,182 @@ +import { describe, expect, test, vi } from 'vitest'; +import { + AgentNotFoundError, + MergeFailedError, + ManifestLockError, + NotGitRepoError, + NotInitializedError, + PpgError, + TmuxNotFoundError, + WorktreeNotFoundError, + GhNotFoundError, + UnmergedWorkError, +} from '../lib/errors.js'; +import type { LogFn } from './error-handler.js'; +import { + buildErrorResponse, + errorHandler, + getHttpStatus, + registerErrorHandler, +} from './error-handler.js'; + +describe('getHttpStatus', () => { + test.each([ + ['INVALID_ARGS', 400], + ['NO_SESSION_ID', 400], + ['NOT_GIT_REPO', 400], + ['NOT_INITIALIZED', 409], + ['MANIFEST_LOCK', 409], + ['AGENTS_RUNNING', 409], + ['MERGE_FAILED', 409], + ['UNMERGED_WORK', 409], + ['WORKTREE_NOT_FOUND', 404], + ['AGENT_NOT_FOUND', 404], + ['PANE_NOT_FOUND', 404], + ['NO_TMUX_WINDOW', 404], + ['TARGET_NOT_FOUND', 404], + ['WAIT_TIMEOUT', 408], + ['AGENTS_FAILED', 500], + ['TMUX_NOT_FOUND', 500], + ['GH_NOT_FOUND', 500], + ['DOWNLOAD_FAILED', 502], + ['INSTALL_FAILED', 500], + ])('maps %s → %d', (code, expected) => { + expect(getHttpStatus(code)).toBe(expected); + }); + + 
test('returns 500 for unknown code', () => { + expect(getHttpStatus('SOME_UNKNOWN_CODE')).toBe(500); + }); +}); + +describe('buildErrorResponse', () => { + test.each<[string, PpgError, number, string]>([ + ['TmuxNotFoundError', new TmuxNotFoundError(), 500, 'TMUX_NOT_FOUND'], + ['NotGitRepoError', new NotGitRepoError('/tmp'), 400, 'NOT_GIT_REPO'], + ['NotInitializedError', new NotInitializedError('/tmp'), 409, 'NOT_INITIALIZED'], + ['ManifestLockError', new ManifestLockError(), 409, 'MANIFEST_LOCK'], + ['WorktreeNotFoundError', new WorktreeNotFoundError('wt-x'), 404, 'WORKTREE_NOT_FOUND'], + ['AgentNotFoundError', new AgentNotFoundError('ag-y'), 404, 'AGENT_NOT_FOUND'], + ['MergeFailedError', new MergeFailedError('conflict'), 409, 'MERGE_FAILED'], + ['GhNotFoundError', new GhNotFoundError(), 500, 'GH_NOT_FOUND'], + ['UnmergedWorkError', new UnmergedWorkError(['foo', 'bar']), 409, 'UNMERGED_WORK'], + ['INVALID_ARGS', new PpgError('bad args', 'INVALID_ARGS'), 400, 'INVALID_ARGS'], + ['NO_SESSION_ID', new PpgError('no session', 'NO_SESSION_ID'), 400, 'NO_SESSION_ID'], + ['AGENTS_RUNNING', new PpgError('running', 'AGENTS_RUNNING'), 409, 'AGENTS_RUNNING'], + ['WAIT_TIMEOUT', new PpgError('timeout', 'WAIT_TIMEOUT'), 408, 'WAIT_TIMEOUT'], + ['AGENTS_FAILED', new PpgError('failed', 'AGENTS_FAILED'), 500, 'AGENTS_FAILED'], + ['PANE_NOT_FOUND', new PpgError('pane gone', 'PANE_NOT_FOUND'), 404, 'PANE_NOT_FOUND'], + ['NO_TMUX_WINDOW', new PpgError('no window', 'NO_TMUX_WINDOW'), 404, 'NO_TMUX_WINDOW'], + ['TARGET_NOT_FOUND', new PpgError('no target', 'TARGET_NOT_FOUND'), 404, 'TARGET_NOT_FOUND'], + ['DOWNLOAD_FAILED', new PpgError('download err', 'DOWNLOAD_FAILED'), 502, 'DOWNLOAD_FAILED'], + ['INSTALL_FAILED', new PpgError('install err', 'INSTALL_FAILED'), 500, 'INSTALL_FAILED'], + ])('given %s, should return %d with code %s', (_label, error, expectedStatus, expectedCode) => { + const { status, body } = buildErrorResponse(error); + expect(status).toBe(expectedStatus); + 
expect(body.error.code).toBe(expectedCode); + expect(body.error.message).toBe(error.message); + }); + + test('given Fastify validation error, should return 400 with field details', () => { + const validationDetails = [ + { instancePath: '/name', message: 'must be string' }, + { instancePath: '/count', message: 'must be number' }, + ]; + const error = Object.assign(new Error('body/name must be string'), { + validation: validationDetails, + validationContext: 'body', + }); + + const { status, body } = buildErrorResponse(error); + + expect(status).toBe(400); + expect(body).toEqual({ + error: { + code: 'VALIDATION_ERROR', + message: 'body/name must be string', + details: validationDetails, + }, + }); + }); + + test('given unknown error, should return generic 500', () => { + const { status, body } = buildErrorResponse(new Error('something broke internally')); + + expect(status).toBe(500); + expect(body).toEqual({ + error: { + code: 'INTERNAL_ERROR', + message: 'An unexpected error occurred', + }, + }); + }); + + test('given unknown error, should not leak internal message', () => { + const { body } = buildErrorResponse(new TypeError('Cannot read property x of undefined')); + expect(body.error.message).toBe('An unexpected error occurred'); + }); + + test('given unknown error and log function, should log the original error', () => { + const log: LogFn = vi.fn(); + const error = new Error('db connection lost'); + + buildErrorResponse(error, log); + + expect(log).toHaveBeenCalledWith('Unhandled error', error); + }); + + test('given PpgError and log function, should not log', () => { + const log: LogFn = vi.fn(); + buildErrorResponse(new WorktreeNotFoundError('wt-x'), log); + expect(log).not.toHaveBeenCalled(); + }); +}); + +describe('errorHandler', () => { + const mockReply = () => ({ + status: vi.fn().mockReturnThis(), + send: vi.fn().mockReturnThis(), + }); + + const mockRequest = { log: { error: vi.fn() } } as unknown as Parameters[1]; + + test('given PpgError, should 
send structured response', () => { + const reply = mockReply(); + errorHandler(new AgentNotFoundError('ag-xyz'), mockRequest, reply as never); + + expect(reply.status).toHaveBeenCalledWith(404); + expect(reply.send).toHaveBeenCalledWith({ + error: { + code: 'AGENT_NOT_FOUND', + message: 'Agent not found: ag-xyz', + }, + }); + }); + + test('given unknown error, should send 500 and log via request.log', () => { + const request = { log: { error: vi.fn() } } as unknown as Parameters[1]; + const reply = mockReply(); + const error = new Error('oops'); + + errorHandler(error, request, reply as never); + + expect(reply.status).toHaveBeenCalledWith(500); + expect(reply.send).toHaveBeenCalledWith({ + error: { + code: 'INTERNAL_ERROR', + message: 'An unexpected error occurred', + }, + }); + expect(request.log.error).toHaveBeenCalledWith({ err: error }, 'Unhandled error'); + }); +}); + +describe('registerErrorHandler', () => { + test('given Fastify instance, should call setErrorHandler', () => { + const app = { setErrorHandler: vi.fn() }; + registerErrorHandler(app as never); + + expect(app.setErrorHandler).toHaveBeenCalledOnce(); + expect(app.setErrorHandler).toHaveBeenCalledWith(errorHandler); + }); +}); diff --git a/src/server/error-handler.ts b/src/server/error-handler.ts new file mode 100644 index 0000000..e872180 --- /dev/null +++ b/src/server/error-handler.ts @@ -0,0 +1,100 @@ +import type { FastifyError, FastifyInstance, FastifyReply, FastifyRequest } from 'fastify'; +import { PpgError } from '../lib/errors.js'; + +export interface ErrorResponseBody { + error: { + code: string; + message: string; + details?: unknown; + }; +} + +export type LogFn = (message: string, error: Error) => void; + +const httpStatusByCode: Record = { + INVALID_ARGS: 400, + NO_SESSION_ID: 400, + NOT_GIT_REPO: 400, + NOT_INITIALIZED: 409, + MANIFEST_LOCK: 409, + AGENTS_RUNNING: 409, + MERGE_FAILED: 409, + UNMERGED_WORK: 409, + WORKTREE_NOT_FOUND: 404, + AGENT_NOT_FOUND: 404, + PANE_NOT_FOUND: 
404, + NO_TMUX_WINDOW: 404, + TARGET_NOT_FOUND: 404, + WAIT_TIMEOUT: 408, + AGENTS_FAILED: 500, + TMUX_NOT_FOUND: 500, + GH_NOT_FOUND: 500, + DOWNLOAD_FAILED: 502, + INSTALL_FAILED: 500, +}; + +export function getHttpStatus(ppgCode: string): number { + return httpStatusByCode[ppgCode] ?? 500; +} + +function isFastifyValidationError( + error: Error | FastifyError, +): error is FastifyError & { validation: unknown[] } { + return 'validation' in error && Array.isArray((error as { validation: unknown }).validation); +} + +export function buildErrorResponse(error: Error, log?: LogFn): { + status: number; + body: ErrorResponseBody; +} { + if (error instanceof PpgError) { + return { + status: getHttpStatus(error.code), + body: { + error: { + code: error.code, + message: error.message, + }, + }, + }; + } + + if (isFastifyValidationError(error)) { + return { + status: 400, + body: { + error: { + code: 'VALIDATION_ERROR', + message: error.message, + details: error.validation, + }, + }, + }; + } + + log?.('Unhandled error', error); + + return { + status: 500, + body: { + error: { + code: 'INTERNAL_ERROR', + message: 'An unexpected error occurred', + }, + }, + }; +} + +export function errorHandler( + error: Error, + request: FastifyRequest, + reply: FastifyReply, +): void { + const log: LogFn = (message, err) => request.log.error({ err }, message); + const { status, body } = buildErrorResponse(error, log); + reply.status(status).send(body); +} + +export function registerErrorHandler(app: FastifyInstance): void { + app.setErrorHandler(errorHandler); +} diff --git a/src/server/index.test.ts b/src/server/index.test.ts new file mode 100644 index 0000000..bc10c3c --- /dev/null +++ b/src/server/index.test.ts @@ -0,0 +1,71 @@ +import { describe, test, expect, vi, afterEach } from 'vitest'; +import os from 'node:os'; +import { detectLanAddress, timingSafeTokenMatch } from './index.js'; + +describe('detectLanAddress', () => { + afterEach(() => { + vi.restoreAllMocks(); + }); + + 
test('given interfaces with a non-internal IPv4 address, should return it', () => { + vi.spyOn(os, 'networkInterfaces').mockReturnValue({ + lo0: [ + { address: '127.0.0.1', family: 'IPv4', internal: true, netmask: '255.0.0.0', mac: '00:00:00:00:00:00', cidr: '127.0.0.1/8' }, + ], + en0: [ + { address: 'fe80::1', family: 'IPv6', internal: false, netmask: 'ffff:ffff:ffff:ffff::', mac: 'aa:bb:cc:dd:ee:ff', cidr: 'fe80::1/64', scopeid: 1 }, + { address: '192.168.1.42', family: 'IPv4', internal: false, netmask: '255.255.255.0', mac: 'aa:bb:cc:dd:ee:ff', cidr: '192.168.1.42/24' }, + ], + }); + expect(detectLanAddress()).toBe('192.168.1.42'); + }); + + test('given only internal interfaces, should return undefined', () => { + vi.spyOn(os, 'networkInterfaces').mockReturnValue({ + lo0: [ + { address: '127.0.0.1', family: 'IPv4', internal: true, netmask: '255.0.0.0', mac: '00:00:00:00:00:00', cidr: '127.0.0.1/8' }, + ], + }); + expect(detectLanAddress()).toBeUndefined(); + }); + + test('given empty interfaces, should return undefined', () => { + vi.spyOn(os, 'networkInterfaces').mockReturnValue({}); + expect(detectLanAddress()).toBeUndefined(); + }); +}); + +describe('timingSafeTokenMatch', () => { + const token = 'my-secret-token'; + + test('given matching bearer token, should return true', () => { + expect(timingSafeTokenMatch(`Bearer ${token}`, token)).toBe(true); + }); + + test('given wrong token, should return false', () => { + expect(timingSafeTokenMatch('Bearer wrong-token!', token)).toBe(false); + }); + + test('given missing header, should return false', () => { + expect(timingSafeTokenMatch(undefined, token)).toBe(false); + }); + + test('given empty header, should return false', () => { + expect(timingSafeTokenMatch('', token)).toBe(false); + }); + + test('given header with different length, should return false', () => { + expect(timingSafeTokenMatch('Bearer short', token)).toBe(false); + }); + + test('given header with same char length but different byte length, 
should return false', () => { + const unicodeHeader = `Bearer ${'é'.repeat(token.length)}`; + expect(() => timingSafeTokenMatch(unicodeHeader, token)).not.toThrow(); + expect(timingSafeTokenMatch(unicodeHeader, token)).toBe(false); + }); + + test('given raw token without Bearer prefix, should return false', () => { + const padded = token.padEnd(`Bearer ${token}`.length, 'x'); + expect(timingSafeTokenMatch(padded, token)).toBe(false); + }); +}); diff --git a/src/server/index.ts b/src/server/index.ts new file mode 100644 index 0000000..01634f1 --- /dev/null +++ b/src/server/index.ts @@ -0,0 +1,170 @@ +import crypto from 'node:crypto'; +import fs from 'node:fs/promises'; +import os from 'node:os'; +import { createRequire } from 'node:module'; +import Fastify from 'fastify'; +import cors from '@fastify/cors'; +import { agentRoutes } from './routes/agents.js'; +import { serveStatePath, servePidPath } from '../lib/paths.js'; +import { info, success } from '../lib/output.js'; + +const require = createRequire(import.meta.url); +const PACKAGE_JSON_PATHS = ['../../package.json', '../package.json'] as const; + +function getPackageVersion(): string { + for (const packageJsonPath of PACKAGE_JSON_PATHS) { + try { + const pkg = require(packageJsonPath) as { version?: unknown }; + if (typeof pkg.version === 'string') return pkg.version; + } catch { + // Fall through and try alternate path. 
+ } + } + throw new Error('Unable to resolve package version'); +} + +const packageVersion = getPackageVersion(); + +export interface ServeOptions { + projectRoot: string; + port: number; + host: string; + token?: string; + json?: boolean; +} + +export interface ServeState { + pid: number; + port: number; + host: string; + lanAddress?: string; + startedAt: string; + version: string; +} + +export function detectLanAddress(): string | undefined { + const interfaces = os.networkInterfaces(); + for (const addrs of Object.values(interfaces)) { + if (!addrs) continue; + for (const addr of addrs) { + if (addr.family === 'IPv4' && !addr.internal) { + return addr.address; + } + } + } + return undefined; +} + +export function timingSafeTokenMatch(header: string | undefined, expected: string): boolean { + const expectedValue = `Bearer ${expected}`; + if (!header || header.length !== expectedValue.length) return false; + const headerBuffer = Buffer.from(header); + const expectedBuffer = Buffer.from(expectedValue); + if (headerBuffer.length !== expectedBuffer.length) return false; + return crypto.timingSafeEqual( + headerBuffer, + expectedBuffer, + ); +} + +async function writeStateFile(projectRoot: string, state: ServeState): Promise { + const statePath = serveStatePath(projectRoot); + await fs.writeFile(statePath, JSON.stringify(state, null, 2) + '\n', { mode: 0o600 }); +} + +async function writePidFile(projectRoot: string, pid: number): Promise { + const pidPath = servePidPath(projectRoot); + await fs.writeFile(pidPath, String(pid) + '\n', { mode: 0o600 }); +} + +async function removeStateFiles(projectRoot: string): Promise { + for (const filePath of [serveStatePath(projectRoot), servePidPath(projectRoot)]) { + try { + await fs.unlink(filePath); + } catch (err) { + if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err; + } + } +} + +export async function startServer(options: ServeOptions): Promise { + const { projectRoot, port, host, token, json } = options; + + 
const app = Fastify({ logger: false }); + + await app.register(cors, { origin: true }); + + if (token) { + app.addHook('onRequest', async (request, reply) => { + if (request.url === '/health') return; + if (!timingSafeTokenMatch(request.headers.authorization, token)) { + return reply.code(401).send({ error: 'Unauthorized' }); + } + }); + } + + // Decorate with projectRoot so routes can access it + app.decorate('projectRoot', projectRoot); + + app.get('/health', async () => { + return { + status: 'ok', + uptime: process.uptime(), + version: packageVersion, + }; + }); + + // GET /api/status — full manifest with live statuses + app.get('/api/status', async () => { + const { readManifest } = await import('../core/manifest.js'); + const manifest = await readManifest(projectRoot); + return manifest; + }); + + // Register route plugins + await app.register(agentRoutes, { prefix: '/api', projectRoot }); + const { worktreeRoutes } = await import('./routes/worktrees.js'); + await app.register(worktreeRoutes, { prefix: '/api' }); + const { configRoutes } = await import('./routes/config.js'); + await app.register(configRoutes, { projectRoot }); + const spawnRoute = (await import('./routes/spawn.js')).default; + await app.register(spawnRoute, { projectRoot }); + + const lanAddress = detectLanAddress(); + + const shutdown = async (signal: string) => { + if (!json) info(`Received ${signal}, shutting down...`); + await removeStateFiles(projectRoot); + await app.close(); + process.exit(0); + }; + + process.on('SIGTERM', () => { shutdown('SIGTERM').catch(() => process.exit(1)); }); + process.on('SIGINT', () => { shutdown('SIGINT').catch(() => process.exit(1)); }); + + await app.listen({ port, host }); + + const state: ServeState = { + pid: process.pid, + port, + host, + lanAddress, + startedAt: new Date().toISOString(), + version: packageVersion, + }; + + await writeStateFile(projectRoot, state); + await writePidFile(projectRoot, process.pid); + + if (json) { + 
 console.log(JSON.stringify(state)); + } else { + success(`Server listening on http://${host}:${port}`); + if (lanAddress) { + info(`LAN address: http://${lanAddress}:${port}`); + } + if (token) { + info('Bearer token authentication enabled'); + } + } +} diff --git a/src/server/routes/agents.test.ts b/src/server/routes/agents.test.ts new file mode 100644 index 0000000..a4b3a0f --- /dev/null +++ b/src/server/routes/agents.test.ts @@ -0,0 +1,497 @@ +import { describe, test, expect, vi, beforeEach } from 'vitest'; +import Fastify from 'fastify'; +import { agentRoutes } from './agents.js'; +import type { Manifest } from '../../types/manifest.js'; +import { makeAgent, makeWorktree } from '../../test-fixtures.js'; + +// ---- Mocks ---- + +function makeManifest(overrides?: Partial<Manifest>): Manifest { + return { + version: 1, + projectRoot: '/tmp/project', + sessionName: 'ppg', + worktrees: { 'wt-abc123': makeWorktree({ agents: { 'ag-test1234': makeAgent() } }) }, + createdAt: '2026-01-01T00:00:00.000Z', + updatedAt: '2026-01-01T00:00:00.000Z', + ...overrides, + }; +} + +vi.mock('../../core/manifest.js', () => ({ + requireManifest: vi.fn(), + findAgent: vi.fn(), + updateManifest: vi.fn(), +})); + +vi.mock('../../core/agent.js', () => ({ + killAgent: vi.fn(), + checkAgentStatus: vi.fn(), + restartAgent: vi.fn(), +})); + +vi.mock('../../core/tmux.js', () => ({ + capturePane: vi.fn(), + sendKeys: vi.fn(), + sendLiteral: vi.fn(), + sendRawKeys: vi.fn(), +})); + +vi.mock('../../core/config.js', () => ({ + loadConfig: vi.fn(), + resolveAgentConfig: vi.fn(), +})); + +vi.mock('node:fs/promises', async () => { + const actual = await vi.importActual<typeof import('node:fs/promises')>('node:fs/promises'); + return { + ...actual, + default: { + ...actual, + readFile: vi.fn(), + }, + }; +}); + +import { requireManifest, findAgent, updateManifest } from '../../core/manifest.js'; +import { killAgent, checkAgentStatus, restartAgent } from '../../core/agent.js'; +import * as tmux from '../../core/tmux.js'; +import { loadConfig, 
resolveAgentConfig } from '../../core/config.js'; +import fs from 'node:fs/promises'; + +const PROJECT_ROOT = '/tmp/project'; + +async function buildApp() { + const app = Fastify(); + await app.register(agentRoutes, { prefix: '/api', projectRoot: PROJECT_ROOT }); + return app; +} + +function setupAgentMocks(manifest?: Manifest) { + const m = manifest ?? makeManifest(); + vi.mocked(requireManifest).mockResolvedValue(m); + vi.mocked(findAgent).mockReturnValue({ + worktree: m.worktrees['wt-abc123'], + agent: m.worktrees['wt-abc123'].agents['ag-test1234'], + }); + return m; +} + +beforeEach(() => { + vi.clearAllMocks(); +}); + +// ---------- GET /api/agents/:id/logs ---------- + +describe('GET /api/agents/:id/logs', () => { + test('returns captured pane output with default 200 lines', async () => { + setupAgentMocks(); + vi.mocked(tmux.capturePane).mockResolvedValue('line1\nline2\nline3'); + + const app = await buildApp(); + const res = await app.inject({ method: 'GET', url: '/api/agents/ag-test1234/logs' }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.agentId).toBe('ag-test1234'); + expect(body.output).toBe('line1\nline2\nline3'); + expect(body.lines).toBe(200); + expect(tmux.capturePane).toHaveBeenCalledWith('ppg:1.0', 200); + }); + + test('respects custom lines parameter', async () => { + setupAgentMocks(); + vi.mocked(tmux.capturePane).mockResolvedValue('output'); + + const app = await buildApp(); + const res = await app.inject({ method: 'GET', url: '/api/agents/ag-test1234/logs?lines=50' }); + + expect(res.statusCode).toBe(200); + expect(res.json().lines).toBe(50); + expect(tmux.capturePane).toHaveBeenCalledWith('ppg:1.0', 50); + }); + + test('caps lines at 10000', async () => { + setupAgentMocks(); + vi.mocked(tmux.capturePane).mockResolvedValue('output'); + + const app = await buildApp(); + const res = await app.inject({ method: 'GET', url: '/api/agents/ag-test1234/logs?lines=999999' }); + + expect(res.statusCode).toBe(200); + 
expect(res.json().lines).toBe(10000); + expect(tmux.capturePane).toHaveBeenCalledWith('ppg:1.0', 10000); + }); + + test('returns 400 for invalid lines', async () => { + const app = await buildApp(); + const res = await app.inject({ method: 'GET', url: '/api/agents/ag-test1234/logs?lines=abc' }); + + expect(res.statusCode).toBe(400); + expect(res.json().code).toBe('INVALID_ARGS'); + }); + + test('returns 404 for unknown agent', async () => { + vi.mocked(requireManifest).mockResolvedValue(makeManifest()); + vi.mocked(findAgent).mockReturnValue(undefined); + + const app = await buildApp(); + const res = await app.inject({ method: 'GET', url: '/api/agents/ag-unknown/logs' }); + + expect(res.statusCode).toBe(404); + expect(res.json().code).toBe('AGENT_NOT_FOUND'); + }); + + test('returns 410 when pane no longer exists', async () => { + setupAgentMocks(); + vi.mocked(tmux.capturePane).mockRejectedValue(new Error('pane not found')); + + const app = await buildApp(); + const res = await app.inject({ method: 'GET', url: '/api/agents/ag-test1234/logs' }); + + expect(res.statusCode).toBe(410); + expect(res.json().code).toBe('PANE_NOT_FOUND'); + }); +}); + +// ---------- POST /api/agents/:id/send ---------- + +describe('POST /api/agents/:id/send', () => { + test('sends text with Enter by default', async () => { + setupAgentMocks(); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/send', + payload: { text: 'hello' }, + }); + + expect(res.statusCode).toBe(200); + expect(res.json().success).toBe(true); + expect(res.json().mode).toBe('with-enter'); + expect(tmux.sendKeys).toHaveBeenCalledWith('ppg:1.0', 'hello'); + }); + + test('sends literal text without Enter', async () => { + setupAgentMocks(); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/send', + payload: { text: 'hello', mode: 'literal' }, + }); + + expect(res.statusCode).toBe(200); + 
expect(tmux.sendLiteral).toHaveBeenCalledWith('ppg:1.0', 'hello'); + }); + + test('sends raw tmux keys', async () => { + setupAgentMocks(); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/send', + payload: { text: 'C-c', mode: 'raw' }, + }); + + expect(res.statusCode).toBe(200); + expect(tmux.sendRawKeys).toHaveBeenCalledWith('ppg:1.0', 'C-c'); + }); + + test('rejects invalid mode', async () => { + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/send', + payload: { text: 'hello', mode: 'invalid' }, + }); + + expect(res.statusCode).toBe(400); + }); + + test('rejects missing text field', async () => { + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/send', + payload: {}, + }); + + expect(res.statusCode).toBe(400); + }); + + test('returns 404 for unknown agent', async () => { + vi.mocked(requireManifest).mockResolvedValue(makeManifest()); + vi.mocked(findAgent).mockReturnValue(undefined); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-unknown/send', + payload: { text: 'hello' }, + }); + + expect(res.statusCode).toBe(404); + }); +}); + +// ---------- POST /api/agents/:id/kill ---------- + +describe('POST /api/agents/:id/kill', () => { + test('kills a running agent', async () => { + const manifest = makeManifest(); + vi.mocked(requireManifest).mockResolvedValue(manifest); + vi.mocked(findAgent) + .mockReturnValueOnce({ + worktree: manifest.worktrees['wt-abc123'], + agent: manifest.worktrees['wt-abc123'].agents['ag-test1234'], + }) + .mockReturnValueOnce({ + worktree: manifest.worktrees['wt-abc123'], + agent: manifest.worktrees['wt-abc123'].agents['ag-test1234'], + }); + vi.mocked(checkAgentStatus).mockResolvedValue({ status: 'running' }); + vi.mocked(killAgent).mockResolvedValue(undefined); + 
vi.mocked(updateManifest).mockImplementation(async (_root, updater) => { + const m = makeManifest(); + return updater(m); + }); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/kill', + }); + + expect(res.statusCode).toBe(200); + expect(res.json().success).toBe(true); + expect(res.json().killed).toBe(true); + expect(checkAgentStatus).toHaveBeenCalled(); + expect(killAgent).toHaveBeenCalled(); + expect(updateManifest).toHaveBeenCalled(); + }); + + test('returns success without killing already-stopped agent', async () => { + const stoppedAgent = makeAgent({ status: 'gone' }); + const manifest = makeManifest({ + worktrees: { 'wt-abc123': makeWorktree({ agents: { 'ag-test1234': stoppedAgent } }) }, + }); + vi.mocked(requireManifest).mockResolvedValue(manifest); + vi.mocked(findAgent).mockReturnValue({ + worktree: manifest.worktrees['wt-abc123'], + agent: stoppedAgent, + }); + vi.mocked(checkAgentStatus).mockResolvedValue({ status: 'gone' }); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/kill', + }); + + expect(res.statusCode).toBe(200); + expect(res.json().message).toMatch(/already gone/); + expect(killAgent).not.toHaveBeenCalled(); + }); + + test('uses live tmux status instead of stale manifest status', async () => { + // Agent shows "running" in manifest but tmux says "idle" + const agent = makeAgent({ status: 'running' }); + const manifest = makeManifest({ + worktrees: { 'wt-abc123': makeWorktree({ agents: { 'ag-test1234': agent } }) }, + }); + vi.mocked(requireManifest).mockResolvedValue(manifest); + vi.mocked(findAgent).mockReturnValue({ + worktree: manifest.worktrees['wt-abc123'], + agent, + }); + vi.mocked(checkAgentStatus).mockResolvedValue({ status: 'idle' }); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/kill', + }); + + 
expect(res.statusCode).toBe(200); + expect(res.json().message).toMatch(/already idle/); + expect(killAgent).not.toHaveBeenCalled(); + }); + + test('returns 404 for unknown agent', async () => { + vi.mocked(requireManifest).mockResolvedValue(makeManifest()); + vi.mocked(findAgent).mockReturnValue(undefined); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-unknown/kill', + }); + + expect(res.statusCode).toBe(404); + }); +}); + +// ---------- POST /api/agents/:id/restart ---------- + +describe('POST /api/agents/:id/restart', () => { + function setupRestartMocks() { + const manifest = makeManifest(); + vi.mocked(requireManifest).mockResolvedValue(manifest); + vi.mocked(findAgent).mockReturnValue({ + worktree: manifest.worktrees['wt-abc123'], + agent: manifest.worktrees['wt-abc123'].agents['ag-test1234'], + }); + vi.mocked(loadConfig).mockResolvedValue({ + sessionName: 'ppg', + defaultAgent: 'claude', + agents: { claude: { name: 'claude', command: 'claude', interactive: true } }, + envFiles: [], + symlinkNodeModules: true, + }); + vi.mocked(resolveAgentConfig).mockReturnValue({ + name: 'claude', + command: 'claude', + interactive: true, + }); + vi.mocked(restartAgent).mockResolvedValue({ + oldAgentId: 'ag-test1234', + newAgentId: 'ag-new12345', + tmuxTarget: 'ppg:2', + sessionId: 'session-uuid-123', + worktreeId: 'wt-abc123', + worktreeName: 'feature-auth', + branch: 'ppg/feature-auth', + path: '/tmp/project/.worktrees/wt-abc123', + }); + return manifest; + } + + test('restarts a running agent with original prompt', async () => { + setupRestartMocks(); + vi.mocked(fs.readFile).mockResolvedValue('original prompt'); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/restart', + payload: {}, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.oldAgentId).toBe('ag-test1234'); + 
expect(body.newAgent.id).toBe('ag-new12345'); + expect(restartAgent).toHaveBeenCalled(); + }); + + test('uses prompt override when provided', async () => { + setupRestartMocks(); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/restart', + payload: { prompt: 'new task' }, + }); + + expect(res.statusCode).toBe(200); + expect(fs.readFile).not.toHaveBeenCalled(); + expect(restartAgent).toHaveBeenCalledWith( + expect.objectContaining({ promptText: 'new task' }), + ); + }); + + test('skips kill for non-running agent', async () => { + const idleAgent = makeAgent({ status: 'idle' }); + const manifest = makeManifest({ + worktrees: { 'wt-abc123': makeWorktree({ agents: { 'ag-test1234': idleAgent } }) }, + }); + vi.mocked(requireManifest).mockResolvedValue(manifest); + vi.mocked(findAgent).mockReturnValue({ + worktree: manifest.worktrees['wt-abc123'], + agent: idleAgent, + }); + vi.mocked(loadConfig).mockResolvedValue({ + sessionName: 'ppg', + defaultAgent: 'claude', + agents: { claude: { name: 'claude', command: 'claude', interactive: true } }, + envFiles: [], + symlinkNodeModules: true, + }); + vi.mocked(resolveAgentConfig).mockReturnValue({ + name: 'claude', + command: 'claude', + interactive: true, + }); + vi.mocked(fs.readFile).mockResolvedValue('original prompt'); + vi.mocked(restartAgent).mockResolvedValue({ + oldAgentId: 'ag-test1234', + newAgentId: 'ag-new12345', + tmuxTarget: 'ppg:2', + sessionId: 'session-uuid-123', + worktreeId: 'wt-abc123', + worktreeName: 'feature-auth', + branch: 'ppg/feature-auth', + path: '/tmp/project/.worktrees/wt-abc123', + }); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/restart', + payload: {}, + }); + + expect(res.statusCode).toBe(200); + // restartAgent handles the kill-or-skip internally + expect(restartAgent).toHaveBeenCalledWith( + expect.objectContaining({ oldAgent: expect.objectContaining({ 
status: 'idle' }) }), + ); + }); + + test('returns 400 when prompt file missing and no override', async () => { + const manifest = makeManifest(); + vi.mocked(requireManifest).mockResolvedValue(manifest); + vi.mocked(findAgent).mockReturnValue({ + worktree: manifest.worktrees['wt-abc123'], + agent: manifest.worktrees['wt-abc123'].agents['ag-test1234'], + }); + vi.mocked(loadConfig).mockResolvedValue({ + sessionName: 'ppg', + defaultAgent: 'claude', + agents: { claude: { name: 'claude', command: 'claude', interactive: true } }, + envFiles: [], + symlinkNodeModules: true, + }); + vi.mocked(fs.readFile).mockRejectedValue(new Error('ENOENT')); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-test1234/restart', + payload: {}, + }); + + expect(res.statusCode).toBe(400); + expect(res.json().code).toBe('PROMPT_NOT_FOUND'); + }); + + test('returns 404 for unknown agent', async () => { + vi.mocked(requireManifest).mockResolvedValue(makeManifest()); + vi.mocked(findAgent).mockReturnValue(undefined); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/agents/ag-unknown/restart', + payload: {}, + }); + + expect(res.statusCode).toBe(404); + }); +}); diff --git a/src/server/routes/agents.ts b/src/server/routes/agents.ts new file mode 100644 index 0000000..8ef30de --- /dev/null +++ b/src/server/routes/agents.ts @@ -0,0 +1,289 @@ +import type { FastifyInstance, FastifyPluginOptions } from 'fastify'; +import { requireManifest, findAgent, updateManifest } from '../../core/manifest.js'; +import { killAgent, checkAgentStatus, restartAgent } from '../../core/agent.js'; +import { loadConfig, resolveAgentConfig } from '../../core/config.js'; +import * as tmux from '../../core/tmux.js'; +import { PpgError, AgentNotFoundError } from '../../lib/errors.js'; +import { agentPromptFile } from '../../lib/paths.js'; +import fs from 'node:fs/promises'; + +export interface AgentRoutesOptions 
 extends FastifyPluginOptions { + projectRoot: string; +} + +const MAX_LINES = 10_000; + +function mapErrorToStatus(err: unknown): number { + if (err instanceof PpgError) { + switch (err.code) { + case 'AGENT_NOT_FOUND': return 404; + case 'PANE_NOT_FOUND': return 410; + case 'NOT_INITIALIZED': return 503; + case 'MANIFEST_LOCK': return 409; + case 'TMUX_NOT_FOUND': return 503; + case 'INVALID_ARGS': return 400; + case 'PROMPT_NOT_FOUND': return 400; + default: return 500; + } + } + return 500; +} + +function errorPayload(err: unknown): { error: string; code?: string } { + if (err instanceof PpgError) { + return { error: err.message, code: err.code }; + } + return { error: err instanceof Error ? err.message : String(err) }; +} + +export async function agentRoutes( + app: FastifyInstance, + opts: AgentRoutesOptions, +): Promise<void> { + const { projectRoot } = opts; + + // ---------- GET /api/agents/:id/logs ---------- + app.get<{ + Params: { id: string }; + Querystring: { lines?: string }; + }>('/agents/:id/logs', { + schema: { + params: { + type: 'object', + required: ['id'], + properties: { id: { type: 'string' } }, + }, + querystring: { + type: 'object', + properties: { lines: { type: 'string' } }, + }, + }, + }, async (request, reply) => { + try { + const { id } = request.params; + const lines = request.query.lines + ? Math.min(parseInt(request.query.lines, 10), MAX_LINES) + : 200; + + if (isNaN(lines) || lines < 1) { + return reply.code(400).send({ error: 'lines must be a positive integer', code: 'INVALID_ARGS' }); + } + + const manifest = await requireManifest(projectRoot); + const found = findAgent(manifest, id); + if (!found) throw new AgentNotFoundError(id); + + const { agent } = found; + + let content: string; + try { + content = await tmux.capturePane(agent.tmuxTarget, lines); + } catch { + throw new PpgError( + `Could not capture pane for agent ${id}. 
Pane may no longer exist.`, + 'PANE_NOT_FOUND', + ); + } + + return { + agentId: agent.id, + status: agent.status, + tmuxTarget: agent.tmuxTarget, + lines, + output: content, + }; + } catch (err) { + const status = mapErrorToStatus(err); + return reply.code(status).send(errorPayload(err)); + } + }); + + // ---------- POST /api/agents/:id/send ---------- + app.post<{ + Params: { id: string }; + Body: { text: string; mode?: 'raw' | 'literal' | 'with-enter' }; + }>('/agents/:id/send', { + schema: { + params: { + type: 'object', + required: ['id'], + properties: { id: { type: 'string' } }, + }, + body: { + type: 'object', + required: ['text'], + properties: { + text: { type: 'string' }, + mode: { type: 'string', enum: ['raw', 'literal', 'with-enter'] }, + }, + }, + }, + }, async (request, reply) => { + try { + const { id } = request.params; + const { text, mode = 'with-enter' } = request.body; + + const manifest = await requireManifest(projectRoot); + const found = findAgent(manifest, id); + if (!found) throw new AgentNotFoundError(id); + + const { agent } = found; + + switch (mode) { + case 'raw': + await tmux.sendRawKeys(agent.tmuxTarget, text); + break; + case 'literal': + await tmux.sendLiteral(agent.tmuxTarget, text); + break; + case 'with-enter': + default: + await tmux.sendKeys(agent.tmuxTarget, text); + break; + } + + return { + success: true, + agentId: agent.id, + tmuxTarget: agent.tmuxTarget, + text, + mode, + }; + } catch (err) { + const status = mapErrorToStatus(err); + return reply.code(status).send(errorPayload(err)); + } + }); + + // ---------- POST /api/agents/:id/kill ---------- + app.post<{ + Params: { id: string }; + }>('/agents/:id/kill', { + schema: { + params: { + type: 'object', + required: ['id'], + properties: { id: { type: 'string' } }, + }, + }, + }, async (request, reply) => { + try { + const { id } = request.params; + + const manifest = await requireManifest(projectRoot); + const found = findAgent(manifest, id); + if (!found) throw new 
AgentNotFoundError(id); + + const { agent } = found; + + // Refresh live status from tmux (manifest may be stale in long-lived server) + const { status: liveStatus } = await checkAgentStatus(agent, projectRoot); + + if (liveStatus !== 'running') { + return { + success: true, + agentId: agent.id, + message: `Agent already ${liveStatus}`, + }; + } + + await killAgent(agent); + + await updateManifest(projectRoot, (m) => { + const f = findAgent(m, id); + if (f) { + f.agent.status = 'gone'; + } + return m; + }); + + return { + success: true, + agentId: agent.id, + killed: true, + }; + } catch (err) { + const status = mapErrorToStatus(err); + return reply.code(status).send(errorPayload(err)); + } + }); + + // ---------- POST /api/agents/:id/restart ---------- + app.post<{ + Params: { id: string }; + Body: { prompt?: string; agent?: string }; + }>('/agents/:id/restart', { + schema: { + params: { + type: 'object', + required: ['id'], + properties: { id: { type: 'string' } }, + }, + body: { + type: 'object', + properties: { + prompt: { type: 'string' }, + agent: { type: 'string' }, + }, + }, + }, + }, async (request, reply) => { + try { + const { id } = request.params; + const { prompt: promptOverride, agent: agentType } = request.body ?? {}; + + const manifest = await requireManifest(projectRoot); + const config = await loadConfig(projectRoot); + + const found = findAgent(manifest, id); + if (!found) throw new AgentNotFoundError(id); + + const { worktree: wt, agent: oldAgent } = found; + + // Read original prompt or use override + let promptText: string; + if (promptOverride) { + promptText = promptOverride; + } else { + const pFile = agentPromptFile(projectRoot, oldAgent.id); + try { + promptText = await fs.readFile(pFile, 'utf-8'); + } catch { + throw new PpgError( + `Could not read original prompt for agent ${oldAgent.id}. Provide a prompt in the request body.`, + 'PROMPT_NOT_FOUND', + ); + } + } + + const agentConfig = resolveAgentConfig(config, agentType ?? 
oldAgent.agentType); + + const result = await restartAgent({ + projectRoot, + agentId: oldAgent.id, + worktree: wt, + oldAgent, + sessionName: manifest.sessionName, + agentConfig, + promptText, + }); + + return { + success: true, + oldAgentId: result.oldAgentId, + newAgent: { + id: result.newAgentId, + tmuxTarget: result.tmuxTarget, + sessionId: result.sessionId, + worktreeId: result.worktreeId, + worktreeName: result.worktreeName, + branch: result.branch, + path: result.path, + }, + }; + } catch (err) { + const status = mapErrorToStatus(err); + return reply.code(status).send(errorPayload(err)); + } + }); +} diff --git a/src/server/routes/config.test.ts b/src/server/routes/config.test.ts new file mode 100644 index 0000000..9e1a551 --- /dev/null +++ b/src/server/routes/config.test.ts @@ -0,0 +1,252 @@ +import fs from 'node:fs/promises'; +import os from 'node:os'; +import path from 'node:path'; +import Fastify, { type FastifyInstance } from 'fastify'; +import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest'; +import { configRoutes } from './config.js'; + +let tmpDir: string; +let globalDir: string; +let app: FastifyInstance; + +vi.mock('../../lib/paths.js', async () => { + const actual = await vi.importActual('../../lib/paths.js'); + return { + ...actual, + globalTemplatesDir: () => path.join(globalDir, 'templates'), + globalPromptsDir: () => path.join(globalDir, 'prompts'), + }; +}); + +beforeEach(async () => { + tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'ppg-config-routes-')); + globalDir = path.join(tmpDir, 'global'); + await fs.mkdir(path.join(globalDir, 'templates'), { recursive: true }); + await fs.mkdir(path.join(globalDir, 'prompts'), { recursive: true }); +}); + +afterEach(async () => { + await app?.close(); + await fs.rm(tmpDir, { recursive: true, force: true }); +}); + +function buildApp(projectRoot: string) { + app = Fastify({ logger: false }); + app.register(configRoutes, { projectRoot }); + return app; +} + +// --- GET 
/api/config --- + +describe('GET /api/config', () => { + test('given no config.yaml, should return default config', async () => { + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/config' }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.sessionName).toBe('ppg'); + expect(body.defaultAgent).toBe('claude'); + expect(body.agents).toBeInstanceOf(Array); + expect(body.agents.length).toBeGreaterThanOrEqual(3); + expect(body.agents.find((a: { name: string }) => a.name === 'claude')).toBeTruthy(); + expect(body.envFiles).toEqual(['.env', '.env.local']); + expect(body.symlinkNodeModules).toBe(true); + }); + + test('given user config.yaml, should merge with defaults', async () => { + const ppgDir = path.join(tmpDir, '.ppg'); + await fs.mkdir(ppgDir, { recursive: true }); + await fs.writeFile( + path.join(ppgDir, 'config.yaml'), + 'sessionName: custom\ndefaultAgent: codex\nagents:\n myagent:\n name: myagent\n command: myagent --fast\n interactive: false\n', + ); + + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/config' }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.sessionName).toBe('custom'); + expect(body.defaultAgent).toBe('codex'); + expect(body.agents.find((a: { name: string }) => a.name === 'claude')).toBeTruthy(); + const myagent = body.agents.find((a: { name: string }) => a.name === 'myagent'); + expect(myagent).toBeTruthy(); + expect(myagent.command).toBe('myagent --fast'); + expect(myagent.interactive).toBe(false); + }); + + test('given invalid YAML, should return 500 error', async () => { + const ppgDir = path.join(tmpDir, '.ppg'); + await fs.mkdir(ppgDir, { recursive: true }); + await fs.writeFile(path.join(ppgDir, 'config.yaml'), ':\n bad: [yaml\n'); + + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/config' }); + + expect(res.statusCode).toBe(500); + 
}); +}); + +// --- GET /api/templates --- + +describe('GET /api/templates', () => { + test('given no template dirs, should return empty array', async () => { + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/templates' }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.templates).toEqual([]); + }); + + test('given local template, should return source and metadata', async () => { + const tplDir = path.join(tmpDir, '.ppg', 'templates'); + await fs.mkdir(tplDir, { recursive: true }); + await fs.writeFile( + path.join(tplDir, 'task.md'), + '# Task Template\n\nDo {{TASK}} in {{WORKTREE_PATH}}\n', + ); + + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/templates' }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.templates).toHaveLength(1); + expect(body.templates[0]).toEqual({ + name: 'task', + description: 'Task Template', + variables: ['TASK', 'WORKTREE_PATH'], + source: 'local', + }); + }); + + test('given global template, should return with global source', async () => { + await fs.writeFile( + path.join(globalDir, 'templates', 'shared.md'), + '# Global Template\n\n{{VAR}}\n', + ); + + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/templates' }); + + const body = res.json(); + expect(body.templates).toHaveLength(1); + expect(body.templates[0].name).toBe('shared'); + expect(body.templates[0].source).toBe('global'); + }); + + test('given same name in local and global, should prefer local', async () => { + const tplDir = path.join(tmpDir, '.ppg', 'templates'); + await fs.mkdir(tplDir, { recursive: true }); + await fs.writeFile(path.join(tplDir, 'shared.md'), '# Local Version\n'); + await fs.writeFile(path.join(globalDir, 'templates', 'shared.md'), '# Global Version\n'); + + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: 
'/api/templates' }); + + const body = res.json(); + const shared = body.templates.filter((t: { name: string }) => t.name === 'shared'); + expect(shared).toHaveLength(1); + expect(shared[0].source).toBe('local'); + expect(shared[0].description).toBe('Local Version'); + }); + + test('given duplicate variables, should deduplicate', async () => { + const tplDir = path.join(tmpDir, '.ppg', 'templates'); + await fs.mkdir(tplDir, { recursive: true }); + await fs.writeFile( + path.join(tplDir, 'dupe.md'), + '{{NAME}} and {{NAME}} and {{OTHER}}\n', + ); + + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/templates' }); + + const body = res.json(); + expect(body.templates[0].variables).toEqual(['NAME', 'OTHER']); + }); +}); + +// --- GET /api/prompts --- + +describe('GET /api/prompts', () => { + test('given no prompt dirs, should return empty array', async () => { + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/prompts' }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.prompts).toEqual([]); + }); + + test('given local prompt, should return source and metadata', async () => { + const pDir = path.join(tmpDir, '.ppg', 'prompts'); + await fs.mkdir(pDir, { recursive: true }); + await fs.writeFile( + path.join(pDir, 'review.md'), + '# Code Review\n\nReview {{BRANCH}} for issues\n', + ); + + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/prompts' }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.prompts).toHaveLength(1); + expect(body.prompts[0]).toEqual({ + name: 'review', + description: 'Code Review', + variables: ['BRANCH'], + source: 'local', + }); + }); + + test('given same name in local and global, should prefer local', async () => { + const localDir = path.join(tmpDir, '.ppg', 'prompts'); + await fs.mkdir(localDir, { recursive: true }); + await 
fs.writeFile(path.join(localDir, 'shared.md'), '# Local Shared\n'); + await fs.writeFile(path.join(globalDir, 'prompts', 'shared.md'), '# Global Shared\n'); + + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/prompts' }); + + const body = res.json(); + const shared = body.prompts.filter((p: { name: string }) => p.name === 'shared'); + expect(shared).toHaveLength(1); + expect(shared[0].source).toBe('local'); + expect(shared[0].description).toBe('Local Shared'); + }); + + test('given global-only prompt, should return with global source', async () => { + await fs.writeFile( + path.join(globalDir, 'prompts', 'global-only.md'), + '# Global Prompt\n\n{{WHO}}\n', + ); + + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/prompts' }); + + const body = res.json(); + expect(body.prompts).toHaveLength(1); + expect(body.prompts[0].name).toBe('global-only'); + expect(body.prompts[0].source).toBe('global'); + expect(body.prompts[0].variables).toEqual(['WHO']); + }); + + test('given non-.md files, should ignore them', async () => { + const pDir = path.join(tmpDir, '.ppg', 'prompts'); + await fs.mkdir(pDir, { recursive: true }); + await fs.writeFile(path.join(pDir, 'valid.md'), '# Valid Prompt\n'); + await fs.writeFile(path.join(pDir, 'readme.txt'), 'not a prompt'); + await fs.writeFile(path.join(pDir, '.hidden'), 'hidden file'); + + const server = buildApp(tmpDir); + const res = await server.inject({ method: 'GET', url: '/api/prompts' }); + + const body = res.json(); + expect(body.prompts).toHaveLength(1); + expect(body.prompts[0].name).toBe('valid'); + }); +}); diff --git a/src/server/routes/config.ts b/src/server/routes/config.ts new file mode 100644 index 0000000..81d490d --- /dev/null +++ b/src/server/routes/config.ts @@ -0,0 +1,68 @@ +import type { FastifyInstance } from 'fastify'; +import { loadConfig } from '../../core/config.js'; +import { listTemplatesWithSource } from 
 '../../core/template.js'; +import { listPromptsWithSource, enrichEntryMetadata } from '../../core/prompt.js'; +import { + templatesDir, + globalTemplatesDir, + promptsDir, + globalPromptsDir, +} from '../../lib/paths.js'; + +export interface ConfigRouteOptions { + projectRoot: string; +} + +// Auth note: these routes expect the parent server to register an onRequest +// auth hook before this plugin (e.g. Bearer token via createAuthHook). + +export async function configRoutes( + app: FastifyInstance, + opts: ConfigRouteOptions, +): Promise<void> { + const { projectRoot } = opts; + + // GET /api/config — agent configuration from config.yaml + app.get('/api/config', async () => { + const config = await loadConfig(projectRoot); + return { + sessionName: config.sessionName, + defaultAgent: config.defaultAgent, + agents: Object.values(config.agents), + envFiles: config.envFiles, + symlinkNodeModules: config.symlinkNodeModules, + }; + }); + + // GET /api/templates — templates with source tracking + app.get('/api/templates', async () => { + const entries = await listTemplatesWithSource(projectRoot); + const templates = await Promise.all( + entries.map(({ name, source }) => + enrichEntryMetadata( + name, + source, + templatesDir(projectRoot), + globalTemplatesDir(), + ), + ), + ); + return { templates }; + }); + + // GET /api/prompts — prompts with deduplication across local/global + app.get('/api/prompts', async () => { + const entries = await listPromptsWithSource(projectRoot); + const prompts = await Promise.all( + entries.map(({ name, source }) => + enrichEntryMetadata( + name, + source, + promptsDir(projectRoot), + globalPromptsDir(), + ), + ), + ); + return { prompts }; + }); +} diff --git a/src/server/routes/spawn.test.ts b/src/server/routes/spawn.test.ts new file mode 100644 index 0000000..c84fc82 --- /dev/null +++ b/src/server/routes/spawn.test.ts @@ -0,0 +1,353 @@ +import { describe, test, expect, vi, beforeEach } from 'vitest'; +import Fastify from 'fastify'; +import 
type { FastifyInstance } from 'fastify'; +import spawnRoute from './spawn.js'; +import type { SpawnRequestBody, SpawnResponseBody } from './spawn.js'; + +// ─── Mocks ──────────────────────────────────────────────────────────────────── + +vi.mock('../../core/spawn.js', () => ({ + spawnNewWorktree: vi.fn().mockResolvedValue({ + worktreeId: 'wt-abc123', + name: 'my-task', + branch: 'ppg/my-task', + path: '/fake/project/.worktrees/wt-abc123', + tmuxWindow: 'ppg-test:my-task', + agents: [ + { + id: 'ag-agent001', + name: 'claude', + agentType: 'claude', + status: 'running', + tmuxTarget: 'ppg-test:my-task', + prompt: 'Fix the bug', + startedAt: '2025-01-01T00:00:00.000Z', + sessionId: 'sess-uuid-001', + }, + ], + }), + resolvePromptText: vi.fn().mockResolvedValue('Fix the bug'), +})); + +// ─── Helpers ────────────────────────────────────────────────────────────────── + +const PROJECT_ROOT = '/fake/project'; + +async function buildApp(): Promise { + const app = Fastify(); + await app.register(spawnRoute, { projectRoot: PROJECT_ROOT }); + return app; +} + +function postSpawn(app: FastifyInstance, body: Partial) { + return app.inject({ + method: 'POST', + url: '/api/spawn', + payload: body, + }); +} + +// ─── Tests ──────────────────────────────────────────────────────────────────── + +describe('POST /api/spawn', () => { + let app: FastifyInstance; + + beforeEach(async () => { + vi.clearAllMocks(); + app = await buildApp(); + }); + + // ─── Happy Path ───────────────────────────────────────────────────────────── + + test('given valid name and prompt, should spawn worktree with 1 agent', async () => { + const res = await postSpawn(app, { + name: 'my-task', + prompt: 'Fix the bug', + }); + + expect(res.statusCode).toBe(201); + const body = res.json(); + expect(body.worktreeId).toBe('wt-abc123'); + expect(body.name).toBe('my-task'); + expect(body.branch).toBe('ppg/my-task'); + expect(body.agents).toHaveLength(1); + expect(body.agents[0].id).toBe('ag-agent001'); + 
expect(body.agents[0].tmuxTarget).toBe('ppg-test:my-task'); + expect(body.agents[0].sessionId).toBe('sess-uuid-001'); + }); + + test('given all options, should pass them to spawnNewWorktree', async () => { + const { spawnNewWorktree } = await import('../../core/spawn.js'); + + await postSpawn(app, { + name: 'my-task', + prompt: 'Fix the bug', + agent: 'codex', + base: 'develop', + count: 3, + vars: { ISSUE: '42' }, + }); + + expect(vi.mocked(spawnNewWorktree)).toHaveBeenCalledWith({ + projectRoot: PROJECT_ROOT, + name: 'my-task', + promptText: 'Fix the bug', + userVars: { ISSUE: '42' }, + agentName: 'codex', + baseBranch: 'develop', + count: 3, + }); + }); + + test('given template name, should resolve prompt via resolvePromptText', async () => { + const { resolvePromptText } = await import('../../core/spawn.js'); + + await postSpawn(app, { + name: 'my-task', + template: 'review', + }); + + expect(vi.mocked(resolvePromptText)).toHaveBeenCalledWith( + { prompt: undefined, template: 'review' }, + PROJECT_ROOT, + ); + }); + + test('given prompt and template both provided, should use prompt (prompt wins)', async () => { + const { resolvePromptText } = await import('../../core/spawn.js'); + + await postSpawn(app, { + name: 'my-task', + prompt: 'Inline prompt', + template: 'review', + }); + + // resolvePromptText receives both — its implementation short-circuits on prompt + expect(vi.mocked(resolvePromptText)).toHaveBeenCalledWith( + expect.objectContaining({ prompt: 'Inline prompt', template: 'review' }), + PROJECT_ROOT, + ); + }); + + test('given no vars, should pass undefined userVars', async () => { + const { spawnNewWorktree } = await import('../../core/spawn.js'); + + await postSpawn(app, { + name: 'my-task', + prompt: 'Fix it', + }); + + expect(vi.mocked(spawnNewWorktree)).toHaveBeenCalledWith( + expect.objectContaining({ userVars: undefined }), + ); + }); + + // ─── Validation ───────────────────────────────────────────────────────────── + + test('given missing 
name, should return 400', async () => { + const res = await postSpawn(app, { + prompt: 'Fix the bug', + }); + + expect(res.statusCode).toBe(400); + const body = res.json<{ message: string }>(); + expect(body.message).toMatch(/name/i); + }); + + test('given empty name, should return 400', async () => { + const res = await postSpawn(app, { + name: '', + prompt: 'Fix the bug', + }); + + expect(res.statusCode).toBe(400); + }); + + test('given count below 1, should return 400', async () => { + const res = await postSpawn(app, { + name: 'my-task', + prompt: 'Fix the bug', + count: 0, + }); + + expect(res.statusCode).toBe(400); + }); + + test('given count above 20, should return 400', async () => { + const res = await postSpawn(app, { + name: 'my-task', + prompt: 'Fix the bug', + count: 21, + }); + + expect(res.statusCode).toBe(400); + }); + + test('given non-integer count, should return 400', async () => { + const res = await postSpawn(app, { + name: 'my-task', + prompt: 'Fix the bug', + count: 1.5, + }); + + expect(res.statusCode).toBe(400); + }); + + // ─── Input Sanitization ───────────────────────────────────────────────────── + + test('given vars with shell metacharacters in value, should return 400 INVALID_ARGS', async () => { + const res = await postSpawn(app, { + name: 'my-task', + prompt: 'Fix the bug', + vars: { ISSUE: '$(whoami)' }, + }); + + expect(res.statusCode).toBe(400); + const body = res.json<{ message: string; code: string }>(); + expect(body.message).toMatch(/shell metacharacters/i); + expect(body.code).toBe('INVALID_ARGS'); + }); + + test('given vars with shell metacharacters in key, should return 400 INVALID_ARGS', async () => { + const res = await postSpawn(app, { + name: 'my-task', + prompt: 'Fix the bug', + vars: { 'KEY;rm': 'value' }, + }); + + expect(res.statusCode).toBe(400); + const body = res.json<{ message: string; code: string }>(); + expect(body.message).toMatch(/shell metacharacters/i); + expect(body.code).toBe('INVALID_ARGS'); + }); + + 
test('given vars with backtick in value, should reject', async () => { + const res = await postSpawn(app, { + name: 'my-task', + prompt: 'Fix the bug', + vars: { CMD: '`whoami`' }, + }); + + expect(res.statusCode).toBe(400); + const body = res.json<{ message: string; code: string }>(); + expect(body.message).toMatch(/shell metacharacters/i); + expect(body.code).toBe('INVALID_ARGS'); + }); + + test('given safe vars, should pass through', async () => { + const { spawnNewWorktree } = await import('../../core/spawn.js'); + + const res = await postSpawn(app, { + name: 'my-task', + prompt: 'Fix the bug', + vars: { ISSUE: '42', REPO: 'ppg-cli', TAG: 'v1.0.0' }, + }); + + expect(res.statusCode).toBe(201); + expect(vi.mocked(spawnNewWorktree)).toHaveBeenCalledWith( + expect.objectContaining({ + userVars: { ISSUE: '42', REPO: 'ppg-cli', TAG: 'v1.0.0' }, + }), + ); + }); + + // ─── Error Paths ──────────────────────────────────────────────────────────── + + test('given neither prompt nor template, should return 400 with INVALID_ARGS', async () => { + const { resolvePromptText } = await import('../../core/spawn.js'); + const { PpgError } = await import('../../lib/errors.js'); + vi.mocked(resolvePromptText).mockRejectedValueOnce( + new PpgError('Either "prompt" or "template" is required', 'INVALID_ARGS'), + ); + + const res = await postSpawn(app, { + name: 'my-task', + }); + + expect(res.statusCode).toBe(400); + const body = res.json<{ message: string; code: string }>(); + expect(body.message).toMatch(/prompt.*template/i); + expect(body.code).toBe('INVALID_ARGS'); + }); + + test('given unknown agent type, should propagate error', async () => { + const { spawnNewWorktree } = await import('../../core/spawn.js'); + vi.mocked(spawnNewWorktree).mockRejectedValueOnce( + new Error('Unknown agent type: gpt. 
Available: claude, codex'), + ); + + const res = await postSpawn(app, { + name: 'my-task', + prompt: 'Fix it', + agent: 'gpt', + }); + + expect(res.statusCode).toBe(500); + const body = res.json<{ message: string }>(); + expect(body.message).toMatch(/Unknown agent type/); + }); + + test('given template not found, should propagate error', async () => { + const { resolvePromptText } = await import('../../core/spawn.js'); + vi.mocked(resolvePromptText).mockRejectedValueOnce( + new Error("ENOENT: no such file or directory, open '.ppg/templates/nonexistent.md'"), + ); + + const res = await postSpawn(app, { + name: 'my-task', + template: 'nonexistent', + }); + + expect(res.statusCode).toBe(500); + }); + + test('given not initialized error, should return 409', async () => { + const { spawnNewWorktree } = await import('../../core/spawn.js'); + const { PpgError } = await import('../../lib/errors.js'); + vi.mocked(spawnNewWorktree).mockRejectedValueOnce( + new PpgError('Point Guard not initialized in /fake/project', 'NOT_INITIALIZED'), + ); + + const res = await postSpawn(app, { + name: 'my-task', + prompt: 'Fix it', + }); + + expect(res.statusCode).toBe(409); + const body = res.json<{ message: string; code: string }>(); + expect(body.message).toMatch(/not initialized/i); + expect(body.code).toBe('NOT_INITIALIZED'); + }); + + test('given tmux not available, should propagate TmuxNotFoundError', async () => { + const { spawnNewWorktree } = await import('../../core/spawn.js'); + const { PpgError } = await import('../../lib/errors.js'); + vi.mocked(spawnNewWorktree).mockRejectedValueOnce( + new PpgError('tmux is not installed or not in PATH', 'TMUX_NOT_FOUND'), + ); + + const res = await postSpawn(app, { + name: 'my-task', + prompt: 'Fix it', + }); + + expect(res.statusCode).toBe(500); + const body = res.json<{ message: string }>(); + expect(body.message).toMatch(/tmux/i); + }); + + // ─── projectRoot Injection ────────────────────────────────────────────────── + + test('should 
use injected projectRoot, not process.cwd()', async () => { + const { spawnNewWorktree } = await import('../../core/spawn.js'); + + await postSpawn(app, { + name: 'my-task', + prompt: 'Fix it', + }); + + expect(vi.mocked(spawnNewWorktree)).toHaveBeenCalledWith( + expect.objectContaining({ projectRoot: '/fake/project' }), + ); + }); +}); diff --git a/src/server/routes/spawn.ts b/src/server/routes/spawn.ts new file mode 100644 index 0000000..587a17d --- /dev/null +++ b/src/server/routes/spawn.ts @@ -0,0 +1,141 @@ +import type { FastifyInstance, FastifyRequest, FastifyReply } from 'fastify'; +import { spawnNewWorktree, resolvePromptText } from '../../core/spawn.js'; +import { PpgError } from '../../lib/errors.js'; + +export interface SpawnRequestBody { + name: string; + agent?: string; + prompt?: string; + template?: string; + vars?: Record<string, string>; + base?: string; + count?: number; +} + +export interface SpawnResponseBody { + worktreeId: string; + name: string; + branch: string; + agents: Array<{ + id: string; + tmuxTarget: string; + sessionId?: string; + }>; +} + +const spawnBodySchema = { + type: 'object' as const, + required: ['name'], + properties: { + name: { type: 'string' as const, minLength: 1 }, + agent: { type: 'string' as const }, + prompt: { type: 'string' as const }, + template: { type: 'string' as const }, + vars: { + type: 'object' as const, + additionalProperties: { type: 'string' as const }, + }, + base: { type: 'string' as const }, + count: { type: 'integer' as const, minimum: 1, maximum: 20 }, + }, + additionalProperties: false, +}; + +// Shell metacharacters that could be injected via tmux send-keys +const SHELL_META_RE = /[`$\\!;|&()<>{}[\]"'\n\r]/; + +function validateVars(vars: Record<string, string>): void { + for (const [key, value] of Object.entries(vars)) { + if (SHELL_META_RE.test(key)) { + throw new PpgError( + `Var key "${key}" contains shell metacharacters`, + 'INVALID_ARGS', + ); + } + if (SHELL_META_RE.test(value)) { + throw new PpgError( + `Var value for 
"${key}" contains shell metacharacters`, + 'INVALID_ARGS', + ); + } + } +} + +function statusForPpgError(code: string): number { + switch (code) { + case 'INVALID_ARGS': + return 400; + case 'NOT_INITIALIZED': + return 409; + default: + return 500; + } +} + +export interface SpawnRouteOptions { + projectRoot: string; +} + +export default async function spawnRoute( + app: FastifyInstance, + opts: SpawnRouteOptions, +): Promise<void> { + const { projectRoot } = opts; + + app.post( + '/api/spawn', + { schema: { body: spawnBodySchema } }, + async ( + request: FastifyRequest<{ Body: SpawnRequestBody }>, + reply: FastifyReply, + ) => { + try { + const body = request.body; + + // Validate vars for shell safety before any side effects + if (body.vars) { + validateVars(body.vars); + } + + const promptText = await resolvePromptText( + { prompt: body.prompt, template: body.template }, + projectRoot, + ); + + const result = await spawnNewWorktree({ + projectRoot, + name: body.name, + promptText, + userVars: body.vars, + agentName: body.agent, + baseBranch: body.base, + count: body.count, + }); + + const response: SpawnResponseBody = { + worktreeId: result.worktreeId, + name: result.name, + branch: result.branch, + agents: result.agents.map((a) => ({ + id: a.id, + tmuxTarget: a.tmuxTarget, + sessionId: a.sessionId, + })), + }; + + return reply.status(201).send(response); + } catch (err) { + if (err instanceof PpgError) { + return reply.status(statusForPpgError(err.code)).send({ + message: err.message, + code: err.code, + }); + } + + return reply.status(500).send({ + message: err instanceof Error ? 
err.message : 'Internal server error', + }); + } + }, + ); +} diff --git a/src/server/routes/status.test.ts b/src/server/routes/status.test.ts new file mode 100644 index 0000000..d3575bf --- /dev/null +++ b/src/server/routes/status.test.ts @@ -0,0 +1,344 @@ +import { describe, test, expect, vi, beforeEach } from 'vitest'; +import Fastify from 'fastify'; +import type { FastifyInstance } from 'fastify'; +import statusRoutes from './status.js'; +import { makeWorktree, makeAgent } from '../../test-fixtures.js'; +import type { Manifest } from '../../types/manifest.js'; +import { NotInitializedError, ManifestLockError } from '../../lib/errors.js'; + +const PROJECT_ROOT = '/tmp/project'; +const TOKEN = 'test-token-123'; + +const mockManifest: Manifest = { + version: 1, + projectRoot: PROJECT_ROOT, + sessionName: 'ppg-test', + worktrees: { + 'wt-abc123': makeWorktree({ + agents: { + 'ag-test1234': makeAgent(), + }, + }), + }, + createdAt: '2026-01-01T00:00:00.000Z', + updatedAt: '2026-01-01T00:00:00.000Z', +}; + +vi.mock('../../core/manifest.js', () => ({ + readManifest: vi.fn(), + resolveWorktree: vi.fn(), + updateManifest: vi.fn(), +})); + +vi.mock('../../core/agent.js', () => ({ + refreshAllAgentStatuses: vi.fn((m: Manifest) => m), +})); + +vi.mock('execa', () => ({ + execa: vi.fn(), +})); + +import { readManifest, resolveWorktree, updateManifest } from '../../core/manifest.js'; +import { refreshAllAgentStatuses } from '../../core/agent.js'; +import { execa } from 'execa'; + +const mockedUpdateManifest = vi.mocked(updateManifest); +const mockedReadManifest = vi.mocked(readManifest); +const mockedResolveWorktree = vi.mocked(resolveWorktree); +const mockedRefreshAllAgentStatuses = vi.mocked(refreshAllAgentStatuses); +const mockedExeca = vi.mocked(execa); + +function buildApp(): FastifyInstance { + const app = Fastify(); + app.register(statusRoutes, { projectRoot: PROJECT_ROOT, bearerToken: TOKEN }); + return app; +} + +function authHeaders() { + return { authorization: 
`Bearer ${TOKEN}` }; +} + +describe('status routes', () => { + beforeEach(() => { + vi.clearAllMocks(); + + mockedUpdateManifest.mockImplementation(async (_root, updater) => { + return updater(structuredClone(mockManifest)); + }); + mockedReadManifest.mockResolvedValue(structuredClone(mockManifest)); + mockedRefreshAllAgentStatuses.mockImplementation(async (m) => m); + }); + + describe('authentication', () => { + test('given no auth header, should return 401', async () => { + const app = buildApp(); + const res = await app.inject({ method: 'GET', url: '/api/status' }); + expect(res.statusCode).toBe(401); + expect(res.json()).toEqual({ error: 'Unauthorized' }); + }); + + test('given wrong token, should return 401', async () => { + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/status', + headers: { authorization: 'Bearer wrong-token' }, + }); + expect(res.statusCode).toBe(401); + }); + + test('given valid token, should return 200', async () => { + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/status', + headers: authHeaders(), + }); + expect(res.statusCode).toBe(200); + }); + + test('given failed auth, should not execute route handler', async () => { + const app = buildApp(); + await app.inject({ method: 'GET', url: '/api/status' }); + expect(mockedUpdateManifest).not.toHaveBeenCalled(); + }); + }); + + describe('GET /api/status', () => { + test('should return full manifest with lifecycle', async () => { + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/status', + headers: authHeaders(), + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.session).toBe('ppg-test'); + expect(body.worktrees['wt-abc123']).toBeDefined(); + expect(body.worktrees['wt-abc123'].lifecycle).toBe('busy'); + }); + + test('should call refreshAllAgentStatuses', async () => { + const app = buildApp(); + await app.inject({ + method: 'GET', + url: 
'/api/status', + headers: authHeaders(), + }); + + expect(mockedRefreshAllAgentStatuses).toHaveBeenCalled(); + }); + + test('given manifest lock error, should return 503', async () => { + mockedUpdateManifest.mockRejectedValue(new ManifestLockError()); + + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/status', + headers: authHeaders(), + }); + + expect(res.statusCode).toBe(503); + expect(res.json().code).toBe('MANIFEST_LOCK'); + }); + + test('given not initialized error, should return 503', async () => { + mockedUpdateManifest.mockRejectedValue(new NotInitializedError('/tmp/project')); + + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/status', + headers: authHeaders(), + }); + + expect(res.statusCode).toBe(503); + expect(res.json().code).toBe('NOT_INITIALIZED'); + }); + }); + + describe('GET /api/worktrees/:id', () => { + test('given valid worktree id, should return worktree detail', async () => { + mockedResolveWorktree.mockReturnValue(mockManifest.worktrees['wt-abc123']); + + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/worktrees/wt-abc123', + headers: authHeaders(), + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.id).toBe('wt-abc123'); + expect(body.name).toBe('feature-auth'); + expect(body.lifecycle).toBe('busy'); + }); + + test('given worktree name, should resolve by name', async () => { + mockedResolveWorktree.mockReturnValue(mockManifest.worktrees['wt-abc123']); + + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/worktrees/feature-auth', + headers: authHeaders(), + }); + + expect(res.statusCode).toBe(200); + expect(mockedResolveWorktree).toHaveBeenCalledWith(expect.anything(), 'feature-auth'); + }); + + test('given unknown worktree, should return 404', async () => { + mockedResolveWorktree.mockReturnValue(undefined); + + const app = buildApp(); + const res = 
await app.inject({ + method: 'GET', + url: '/api/worktrees/wt-unknown', + headers: authHeaders(), + }); + + expect(res.statusCode).toBe(404); + expect(res.json()).toEqual({ error: 'Worktree not found: wt-unknown' }); + }); + }); + + describe('GET /api/worktrees/:id/diff', () => { + test('given valid worktree, should return numstat diff', async () => { + mockedResolveWorktree.mockReturnValue(mockManifest.worktrees['wt-abc123']); + mockedExeca.mockResolvedValue({ + stdout: '10\t2\tsrc/index.ts\n5\t0\tsrc/utils.ts', + } as never); + + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/worktrees/wt-abc123/diff', + headers: authHeaders(), + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.worktreeId).toBe('wt-abc123'); + expect(body.branch).toBe('ppg/feature-auth'); + expect(body.baseBranch).toBe('main'); + expect(body.files).toEqual([ + { file: 'src/index.ts', added: 10, removed: 2 }, + { file: 'src/utils.ts', added: 5, removed: 0 }, + ]); + }); + + test('given empty diff, should return empty files array', async () => { + mockedResolveWorktree.mockReturnValue(mockManifest.worktrees['wt-abc123']); + mockedExeca.mockResolvedValue({ stdout: '' } as never); + + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/worktrees/wt-abc123/diff', + headers: authHeaders(), + }); + + expect(res.statusCode).toBe(200); + expect(res.json().files).toEqual([]); + }); + + test('given unknown worktree, should return 404', async () => { + mockedResolveWorktree.mockReturnValue(undefined); + + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/worktrees/wt-unknown/diff', + headers: authHeaders(), + }); + + expect(res.statusCode).toBe(404); + expect(res.json()).toEqual({ error: 'Worktree not found: wt-unknown' }); + }); + + test('given missing manifest file, should return 503', async () => { + const enoentError = Object.assign(new Error('not found'), { 
code: 'ENOENT' }); + mockedReadManifest.mockRejectedValue(enoentError); + + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/worktrees/wt-abc123/diff', + headers: authHeaders(), + }); + + expect(res.statusCode).toBe(503); + expect(res.json().code).toBe('NOT_INITIALIZED'); + }); + + test('should call git diff with correct range', async () => { + mockedResolveWorktree.mockReturnValue(mockManifest.worktrees['wt-abc123']); + mockedExeca.mockResolvedValue({ stdout: '' } as never); + + const app = buildApp(); + await app.inject({ + method: 'GET', + url: '/api/worktrees/wt-abc123/diff', + headers: authHeaders(), + }); + + expect(mockedExeca).toHaveBeenCalledWith( + 'git', + ['diff', '--numstat', 'main...ppg/feature-auth'], + expect.objectContaining({ cwd: PROJECT_ROOT }), + ); + }); + + test('given binary files in diff, should treat dash counts as 0', async () => { + mockedResolveWorktree.mockReturnValue(mockManifest.worktrees['wt-abc123']); + mockedExeca.mockResolvedValue({ + stdout: '-\t-\timage.png', + } as never); + + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/worktrees/wt-abc123/diff', + headers: authHeaders(), + }); + + expect(res.json().files).toEqual([ + { file: 'image.png', added: 0, removed: 0 }, + ]); + }); + + test('given git diff failure, should return 500', async () => { + mockedResolveWorktree.mockReturnValue(mockManifest.worktrees['wt-abc123']); + mockedExeca.mockRejectedValue(new Error('git diff failed')); + + const app = buildApp(); + const res = await app.inject({ + method: 'GET', + url: '/api/worktrees/wt-abc123/diff', + headers: authHeaders(), + }); + + expect(res.statusCode).toBe(500); + }); + + test('should use readManifest instead of updateManifest', async () => { + mockedResolveWorktree.mockReturnValue(mockManifest.worktrees['wt-abc123']); + mockedExeca.mockResolvedValue({ stdout: '' } as never); + + const app = buildApp(); + await app.inject({ + method: 'GET', + 
url: '/api/worktrees/wt-abc123/diff', + headers: authHeaders(), + }); + + expect(mockedReadManifest).toHaveBeenCalledWith(PROJECT_ROOT); + expect(mockedUpdateManifest).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/src/server/routes/status.ts b/src/server/routes/status.ts new file mode 100644 index 0000000..d1ee242 --- /dev/null +++ b/src/server/routes/status.ts @@ -0,0 +1,150 @@ +import crypto from 'node:crypto'; +import type { FastifyInstance, FastifyRequest, FastifyReply } from 'fastify'; +import { execa } from 'execa'; +import { readManifest, resolveWorktree, updateManifest } from '../../core/manifest.js'; +import { refreshAllAgentStatuses } from '../../core/agent.js'; +import { computeLifecycle } from '../../core/lifecycle.js'; +import { NotInitializedError, PpgError } from '../../lib/errors.js'; +import { execaEnv } from '../../lib/env.js'; +import type { Manifest } from '../../types/manifest.js'; + +export interface StatusRouteOptions { + projectRoot: string; + bearerToken: string; +} + +function timingSafeEqual(a: string, b: string): boolean { + const aBuffer = Buffer.from(a); + const bBuffer = Buffer.from(b); + if (aBuffer.length !== bBuffer.length) return false; + return crypto.timingSafeEqual(aBuffer, bBuffer); +} + +function parseNumstatLine(line: string): { file: string; added: number; removed: number } { + const [addedRaw = '', removedRaw = '', ...fileParts] = line.split('\t'); + + const parseCount = (value: string): number => { + if (value === '-') return 0; + const parsed = Number.parseInt(value, 10); + return Number.isNaN(parsed) ? 0 : parsed; + }; + + return { + file: fileParts.join('\t'), + added: parseCount(addedRaw), + removed: parseCount(removedRaw), + }; +} + +function authenticate(token: string) { + const expected = `Bearer ${token}`; + return async (request: FastifyRequest, reply: FastifyReply) => { + const auth = request.headers.authorization ?? 
''; + if (!timingSafeEqual(auth, expected)) { + return reply.code(401).send({ error: 'Unauthorized' }); + } + }; +} + +const ppgErrorToStatus: Record<string, number> = { + NOT_INITIALIZED: 503, + MANIFEST_LOCK: 503, + WORKTREE_NOT_FOUND: 404, + AGENT_NOT_FOUND: 404, +}; + +export default async function statusRoutes( + fastify: FastifyInstance, + options: StatusRouteOptions, +): Promise<void> { + const { projectRoot, bearerToken } = options; + + fastify.addHook('onRequest', authenticate(bearerToken)); + + fastify.setErrorHandler((error, _request, reply) => { + if (error instanceof PpgError) { + const status = ppgErrorToStatus[error.code] ?? 500; + reply.code(status).send({ error: error.message, code: error.code }); + return; + } + reply.code(500).send({ error: 'Internal server error' }); + }); + + // GET /api/status — full manifest with live agent statuses + fastify.get('/api/status', async (_request, reply) => { + const manifest = await updateManifest(projectRoot, async (m) => { + return refreshAllAgentStatuses(m, projectRoot); + }); + + const worktrees = Object.fromEntries( + Object.values(manifest.worktrees).map((wt) => [ + wt.id, + { ...wt, lifecycle: computeLifecycle(wt) }, + ]), + ); + + reply.send({ + session: manifest.sessionName, + worktrees, + }); + }); + + // GET /api/worktrees/:id — single worktree detail with refreshed statuses + fastify.get<{ Params: { id: string } }>( + '/api/worktrees/:id', + async (request, reply) => { + const manifest = await updateManifest(projectRoot, async (m) => { + return refreshAllAgentStatuses(m, projectRoot); + }); + + const wt = resolveWorktree(manifest, request.params.id); + if (!wt) { + reply.code(404).send({ error: `Worktree not found: ${request.params.id}` }); + return; + } + + reply.send({ ...wt, lifecycle: computeLifecycle(wt) }); + }, + ); + + // GET /api/worktrees/:id/diff — branch diff (numstat format) + fastify.get<{ Params: { id: string } }>( + '/api/worktrees/:id/diff', + async (request, reply) => { + let manifest: Manifest; + try { 
+ manifest = await readManifest(projectRoot); + } catch (error) { + if ((error as NodeJS.ErrnoException).code === 'ENOENT') { + throw new NotInitializedError(projectRoot); + } + throw error; + } + + const wt = resolveWorktree(manifest, request.params.id); + if (!wt) { + reply.code(404).send({ error: `Worktree not found: ${request.params.id}` }); + return; + } + + const diffRange = `${wt.baseBranch}...${wt.branch}`; + const result = await execa('git', ['diff', '--numstat', diffRange], { + ...execaEnv, + cwd: projectRoot, + }); + + const files = result.stdout + .trim() + .split('\n') + .filter(Boolean) + .map((line) => parseNumstatLine(line)); + + reply.send({ + worktreeId: wt.id, + branch: wt.branch, + baseBranch: wt.baseBranch, + files, + }); + }, + ); +} diff --git a/src/server/routes/worktrees.test.ts b/src/server/routes/worktrees.test.ts new file mode 100644 index 0000000..dad0351 --- /dev/null +++ b/src/server/routes/worktrees.test.ts @@ -0,0 +1,383 @@ +import { describe, test, expect, vi, beforeEach } from 'vitest'; +import Fastify from 'fastify'; +import type { FastifyInstance } from 'fastify'; +import { makeWorktree, makeAgent } from '../../test-fixtures.js'; +import type { Manifest } from '../../types/manifest.js'; +import type { WorktreeEntry } from '../../types/manifest.js'; + +// ---- Mocks ---- + +const mockManifest: Manifest = { + version: 1, + projectRoot: '/tmp/project', + sessionName: 'ppg', + worktrees: {}, + createdAt: '2026-01-01T00:00:00.000Z', + updatedAt: '2026-01-01T00:00:00.000Z', +}; + +vi.mock('../../core/manifest.js', () => ({ + updateManifest: vi.fn(async (_root: string, updater: (m: Manifest) => Manifest | Promise) => { + return updater(structuredClone(mockManifest)); + }), + resolveWorktree: vi.fn(), +})); + +vi.mock('../../core/agent.js', () => ({ + refreshAllAgentStatuses: vi.fn((m: Manifest) => m), +})); + +vi.mock('../../core/merge.js', () => ({ + mergeWorktree: vi.fn(async (_root: string, wt: WorktreeEntry, opts: Record = {}) => 
({ + worktreeId: wt.id, + branch: wt.branch, + baseBranch: wt.baseBranch, + strategy: (opts.strategy as string) ?? 'squash', + cleaned: opts.cleanup !== false, + selfProtected: false, + })), +})); + +vi.mock('../../core/kill.js', () => ({ + killWorktreeAgents: vi.fn(async (_root: string, wt: WorktreeEntry) => { + const killed = Object.values(wt.agents) + .filter((a) => a.status === 'running') + .map((a) => a.id); + return { worktreeId: wt.id, killed }; + }), +})); + +vi.mock('../../core/pr.js', () => ({ + createWorktreePr: vi.fn(async (_root: string, wt: WorktreeEntry) => ({ + worktreeId: wt.id, + branch: wt.branch, + baseBranch: wt.baseBranch, + prUrl: 'https://github.com/owner/repo/pull/1', + })), +})); + +// ---- Imports (after mocks) ---- + +import { resolveWorktree, updateManifest } from '../../core/manifest.js'; +import { mergeWorktree } from '../../core/merge.js'; +import { killWorktreeAgents } from '../../core/kill.js'; +import { createWorktreePr } from '../../core/pr.js'; +import { worktreeRoutes } from './worktrees.js'; + +const PROJECT_ROOT = '/tmp/project'; + +async function buildApp(): Promise { + const app = Fastify(); + app.decorate('projectRoot', PROJECT_ROOT); + await app.register(worktreeRoutes, { prefix: '/api' }); + await app.ready(); + return app; +} + +describe('worktreeRoutes', () => { + beforeEach(() => { + vi.clearAllMocks(); + mockManifest.worktrees = {}; + }); + + // ================================================================== + // POST /api/worktrees/:id/merge + // ================================================================== + describe('POST /api/worktrees/:id/merge', () => { + test('given valid worktree, should merge with squash strategy by default', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: 
'/api/worktrees/wt-abc123/merge', + payload: {}, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.worktreeId).toBe('wt-abc123'); + expect(body.strategy).toBe('squash'); + expect(body.cleaned).toBe(true); + expect(vi.mocked(mergeWorktree)).toHaveBeenCalledWith( + PROJECT_ROOT, wt, { strategy: undefined, cleanup: undefined, force: undefined }, + ); + }); + + test('given strategy no-ff, should pass strategy to mergeWorktree', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-abc123/merge', + payload: { strategy: 'no-ff' }, + }); + + expect(res.statusCode).toBe(200); + expect(res.json().strategy).toBe('no-ff'); + expect(vi.mocked(mergeWorktree)).toHaveBeenCalledWith( + PROJECT_ROOT, wt, expect.objectContaining({ strategy: 'no-ff' }), + ); + }); + + test('given cleanup false, should pass cleanup false', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-abc123/merge', + payload: { cleanup: false }, + }); + + expect(res.statusCode).toBe(200); + expect(res.json().cleaned).toBe(false); + expect(vi.mocked(mergeWorktree)).toHaveBeenCalledWith( + PROJECT_ROOT, wt, expect.objectContaining({ cleanup: false }), + ); + }); + + test('given worktree not found, should return 404', async () => { + vi.mocked(resolveWorktree).mockReturnValue(undefined as unknown as ReturnType); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-nonexist/merge', + payload: {}, + }); + + expect(res.statusCode).toBe(404); + 
expect(res.json().code).toBe('WORKTREE_NOT_FOUND'); + }); + + test('given AGENTS_RUNNING error from core, should return 409', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const { PpgError } = await import('../../lib/errors.js'); + vi.mocked(mergeWorktree).mockRejectedValueOnce( + new PpgError('1 agent(s) still running', 'AGENTS_RUNNING'), + ); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-abc123/merge', + payload: {}, + }); + + expect(res.statusCode).toBe(409); + expect(res.json().code).toBe('AGENTS_RUNNING'); + }); + + test('given force flag, should pass force to mergeWorktree', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-abc123/merge', + payload: { force: true }, + }); + + expect(res.statusCode).toBe(200); + expect(vi.mocked(mergeWorktree)).toHaveBeenCalledWith( + PROJECT_ROOT, wt, expect.objectContaining({ force: true }), + ); + }); + + test('given MERGE_FAILED error from core, should return 500', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const { MergeFailedError } = await import('../../lib/errors.js'); + vi.mocked(mergeWorktree).mockRejectedValueOnce( + new MergeFailedError('Merge failed: conflict'), + ); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-abc123/merge', + payload: {}, + }); + + expect(res.statusCode).toBe(500); + expect(res.json().code).toBe('MERGE_FAILED'); + }); + }); + + // 
================================================================== + // POST /api/worktrees/:id/kill + // ================================================================== + describe('POST /api/worktrees/:id/kill', () => { + test('given worktree with running agents, should kill via core and return killed list', async () => { + const agent1 = makeAgent({ id: 'ag-run00001', status: 'running' }); + const agent2 = makeAgent({ id: 'ag-idle0001', status: 'idle' }); + const wt = makeWorktree({ + id: 'wt-abc123', + agents: { 'ag-run00001': agent1, 'ag-idle0001': agent2 }, + }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-abc123/kill', + payload: {}, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.killed).toEqual(['ag-run00001']); + expect(vi.mocked(killWorktreeAgents)).toHaveBeenCalledWith(PROJECT_ROOT, wt); + }); + + test('given worktree with no running agents, should return empty killed list', async () => { + const agent = makeAgent({ id: 'ag-done0001', status: 'exited' }); + const wt = makeWorktree({ + id: 'wt-abc123', + agents: { 'ag-done0001': agent }, + }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-abc123/kill', + payload: {}, + }); + + expect(res.statusCode).toBe(200); + expect(res.json().killed).toEqual([]); + }); + + test('given worktree not found, should return 404', async () => { + vi.mocked(resolveWorktree).mockReturnValue(undefined as unknown as ReturnType<typeof resolveWorktree>); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-nonexist/kill', + payload: {}, + }); + + expect(res.statusCode).toBe(404); + 
expect(res.json().code).toBe('WORKTREE_NOT_FOUND'); + }); + }); + + // ================================================================== + // POST /api/worktrees/:id/pr + // ================================================================== + describe('POST /api/worktrees/:id/pr', () => { + test('given valid worktree, should create PR and return URL', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-abc123/pr', + payload: { title: 'My PR', body: 'Description' }, + }); + + expect(res.statusCode).toBe(200); + const body = res.json(); + expect(body.success).toBe(true); + expect(body.prUrl).toBe('https://github.com/owner/repo/pull/1'); + expect(body.worktreeId).toBe('wt-abc123'); + expect(vi.mocked(createWorktreePr)).toHaveBeenCalledWith( + PROJECT_ROOT, wt, { title: 'My PR', body: 'Description', draft: undefined }, + ); + }); + + test('given draft flag, should pass draft to createWorktreePr', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const app = await buildApp(); + await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-abc123/pr', + payload: { draft: true }, + }); + + expect(vi.mocked(createWorktreePr)).toHaveBeenCalledWith( + PROJECT_ROOT, wt, expect.objectContaining({ draft: true }), + ); + }); + + test('given worktree not found, should return 404', async () => { + vi.mocked(resolveWorktree).mockReturnValue(undefined as unknown as ReturnType<typeof resolveWorktree>); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-nonexist/pr', + payload: {}, + }); + + expect(res.statusCode).toBe(404); + expect(res.json().code).toBe('WORKTREE_NOT_FOUND'); + }); + + test('given 
GH_NOT_FOUND error from core, should return 502', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const { GhNotFoundError } = await import('../../lib/errors.js'); + vi.mocked(createWorktreePr).mockRejectedValueOnce(new GhNotFoundError()); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-abc123/pr', + payload: {}, + }); + + expect(res.statusCode).toBe(502); + expect(res.json().code).toBe('GH_NOT_FOUND'); + }); + + test('given INVALID_ARGS error from core, should return 400', async () => { + const wt = makeWorktree({ id: 'wt-abc123', agents: {} }); + mockManifest.worktrees['wt-abc123'] = wt; + vi.mocked(resolveWorktree).mockReturnValue(wt); + + const { PpgError } = await import('../../lib/errors.js'); + vi.mocked(createWorktreePr).mockRejectedValueOnce( + new PpgError('Failed to push', 'INVALID_ARGS'), + ); + + const app = await buildApp(); + const res = await app.inject({ + method: 'POST', + url: '/api/worktrees/wt-abc123/pr', + payload: {}, + }); + + expect(res.statusCode).toBe(400); + expect(res.json().code).toBe('INVALID_ARGS'); + }); + }); +}); diff --git a/src/server/routes/worktrees.ts b/src/server/routes/worktrees.ts new file mode 100644 index 0000000..517da5a --- /dev/null +++ b/src/server/routes/worktrees.ts @@ -0,0 +1,124 @@ +import type { FastifyInstance, FastifyReply } from 'fastify'; +import { updateManifest, resolveWorktree } from '../../core/manifest.js'; +import { refreshAllAgentStatuses } from '../../core/agent.js'; +import { mergeWorktree } from '../../core/merge.js'; +import { killWorktreeAgents } from '../../core/kill.js'; +import { createWorktreePr } from '../../core/pr.js'; +import { PpgError, WorktreeNotFoundError } from '../../lib/errors.js'; + +// ------------------------------------------------------------------ +// Fastify plugin — worktree action routes 
+// ------------------------------------------------------------------ + +declare module 'fastify' { + interface FastifyInstance { + projectRoot: string; + } +} + +interface WorktreeParams { + id: string; +} + +interface MergeBody { + strategy?: 'squash' | 'no-ff'; + cleanup?: boolean; + force?: boolean; +} + +interface PrBody { + title?: string; + body?: string; + draft?: boolean; +} + +function errorReply(reply: FastifyReply, err: unknown): FastifyReply { + if (err instanceof PpgError) { + const statusMap: Record<string, number> = { + WORKTREE_NOT_FOUND: 404, + AGENT_NOT_FOUND: 404, + NOT_INITIALIZED: 400, + AGENTS_RUNNING: 409, + MERGE_FAILED: 500, + GH_NOT_FOUND: 502, + INVALID_ARGS: 400, + }; + const status = statusMap[err.code] ?? 500; + return reply.code(status).send({ error: err.message, code: err.code }); + } + const message = err instanceof Error ? err.message : String(err); + return reply.code(500).send({ error: message }); +} + +async function resolveWorktreeFromRequest( + projectRoot: string, + id: string, +) { + const manifest = await updateManifest(projectRoot, async (m) => { + return refreshAllAgentStatuses(m, projectRoot); + }); + + const wt = resolveWorktree(manifest, id); + if (!wt) throw new WorktreeNotFoundError(id); + return wt; +} + +export async function worktreeRoutes(app: FastifyInstance): Promise<void> { + const { projectRoot } = app; + + // ---------------------------------------------------------------- + // POST /api/worktrees/:id/merge + // ---------------------------------------------------------------- + app.post<{ Params: WorktreeParams; Body: MergeBody }>( + '/worktrees/:id/merge', + async (request, reply) => { + try { + const wt = await resolveWorktreeFromRequest(projectRoot, request.params.id); + const { strategy, cleanup, force } = request.body ?? 
{}; + + const result = await mergeWorktree(projectRoot, wt, { strategy, cleanup, force }); + + return { success: true, ...result }; + } catch (err) { + return errorReply(reply, err); + } + }, + ); + + // ---------------------------------------------------------------- + // POST /api/worktrees/:id/kill + // ---------------------------------------------------------------- + app.post<{ Params: WorktreeParams }>( + '/worktrees/:id/kill', + async (request, reply) => { + try { + const wt = await resolveWorktreeFromRequest(projectRoot, request.params.id); + + const result = await killWorktreeAgents(projectRoot, wt); + + return { success: true, ...result }; + } catch (err) { + return errorReply(reply, err); + } + }, + ); + + // ---------------------------------------------------------------- + // POST /api/worktrees/:id/pr + // ---------------------------------------------------------------- + app.post<{ Params: WorktreeParams; Body: PrBody }>( + '/worktrees/:id/pr', + async (request, reply) => { + try { + const wt = await resolveWorktreeFromRequest(projectRoot, request.params.id); + const { title, body, draft } = request.body ?? 
{}; + + const result = await createWorktreePr(projectRoot, wt, { title, body, draft }); + + return { success: true, ...result }; + } catch (err) { + return errorReply(reply, err); + } + }, + ); +} diff --git a/src/server/tls.test.ts b/src/server/tls.test.ts new file mode 100644 index 0000000..7bc050b --- /dev/null +++ b/src/server/tls.test.ts @@ -0,0 +1,253 @@ +import { describe, test, expect, beforeEach, afterEach, vi } from 'vitest'; +import crypto from 'node:crypto'; +import fs from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; + +import { ensureTls, getLanIps, buildPairingUrl } from './tls.js'; +import { + tlsCaKeyPath, + tlsCaCertPath, + tlsServerKeyPath, + tlsServerCertPath, +} from '../lib/paths.js'; + +vi.setConfig({ testTimeout: 30_000 }); + +let tmpDir: string; + +beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ppg-tls-test-')); +}); + +afterEach(() => { + fs.rmSync(tmpDir, { recursive: true, force: true }); +}); + +describe('ensureTls', () => { + test('generates valid PEM certificates', () => { + const bundle = ensureTls(tmpDir); + + expect(bundle.caCert).toMatch(/^-----BEGIN CERTIFICATE-----/); + expect(bundle.caCert).toMatch(/-----END CERTIFICATE-----\n$/); + expect(bundle.caKey).toMatch(/^-----BEGIN PRIVATE KEY-----/); + expect(bundle.serverCert).toMatch(/^-----BEGIN CERTIFICATE-----/); + expect(bundle.serverKey).toMatch(/^-----BEGIN PRIVATE KEY-----/); + }); + + test('CA cert has cA:TRUE and ~10 year validity', () => { + const bundle = ensureTls(tmpDir); + const ca = new crypto.X509Certificate(bundle.caCert); + + expect(ca.subject).toBe('CN=ppg-ca'); + expect(ca.issuer).toBe('CN=ppg-ca'); + expect(ca.ca).toBe(true); + + const notAfter = new Date(ca.validTo); + const yearsFromNow = (notAfter.getTime() - Date.now()) / (1000 * 60 * 60 * 24 * 365); + expect(yearsFromNow).toBeGreaterThan(9); + expect(yearsFromNow).toBeLessThan(11); + }); + + test('server cert is signed by CA with ~1 year validity', () => { + 
const bundle = ensureTls(tmpDir); + const ca = new crypto.X509Certificate(bundle.caCert); + const server = new crypto.X509Certificate(bundle.serverCert); + + expect(server.subject).toBe('CN=ppg-server'); + expect(server.issuer).toBe('CN=ppg-ca'); + expect(server.verify(ca.publicKey)).toBe(true); + expect(server.ca).toBe(false); + + const notAfter = new Date(server.validTo); + const daysFromNow = (notAfter.getTime() - Date.now()) / (1000 * 60 * 60 * 24); + expect(daysFromNow).toBeGreaterThan(360); + expect(daysFromNow).toBeLessThan(370); + }); + + test('server cert includes correct SANs', () => { + const bundle = ensureTls(tmpDir); + const server = new crypto.X509Certificate(bundle.serverCert); + const sanStr = server.subjectAltName ?? ''; + + expect(sanStr).toContain('IP Address:127.0.0.1'); + + for (const ip of bundle.sans) { + expect(sanStr).toContain(`IP Address:${ip}`); + } + }); + + test('persists files with correct permissions', () => { + ensureTls(tmpDir); + + const files = [ + tlsCaKeyPath(tmpDir), + tlsCaCertPath(tmpDir), + tlsServerKeyPath(tmpDir), + tlsServerCertPath(tmpDir), + ]; + + for (const f of files) { + expect(fs.existsSync(f)).toBe(true); + const stat = fs.statSync(f); + expect(stat.mode & 0o777).toBe(0o600); + } + }); + + test('reuses valid certs without rewriting', async () => { + const bundle1 = ensureTls(tmpDir); + const mtime1 = fs.statSync(tlsCaCertPath(tmpDir)).mtimeMs; + + // Small delay to ensure mtime would differ if rewritten + await new Promise((r) => setTimeout(r, 50)); + + const bundle2 = ensureTls(tmpDir); + const mtime2 = fs.statSync(tlsCaCertPath(tmpDir)).mtimeMs; + + expect(bundle2.caFingerprint).toBe(bundle1.caFingerprint); + expect(bundle2.caCert).toBe(bundle1.caCert); + expect(bundle2.serverCert).toBe(bundle1.serverCert); + expect(mtime2).toBe(mtime1); + }); + + test('regenerates server cert when SAN is missing', () => { + const bundle1 = ensureTls(tmpDir); + + // Replace server cert with CA cert (has no SANs matching LAN 
IPs) + fs.writeFileSync(tlsServerCertPath(tmpDir), bundle1.caCert, { mode: 0o600 }); + + const bundle2 = ensureTls(tmpDir); + + // CA should be preserved + expect(bundle2.caCert).toBe(bundle1.caCert); + expect(bundle2.caFingerprint).toBe(bundle1.caFingerprint); + + // Server cert should be regenerated + expect(bundle2.serverCert).not.toBe(bundle1.caCert); + const server = new crypto.X509Certificate(bundle2.serverCert); + expect(server.subject).toBe('CN=ppg-server'); + }); + + test('regenerates server cert when signed by a different CA', () => { + const bundle1 = ensureTls(tmpDir); + const otherDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ppg-tls-test-other-')); + + try { + const otherBundle = ensureTls(otherDir); + fs.writeFileSync(tlsServerCertPath(tmpDir), otherBundle.serverCert, { mode: 0o600 }); + fs.writeFileSync(tlsServerKeyPath(tmpDir), otherBundle.serverKey, { mode: 0o600 }); + + const bundle2 = ensureTls(tmpDir); + const ca = new crypto.X509Certificate(bundle1.caCert); + const server = new crypto.X509Certificate(bundle2.serverCert); + + expect(bundle2.caFingerprint).toBe(bundle1.caFingerprint); + expect(server.verify(ca.publicKey)).toBe(true); + expect(bundle2.serverCert).not.toBe(otherBundle.serverCert); + } finally { + fs.rmSync(otherDir, { recursive: true, force: true }); + } + }); + + test('regenerates server cert when server key does not match cert', () => { + const bundle1 = ensureTls(tmpDir); + const { privateKey } = crypto.generateKeyPairSync('rsa', { modulusLength: 2048 }); + const wrongKey = privateKey.export({ type: 'pkcs8', format: 'pem' }) as string; + fs.writeFileSync(tlsServerKeyPath(tmpDir), wrongKey, { mode: 0o600 }); + + const bundle2 = ensureTls(tmpDir); + const server = new crypto.X509Certificate(bundle2.serverCert); + + expect(bundle2.caFingerprint).toBe(bundle1.caFingerprint); + expect(bundle2.serverKey).not.toBe(wrongKey); + expect(server.checkPrivateKey(crypto.createPrivateKey(bundle2.serverKey))).toBe(true); + }); + + 
test('regenerates everything when CA cert file is missing', () => { + const bundle1 = ensureTls(tmpDir); + + fs.unlinkSync(tlsCaCertPath(tmpDir)); + + const bundle2 = ensureTls(tmpDir); + + expect(bundle2.caFingerprint).not.toBe(bundle1.caFingerprint); + }); + + test('regenerates everything when CA key does not match CA cert', () => { + const bundle1 = ensureTls(tmpDir); + const { privateKey } = crypto.generateKeyPairSync('rsa', { modulusLength: 2048 }); + const wrongCaKey = privateKey.export({ type: 'pkcs8', format: 'pem' }) as string; + fs.writeFileSync(tlsCaKeyPath(tmpDir), wrongCaKey, { mode: 0o600 }); + + const bundle2 = ensureTls(tmpDir); + + expect(bundle2.caFingerprint).not.toBe(bundle1.caFingerprint); + }); + + test('regenerates everything when PEM files contain garbage', () => { + ensureTls(tmpDir); + + // Corrupt both cert files with garbage + fs.writeFileSync(tlsCaCertPath(tmpDir), 'not a cert', { mode: 0o600 }); + fs.writeFileSync(tlsServerCertPath(tmpDir), 'also garbage', { mode: 0o600 }); + + // Should regenerate without throwing + const bundle = ensureTls(tmpDir); + + expect(bundle.caCert).toMatch(/^-----BEGIN CERTIFICATE-----/); + const ca = new crypto.X509Certificate(bundle.caCert); + expect(ca.subject).toBe('CN=ppg-ca'); + }); + + test('CA fingerprint is colon-delimited SHA-256 hex', () => { + const bundle = ensureTls(tmpDir); + + // Format: XX:XX:XX:... 
(32 hex pairs with colons) + expect(bundle.caFingerprint).toMatch(/^([0-9A-F]{2}:){31}[0-9A-F]{2}$/); + }); + + test('CA fingerprint is stable across calls', () => { + const bundle1 = ensureTls(tmpDir); + const bundle2 = ensureTls(tmpDir); + + expect(bundle2.caFingerprint).toBe(bundle1.caFingerprint); + }); +}); + +describe('getLanIps', () => { + test('always includes 127.0.0.1', () => { + const ips = getLanIps(); + expect(ips).toContain('127.0.0.1'); + }); + + test('returns only IPv4 addresses', () => { + const ips = getLanIps(); + for (const ip of ips) { + expect(ip).toMatch(/^\d+\.\d+\.\d+\.\d+$/); + } + }); +}); + +describe('buildPairingUrl', () => { + test('formats ppg:// URL with query params', () => { + const url = buildPairingUrl({ + host: '192.168.1.5', + port: 3000, + caFingerprint: 'AA:BB:CC', + token: 'tok123', + }); + + expect(url).toBe('ppg://connect?host=192.168.1.5&port=3000&ca=AA%3ABB%3ACC&token=tok123'); + }); + + test('encodes special characters in params', () => { + const url = buildPairingUrl({ + host: '10.0.0.1', + port: 443, + caFingerprint: 'AA:BB', + token: 'a b+c', + }); + + expect(url).toContain('token=a+b%2Bc'); + }); +}); diff --git a/src/server/tls.ts b/src/server/tls.ts new file mode 100644 index 0000000..577b671 --- /dev/null +++ b/src/server/tls.ts @@ -0,0 +1,502 @@ +import crypto from 'node:crypto'; +import fs from 'node:fs'; +import os from 'node:os'; + +import { + tlsDir, + tlsCaKeyPath, + tlsCaCertPath, + tlsServerKeyPath, + tlsServerCertPath, +} from '../lib/paths.js'; + +// --------------------------------------------------------------------------- +// Public types +// --------------------------------------------------------------------------- + +export interface TlsBundle { + caCert: string; + caKey: string; + serverCert: string; + serverKey: string; + caFingerprint: string; + sans: string[]; +} + +// --------------------------------------------------------------------------- +// ASN.1 / DER primitives +// 
--------------------------------------------------------------------------- + +function derLength(len: number): Buffer { + if (len < 0x80) return Buffer.from([len]); + if (len < 0x100) return Buffer.from([0x81, len]); + if (len <= 0xffff) return Buffer.from([0x82, (len >> 8) & 0xff, len & 0xff]); + throw new Error(`DER length ${len} exceeds 2-byte encoding`); +} + +function derTlv(tag: number, value: Buffer): Buffer { + return Buffer.concat([Buffer.from([tag]), derLength(value.length), value]); +} + +function derSeq(items: Buffer[]): Buffer { + return derTlv(0x30, Buffer.concat(items)); +} + +function derSet(items: Buffer[]): Buffer { + return derTlv(0x31, Buffer.concat(items)); +} + +function derInteger(n: Buffer | number): Buffer { + let buf: Buffer; + if (typeof n === 'number') { + // Encode small integers — used for version field (0, 2) + if (n === 0) { + buf = Buffer.from([0]); + } else { + const hex = n.toString(16); + buf = Buffer.from(hex.length % 2 ? '0' + hex : hex, 'hex'); + if (buf[0] & 0x80) buf = Buffer.concat([Buffer.from([0]), buf]); + } + } else { + buf = n; + if (buf[0] & 0x80) buf = Buffer.concat([Buffer.from([0]), buf]); + } + return derTlv(0x02, buf); +} + +function derOid(encoded: number[]): Buffer { + return derTlv(0x06, Buffer.from(encoded)); +} + +function derUtf8(s: string): Buffer { + return derTlv(0x0c, Buffer.from(s, 'utf8')); +} + +function derUtcTime(d: Date): Buffer { + const s = + String(d.getUTCFullYear()).slice(2) + + String(d.getUTCMonth() + 1).padStart(2, '0') + + String(d.getUTCDate()).padStart(2, '0') + + String(d.getUTCHours()).padStart(2, '0') + + String(d.getUTCMinutes()).padStart(2, '0') + + String(d.getUTCSeconds()).padStart(2, '0') + + 'Z'; + return derTlv(0x17, Buffer.from(s, 'ascii')); +} + +function derGeneralizedTime(d: Date): Buffer { + const s = + String(d.getUTCFullYear()) + + String(d.getUTCMonth() + 1).padStart(2, '0') + + String(d.getUTCDate()).padStart(2, '0') + + String(d.getUTCHours()).padStart(2, '0') + + 
String(d.getUTCMinutes()).padStart(2, '0') + + String(d.getUTCSeconds()).padStart(2, '0') + + 'Z'; + return derTlv(0x18, Buffer.from(s, 'ascii')); +} + +function derBitString(data: Buffer): Buffer { + // Prepend 0x00 (unused-bits count) + return derTlv(0x03, Buffer.concat([Buffer.from([0]), data])); +} + +function derNull(): Buffer { + return Buffer.from([0x05, 0x00]); +} + +/** Context-tagged explicit wrapper: [tagNum] EXPLICIT */ +function derContextExplicit(tagNum: number, inner: Buffer): Buffer { + return derTlv(0xa0 | tagNum, inner); +} + +/** Context-tagged OCTET STRING wrapper */ +function derContextOctetString(tagNum: number, inner: Buffer): Buffer { + return derTlv(0x80 | tagNum, inner); +} + +// --------------------------------------------------------------------------- +// OIDs +// --------------------------------------------------------------------------- + +// sha256WithRSAEncryption 1.2.840.113549.1.1.11 +const OID_SHA256_RSA = [0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x0b]; +// commonName 2.5.4.3 +const OID_CN = [0x55, 0x04, 0x03]; +// basicConstraints 2.5.29.19 +const OID_BASIC_CONSTRAINTS = [0x55, 0x1d, 0x13]; +// keyUsage 2.5.29.15 +const OID_KEY_USAGE = [0x55, 0x1d, 0x0f]; +// subjectAltName 2.5.29.17 +const OID_SAN = [0x55, 0x1d, 0x11]; + +// --------------------------------------------------------------------------- +// Structural helpers +// --------------------------------------------------------------------------- + +function buildAlgorithmIdentifier(): Buffer { + return derSeq([derOid(OID_SHA256_RSA), derNull()]); +} + +function buildName(cn: string): Buffer { + const rdn = derSet([derSeq([derOid(OID_CN), derUtf8(cn)])]); + return derSeq([rdn]); +} + +function buildValidity(from: Date, to: Date): Buffer { + // Use UTCTime for dates before 2050, GeneralizedTime otherwise + const encodeTime = (d: Date) => + d.getUTCFullYear() < 2050 ? 
derUtcTime(d) : derGeneralizedTime(d); + return derSeq([encodeTime(from), encodeTime(to)]); +} + +function buildBasicConstraintsExt(isCA: boolean, critical: boolean): Buffer { + const value = derSeq(isCA ? [derTlv(0x01, Buffer.from([0xff]))] : []); + const octetValue = derTlv(0x04, value); + const parts: Buffer[] = [derOid(OID_BASIC_CONSTRAINTS)]; + if (critical) parts.push(derTlv(0x01, Buffer.from([0xff]))); + parts.push(octetValue); + return derSeq(parts); +} + +function buildKeyUsageExt(isCA: boolean, critical: boolean): Buffer { + let bits: number; + if (isCA) { + // keyCertSign (5) | cRLSign (6) → byte = 0x06, unused = 1 + bits = 0x06; + } else { + // digitalSignature (0) | keyEncipherment (2) → byte = 0xa0, unused = 5 + bits = 0xa0; + } + const unusedBits = isCA ? 1 : 5; + const bitStringContent = Buffer.from([unusedBits, bits]); + const bitString = derTlv(0x03, bitStringContent); + const octetValue = derTlv(0x04, bitString); + const parts: Buffer[] = [derOid(OID_KEY_USAGE)]; + if (critical) parts.push(derTlv(0x01, Buffer.from([0xff]))); + parts.push(octetValue); + return derSeq(parts); +} + +function buildSanExt(ips: string[]): Buffer { + const names = ips.map((ip) => { + const bytes = ip.split('.').map(Number); + return derContextOctetString(7, Buffer.from(bytes)); + }); + const sanValue = derSeq(names); + const octetValue = derTlv(0x04, sanValue); + return derSeq([derOid(OID_SAN), octetValue]); +} + +function buildExtensions(exts: Buffer[]): Buffer { + return derContextExplicit(3, derSeq(exts)); +} + +// --------------------------------------------------------------------------- +// Certificate generation +// --------------------------------------------------------------------------- + +function generateSerial(): Buffer { + const bytes = crypto.randomBytes(16); + // Ensure positive (clear high bit) + bytes[0] &= 0x7f; + // Ensure non-zero + if (bytes[0] === 0) bytes[0] = 1; + return bytes; +} + +function buildTbs(options: { + serial: Buffer; + issuer: 
Buffer; + subject: Buffer; + validity: Buffer; + publicKeyInfo: Buffer; + extensions: Buffer; +}): Buffer { + return derSeq([ + derContextExplicit(0, derInteger(2)), // v3 + derInteger(options.serial), + buildAlgorithmIdentifier(), + options.issuer, + options.validity, + options.subject, + options.publicKeyInfo, + options.extensions, + ]); +} + +function wrapCertificate(tbs: Buffer, signature: Buffer): Buffer { + return derSeq([tbs, buildAlgorithmIdentifier(), derBitString(signature)]); +} + +function toPem(tag: string, der: Buffer): string { + const b64 = der.toString('base64'); + const lines: string[] = []; + for (let i = 0; i < b64.length; i += 64) { + lines.push(b64.slice(i, i + 64)); + } + return `-----BEGIN ${tag}-----\n${lines.join('\n')}\n-----END ${tag}-----\n`; +} + +function wrapAndSign( + tbs: Buffer, + signingKey: crypto.KeyObject, + subjectKey: crypto.KeyObject, +): { cert: string; key: string } { + const signature = crypto.sign('sha256', tbs, signingKey); + return { + cert: toPem('CERTIFICATE', wrapCertificate(tbs, signature)), + key: subjectKey.export({ type: 'pkcs8', format: 'pem' }) as string, + }; +} + +function buildCertTbs(options: { + issuerCn: string; + subjectCn: string; + validityYears: number; + publicKeyDer: Buffer; + extensions: Buffer[]; +}): Buffer { + const now = new Date(); + const notAfter = new Date(now); + notAfter.setUTCFullYear(notAfter.getUTCFullYear() + options.validityYears); + + return buildTbs({ + serial: generateSerial(), + issuer: buildName(options.issuerCn), + subject: buildName(options.subjectCn), + validity: buildValidity(now, notAfter), + publicKeyInfo: Buffer.from(options.publicKeyDer), + extensions: buildExtensions(options.extensions), + }); +} + +function generateCaCert(): { cert: string; key: string } { + // Self-signed: same keypair for subject and signer + const { publicKey, privateKey } = crypto.generateKeyPairSync('rsa', { modulusLength: 2048 }); + + const tbs = buildCertTbs({ + issuerCn: 'ppg-ca', + 
subjectCn: 'ppg-ca', + validityYears: 10, + publicKeyDer: publicKey.export({ type: 'spki', format: 'der' }), + extensions: [ + buildBasicConstraintsExt(true, true), + buildKeyUsageExt(true, true), + ], + }); + + return wrapAndSign(tbs, privateKey, privateKey); +} + +function generateServerCert(caKey: string, sans: string[]): { cert: string; key: string } { + const { publicKey, privateKey } = crypto.generateKeyPairSync('rsa', { modulusLength: 2048 }); + + const tbs = buildCertTbs({ + issuerCn: 'ppg-ca', + subjectCn: 'ppg-server', + validityYears: 1, + publicKeyDer: publicKey.export({ type: 'spki', format: 'der' }), + extensions: [ + buildBasicConstraintsExt(false, false), + buildKeyUsageExt(false, false), + buildSanExt(sans), + ], + }); + + return wrapAndSign(tbs, crypto.createPrivateKey(caKey), privateKey); +} + +// --------------------------------------------------------------------------- +// LAN IP detection +// --------------------------------------------------------------------------- + +export function getLanIps(): string[] { + const interfaces = os.networkInterfaces(); + const ips = new Set<string>(); + ips.add('127.0.0.1'); + + for (const infos of Object.values(interfaces)) { + if (!infos) continue; + for (const info of infos) { + if (info.family === 'IPv4' && !info.internal) { + ips.add(info.address); + } + } + } + + return [...ips]; +} + +// --------------------------------------------------------------------------- +// Pairing URL +// --------------------------------------------------------------------------- + +export function buildPairingUrl(params: { + host: string; + port: number; + caFingerprint: string; + token: string; +}): string { + const q = new URLSearchParams({ + host: params.host, + port: String(params.port), + ca: params.caFingerprint, + token: params.token, + }); + return `ppg://connect?${q.toString()}`; +} + +// --------------------------------------------------------------------------- +// File I/O and reuse logic +// 
---------------------------------------------------------------------------

/**
 * Load an existing TLS bundle (CA key/cert, server key/cert) from disk.
 * Returns null if any file is missing/unreadable or either cert fails to parse.
 */
function loadTlsBundle(projectRoot: string): TlsBundle | null {
  const paths = [
    tlsCaKeyPath(projectRoot),
    tlsCaCertPath(projectRoot),
    tlsServerKeyPath(projectRoot),
    tlsServerCertPath(projectRoot),
  ];

  const contents: string[] = [];
  for (const p of paths) {
    try {
      contents.push(fs.readFileSync(p, 'utf8'));
    } catch {
      // Any missing file invalidates the whole bundle — caller regenerates.
      return null;
    }
  }

  // Destructuring order matches the `paths` array above.
  const [caKey, caCert, serverKey, serverCert] = contents;

  try {
    const x509 = new crypto.X509Certificate(caCert);
    const serverX509 = new crypto.X509Certificate(serverCert);
    const fingerprint = x509.fingerprint256;
    const sans = parseIpSans(serverX509.subjectAltName);

    return { caCert, caKey, serverCert, serverKey, caFingerprint: fingerprint, sans };
  } catch {
    return null;
  }
}

/**
 * Validate the stored CA: correct self-signed subject/issuer, CA flag set,
 * signature verifies with its own key, private key matches, and at least
 * `minDaysRemaining` days of validity left.
 */
function isCaValid(caCert: string, caKey: string, minDaysRemaining: number): boolean {
  try {
    const x509 = new crypto.X509Certificate(caCert);
    if (x509.subject !== 'CN=ppg-ca' || x509.issuer !== 'CN=ppg-ca' || !x509.ca) {
      return false;
    }
    if (!x509.verify(x509.publicKey)) return false;
    if (!x509.checkPrivateKey(crypto.createPrivateKey(caKey))) return false;

    const notAfter = new Date(x509.validTo);
    // Remaining validity in days.
    const remaining = (notAfter.getTime() - Date.now()) / (1000 * 60 * 60 * 24);
    return remaining > minDaysRemaining;
  } catch {
    return false;
  }
}

/**
 * Validate the stored server certificate: enough validity left, issued by our
 * CA for CN=ppg-server, not itself a CA, signature chains to the CA, private
 * key matches, and its SANs cover every currently-required LAN IP.
 */
function isServerCertValid(
  serverCert: string,
  serverKey: string,
  caCert: string,
  requiredIps: string[],
  minDaysRemaining: number,
): boolean {
  try {
    const caX509 = new crypto.X509Certificate(caCert);
    const serverX509 = new crypto.X509Certificate(serverCert);
    const notAfter = new Date(serverX509.validTo);
    const remaining = (notAfter.getTime() - Date.now()) / (1000 * 60 * 60 * 24);
    if (remaining <= minDaysRemaining) return false;
    if (serverX509.subject !== 'CN=ppg-server' || serverX509.issuer !== caX509.subject) {
      return false;
    }
    // A leaf must never carry the CA flag.
    if (serverX509.ca) return false;
    if (!serverX509.verify(caX509.publicKey)) return false;
    if (!serverX509.checkPrivateKey(crypto.createPrivateKey(serverKey))) return false;

    const certIps = new Set(parseIpSans(serverX509.subjectAltName));

    // Every IP the host currently has must be covered by the cert's SANs.
    return requiredIps.every((ip) => certIps.has(ip));
  } catch {
    return false;
  }
}

// Key material is written 0o600 (owner read/write only).
function writePemFile(filePath: string, content: string): void {
  fs.writeFileSync(filePath, content, { mode: 0o600 });
}

// Extract IPv4 "IP Address:x.x.x.x" entries from an X.509 subjectAltName string.
function parseIpSans(subjectAltName: string | undefined): string[] {
  const sanStr = subjectAltName ?? '';
  return [...sanStr.matchAll(/IP Address:(\d+\.\d+\.\d+\.\d+)/g)].map((m) => m[1]);
}

// ---------------------------------------------------------------------------
// Main entry point
// ---------------------------------------------------------------------------

/**
 * Ensure a usable TLS bundle exists on disk, regenerating as little as
 * possible: reuse everything if valid; re-issue only the server cert while the
 * CA is still good; otherwise mint a fresh CA and server cert.
 */
export function ensureTls(projectRoot: string): TlsBundle {
  const dir = tlsDir(projectRoot);
  fs.mkdirSync(dir, { recursive: true });

  const lanIps = getLanIps();
  const existing = loadTlsBundle(projectRoot);

  if (existing) {
    // Check if everything is still valid
    const caOk = isCaValid(existing.caCert, existing.caKey, 30);
    const serverOk = isServerCertValid(
      existing.serverCert,
      existing.serverKey,
      existing.caCert,
      lanIps,
      7,
    );

    if (caOk && serverOk) {
      return existing;
    }

    // CA still valid — only regenerate server cert
    if (caOk) {
      const server = generateServerCert(existing.caKey, lanIps);
      writePemFile(tlsServerKeyPath(projectRoot), server.key);
      writePemFile(tlsServerCertPath(projectRoot), server.cert);

      const x509 = new crypto.X509Certificate(existing.caCert);
      return {
        caCert: existing.caCert,
        caKey: existing.caKey,
        serverCert: server.cert,
        serverKey: server.key,
        caFingerprint: x509.fingerprint256,
        sans: lanIps,
      };
    }
  }

  // Generate everything fresh
  const ca = generateCaCert();
  const server = generateServerCert(ca.key, lanIps);

  writePemFile(tlsCaKeyPath(projectRoot), ca.key);
  writePemFile(tlsCaCertPath(projectRoot), ca.cert);
  writePemFile(tlsServerKeyPath(projectRoot), server.key);
  writePemFile(tlsServerCertPath(projectRoot), server.cert);

  const x509 = new crypto.X509Certificate(ca.cert);

  return {
    caCert: ca.cert,
    caKey: ca.key,
    serverCert: server.cert,
    serverKey: server.key,
    caFingerprint: x509.fingerprint256,
    sans: lanIps,
  };
}
diff --git a/src/server/ws/events.ts b/src/server/ws/events.ts
new file mode 100644
index 0000000..82878a6
--- /dev/null
+++ b/src/server/ws/events.ts
@@ -0,0 +1,110 @@
import type { AgentStatus, Manifest, WorktreeStatus } from '../../types/manifest.js';

// --- Inbound Commands (client → server) ---

export interface PingCommand {
  type: 'ping';
}

export interface TerminalSubscribeCommand {
  type: 'terminal:subscribe';
  agentId: string;
}

export interface TerminalUnsubscribeCommand {
  type: 'terminal:unsubscribe';
  agentId: string;
}

export interface TerminalInputCommand {
  type: 'terminal:input';
  agentId: string;
  data: string;
}

// Discriminated union over the `type` field.
export type ClientCommand =
  | PingCommand
  | TerminalSubscribeCommand
  | TerminalUnsubscribeCommand
  | TerminalInputCommand;

// --- Outbound Events (server → client) ---

export interface PongEvent {
  type: 'pong';
}

export interface ManifestUpdatedEvent {
  type: 'manifest:updated';
  manifest: Manifest;
}

export interface AgentStatusEvent {
  type: 'agent:status';
  worktreeId: string;
  agentId: string;
  status: AgentStatus;
  worktreeStatus: WorktreeStatus;
}

export interface TerminalOutputEvent {
  type: 'terminal:output';
  agentId: string;
  data: string;
}

export interface ErrorEvent {
  type: 'error';
  code: string;
  message: string;
}

export type ServerEvent =
  | PongEvent
  | ManifestUpdatedEvent
  | AgentStatusEvent
  | TerminalOutputEvent
  | ErrorEvent;

// --- Parsing ---

const VALID_COMMAND_TYPES = new Set([
  'ping',
'terminal:subscribe', + 'terminal:unsubscribe', + 'terminal:input', +]); + +export function parseCommand(raw: string): ClientCommand | null { + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + return null; + } + + if (typeof parsed !== 'object' || parsed === null) return null; + + const obj = parsed as Record; + if (typeof obj.type !== 'string' || !VALID_COMMAND_TYPES.has(obj.type)) return null; + + if (obj.type === 'ping') { + return { type: 'ping' }; + } + + if (obj.type === 'terminal:subscribe' || obj.type === 'terminal:unsubscribe') { + if (typeof obj.agentId !== 'string') return null; + return { type: obj.type, agentId: obj.agentId }; + } + + if (obj.type === 'terminal:input') { + if (typeof obj.agentId !== 'string' || typeof obj.data !== 'string') return null; + return { type: 'terminal:input', agentId: obj.agentId, data: obj.data }; + } + + return null; +} + +export function serializeEvent(event: ServerEvent): string { + return JSON.stringify(event); +} diff --git a/src/server/ws/handler.test.ts b/src/server/ws/handler.test.ts new file mode 100644 index 0000000..532b81f --- /dev/null +++ b/src/server/ws/handler.test.ts @@ -0,0 +1,463 @@ +import { describe, test, expect, afterEach } from 'vitest'; +import http from 'node:http'; +import { WebSocket, type RawData } from 'ws'; +import { createWsHandler, type WsHandler } from './handler.js'; +import { parseCommand, serializeEvent, type ServerEvent } from './events.js'; + +// --- Helpers --- + +function createTestServer(): http.Server { + return http.createServer((_req, res) => { + res.writeHead(404); + res.end(); + }); +} + +function listen(server: http.Server): Promise { + return new Promise((resolve) => { + server.listen(0, '127.0.0.1', () => { + const addr = server.address(); + if (typeof addr === 'object' && addr !== null) { + resolve(addr.port); + } + }); + }); +} + +function closeServer(server: http.Server): Promise { + return new Promise((resolve) => { + server.close(() => resolve()); 
+ }); +} + +function connectWs(port: number, token: string): Promise { + return new Promise((resolve, reject) => { + const ws = new WebSocket(`ws://127.0.0.1:${port}/ws?token=${token}`); + ws.on('open', () => resolve(ws)); + ws.on('error', reject); + }); +} + +function waitForMessage(ws: WebSocket): Promise { + return new Promise((resolve) => { + ws.once('message', (data: RawData) => { + const str = (() => { + if (typeof data === 'string') return data; + if (Buffer.isBuffer(data)) return data.toString('utf-8'); + if (data instanceof ArrayBuffer) return Buffer.from(data).toString('utf-8'); + if (Array.isArray(data)) return Buffer.concat(data).toString('utf-8'); + return ''; + })(); + resolve(JSON.parse(str) as ServerEvent); + }); + }); +} + +/** Wait for a ws client to close or error (rejected upgrades emit error then close) */ +function waitForDisconnect(ws: WebSocket): Promise { + return new Promise((resolve) => { + if (ws.readyState === WebSocket.CLOSED) { + resolve(); + return; + } + ws.on('close', () => resolve()); + ws.on('error', () => { + if (ws.readyState === WebSocket.CLOSED) resolve(); + }); + }); +} + +function send(ws: WebSocket, obj: Record): void { + ws.send(JSON.stringify(obj)); +} + +/** Send a ping and wait for pong — acts as a deterministic sync barrier. */ +async function roundTrip(ws: WebSocket): Promise { + const msg = waitForMessage(ws); + send(ws, { type: 'ping' }); + await msg; +} + +// --- Tests --- + +describe('WebSocket handler', () => { + let server: http.Server; + let handler: WsHandler; + const openSockets: WebSocket[] = []; + + async function setup( + opts: { + validateToken?: (token: string) => boolean | Promise; + onTerminalInput?: (agentId: string, data: string) => void | Promise; + } = {}, + ): Promise { + server = createTestServer(); + const port = await listen(server); + handler = createWsHandler({ + server, + validateToken: opts.validateToken ?? 
((t) => t === 'valid-token'), + onTerminalInput: opts.onTerminalInput, + }); + return port; + } + + async function connect(port: number, token = 'valid-token'): Promise { + const ws = await connectWs(port, token); + openSockets.push(ws); + return ws; + } + + afterEach(async () => { + for (const ws of openSockets) { + if (ws.readyState === WebSocket.OPEN || ws.readyState === WebSocket.CONNECTING) { + ws.close(); + } + } + openSockets.length = 0; + + if (handler) { + await handler.close().catch(() => {}); + } + if (server?.listening) { + await closeServer(server); + } + }); + + describe('connection and auth', () => { + test('accepts connection with valid token', async () => { + const port = await setup(); + const ws = await connect(port); + expect(ws.readyState).toBe(WebSocket.OPEN); + expect(handler.clients.size).toBe(1); + }); + + test('rejects connection with invalid token', async () => { + const port = await setup(); + const ws = new WebSocket(`ws://127.0.0.1:${port}/ws?token=bad-token`); + openSockets.push(ws); + + await waitForDisconnect(ws); + expect(handler.clients.size).toBe(0); + }); + + test('rejects connection with no token', async () => { + const port = await setup(); + const ws = new WebSocket(`ws://127.0.0.1:${port}/ws`); + openSockets.push(ws); + + await waitForDisconnect(ws); + expect(handler.clients.size).toBe(0); + }); + + test('rejects connection on wrong path', async () => { + const port = await setup(); + const ws = new WebSocket(`ws://127.0.0.1:${port}/other?token=valid-token`); + openSockets.push(ws); + + await waitForDisconnect(ws); + expect(handler.clients.size).toBe(0); + }); + + test('supports async token validation', async () => { + const port = await setup({ + validateToken: async (t) => t === 'async-token', + }); + const ws = await connect(port, 'async-token'); + expect(ws.readyState).toBe(WebSocket.OPEN); + }); + }); + + describe('command dispatch', () => { + test('responds to ping with pong', async () => { + const port = await 
setup(); + const ws = await connect(port); + + const msgPromise = waitForMessage(ws); + send(ws, { type: 'ping' }); + + const event = await msgPromise; + expect(event).toEqual({ type: 'pong' }); + }); + + test('sends error for invalid JSON', async () => { + const port = await setup(); + const ws = await connect(port); + + const msgPromise = waitForMessage(ws); + ws.send('not json'); + + const event = await msgPromise; + expect(event.type).toBe('error'); + expect((event as { code: string }).code).toBe('INVALID_COMMAND'); + }); + + test('sends error for unknown command type', async () => { + const port = await setup(); + const ws = await connect(port); + + const msgPromise = waitForMessage(ws); + send(ws, { type: 'unknown' }); + + const event = await msgPromise; + expect(event.type).toBe('error'); + expect((event as { code: string }).code).toBe('INVALID_COMMAND'); + }); + + test('handles terminal:subscribe', async () => { + const port = await setup(); + const ws = await connect(port); + + send(ws, { type: 'terminal:subscribe', agentId: 'ag-12345678' }); + await roundTrip(ws); + + const [client] = handler.clients; + expect(client.subscribedAgents.has('ag-12345678')).toBe(true); + }); + + test('handles terminal:unsubscribe', async () => { + const port = await setup(); + const ws = await connect(port); + + send(ws, { type: 'terminal:subscribe', agentId: 'ag-12345678' }); + await roundTrip(ws); + + send(ws, { type: 'terminal:unsubscribe', agentId: 'ag-12345678' }); + await roundTrip(ws); + + const [client] = handler.clients; + expect(client.subscribedAgents.has('ag-12345678')).toBe(false); + }); + + test('handles terminal:input and calls onTerminalInput', async () => { + let capturedAgentId = ''; + let capturedData = ''; + + const port = await setup({ + onTerminalInput: (agentId, data) => { + capturedAgentId = agentId; + capturedData = data; + }, + }); + const ws = await connect(port); + + send(ws, { type: 'terminal:input', agentId: 'ag-12345678', data: 'hello\n' }); + 
await roundTrip(ws); + + expect(capturedAgentId).toBe('ag-12345678'); + expect(capturedData).toBe('hello\n'); + }); + + test('terminal:input is a no-op when onTerminalInput is not provided', async () => { + const port = await setup(); // no onTerminalInput + const ws = await connect(port); + + send(ws, { type: 'terminal:input', agentId: 'ag-12345678', data: 'hello\n' }); + // Should not throw or send error — verify via round-trip + const msg = waitForMessage(ws); + send(ws, { type: 'ping' }); + const event = await msg; + expect(event).toEqual({ type: 'pong' }); + }); + + test('terminal:input sends error when onTerminalInput throws', async () => { + const port = await setup({ + onTerminalInput: () => { + throw new Error('tmux exploded'); + }, + }); + const ws = await connect(port); + + const msgPromise = waitForMessage(ws); + send(ws, { type: 'terminal:input', agentId: 'ag-12345678', data: 'hello\n' }); + + const event = await msgPromise; + expect(event.type).toBe('error'); + expect((event as { code: string }).code).toBe('TERMINAL_INPUT_FAILED'); + }); + + test('terminal:input sends error when async onTerminalInput rejects', async () => { + const port = await setup({ + onTerminalInput: async () => { + throw new Error('async tmux exploded'); + }, + }); + const ws = await connect(port); + + const msgPromise = waitForMessage(ws); + send(ws, { type: 'terminal:input', agentId: 'ag-12345678', data: 'hello\n' }); + + const event = await msgPromise; + expect(event.type).toBe('error'); + expect((event as { code: string }).code).toBe('TERMINAL_INPUT_FAILED'); + }); + }); + + describe('broadcast and sendEvent', () => { + test('broadcast sends to all connected clients', async () => { + const port = await setup(); + const ws1 = await connect(port); + const ws2 = await connect(port); + + expect(handler.clients.size).toBe(2); + + const msg1 = waitForMessage(ws1); + const msg2 = waitForMessage(ws2); + + handler.broadcast({ + type: 'manifest:updated', + manifest: { + version: 1, + 
projectRoot: '/tmp', + sessionName: 'test', + worktrees: {}, + createdAt: '2025-01-01T00:00:00Z', + updatedAt: '2025-01-01T00:00:00Z', + }, + }); + + const [event1, event2] = await Promise.all([msg1, msg2]); + expect(event1.type).toBe('manifest:updated'); + expect(event2.type).toBe('manifest:updated'); + }); + + test('sendEvent sends to specific client only', async () => { + const port = await setup(); + const ws1 = await connect(port); + const ws2 = await connect(port); + + const [client1] = handler.clients; + handler.sendEvent(client1, { type: 'pong' }); + + // ws1 should receive the pong + const event = await waitForMessage(ws1); + expect(event).toEqual({ type: 'pong' }); + + // ws2 should have no pending messages — verify by sending a ping + // and confirming the next message is the pong, not the earlier event + const msg2 = waitForMessage(ws2); + send(ws2, { type: 'ping' }); + const event2 = await msg2; + expect(event2).toEqual({ type: 'pong' }); + }); + + test('sendEvent skips client with closed socket', async () => { + const port = await setup(); + const ws = await connect(port); + + const [client] = handler.clients; + ws.close(); + await waitForDisconnect(ws); + + // Should not throw when sending to a closed client + handler.sendEvent(client, { type: 'pong' }); + }); + }); + + describe('cleanup', () => { + test('removes client on disconnect', async () => { + const port = await setup(); + const ws = await connect(port); + + expect(handler.clients.size).toBe(1); + + ws.close(); + await waitForDisconnect(ws); + // Use a round-trip on a second connection as a sync barrier + const ws2 = await connect(port); + await roundTrip(ws2); + + expect(handler.clients.size).toBe(1); // only ws2 remains + }); + + test('close() terminates all clients', async () => { + const port = await setup(); + const ws1 = await connect(port); + const ws2 = await connect(port); + + const close1 = waitForDisconnect(ws1); + const close2 = waitForDisconnect(ws2); + + await handler.close(); + 
await Promise.all([close1, close2]); + + expect(handler.clients.size).toBe(0); + }); + + test('close() removes upgrade listener from server', async () => { + const port = await setup(); + await handler.close(); + + // After close, a new WS connection attempt should not be handled + const ws = new WebSocket(`ws://127.0.0.1:${port}/ws?token=valid-token`); + openSockets.push(ws); + + await waitForDisconnect(ws); + expect(handler.clients.size).toBe(0); + }); + }); +}); + +describe('parseCommand', () => { + test('parses ping command', () => { + expect(parseCommand('{"type":"ping"}')).toEqual({ type: 'ping' }); + }); + + test('parses terminal:subscribe', () => { + expect(parseCommand('{"type":"terminal:subscribe","agentId":"ag-123"}')).toEqual({ + type: 'terminal:subscribe', + agentId: 'ag-123', + }); + }); + + test('parses terminal:unsubscribe', () => { + expect(parseCommand('{"type":"terminal:unsubscribe","agentId":"ag-123"}')).toEqual({ + type: 'terminal:unsubscribe', + agentId: 'ag-123', + }); + }); + + test('parses terminal:input', () => { + expect(parseCommand('{"type":"terminal:input","agentId":"ag-123","data":"ls\\n"}')).toEqual({ + type: 'terminal:input', + agentId: 'ag-123', + data: 'ls\n', + }); + }); + + test('returns null for invalid JSON', () => { + expect(parseCommand('not json')).toBeNull(); + }); + + test('returns null for unknown type', () => { + expect(parseCommand('{"type":"unknown"}')).toBeNull(); + }); + + test('returns null for missing required fields', () => { + expect(parseCommand('{"type":"terminal:subscribe"}')).toBeNull(); + expect(parseCommand('{"type":"terminal:input","agentId":"ag-123"}')).toBeNull(); + }); + + test('returns null for non-object', () => { + expect(parseCommand('"string"')).toBeNull(); + expect(parseCommand('42')).toBeNull(); + expect(parseCommand('null')).toBeNull(); + }); +}); + +describe('serializeEvent', () => { + test('serializes pong event', () => { + expect(serializeEvent({ type: 'pong' })).toBe('{"type":"pong"}'); + 
}); + + test('serializes error event', () => { + const event: ServerEvent = { type: 'error', code: 'TEST', message: 'msg' }; + const parsed = JSON.parse(serializeEvent(event)); + expect(parsed).toEqual({ type: 'error', code: 'TEST', message: 'msg' }); + }); + + test('serializes terminal:output event', () => { + const event: ServerEvent = { type: 'terminal:output', agentId: 'ag-1', data: 'hello' }; + const parsed = JSON.parse(serializeEvent(event)); + expect(parsed).toEqual({ type: 'terminal:output', agentId: 'ag-1', data: 'hello' }); + }); +}); diff --git a/src/server/ws/handler.ts b/src/server/ws/handler.ts new file mode 100644 index 0000000..757d690 --- /dev/null +++ b/src/server/ws/handler.ts @@ -0,0 +1,214 @@ +import { URL } from 'node:url'; +import type { Server as HttpServer, IncomingMessage } from 'node:http'; +import { WebSocketServer, WebSocket } from 'ws'; +import type { RawData } from 'ws'; +import type { Duplex } from 'node:stream'; +import { + parseCommand, + serializeEvent, + type ClientCommand, + type ServerEvent, +} from './events.js'; + +// --- Client State --- + +export interface ClientState { + ws: WebSocket; + subscribedAgents: Set; +} + +// --- Handler Options --- + +export interface WsHandlerOptions { + server: HttpServer; + validateToken: (token: string) => boolean | Promise; + onTerminalInput?: (agentId: string, data: string) => void | Promise; +} + +// --- WebSocket Handler --- + +export interface WsHandler { + wss: WebSocketServer; + clients: Set; + broadcast: (event: ServerEvent) => void; + sendEvent: (client: ClientState, event: ServerEvent) => void; + close: () => Promise; +} + +const MAX_PAYLOAD = 65_536; // 64 KB + +export function createWsHandler(options: WsHandlerOptions): WsHandler { + const { server, validateToken, onTerminalInput } = options; + + const wss = new WebSocketServer({ noServer: true, maxPayload: MAX_PAYLOAD }); + const clients = new Set(); + + function sendData(ws: WebSocket, data: string): boolean { + if 
(ws.readyState !== WebSocket.OPEN) return false;
    try {
      ws.send(data);
      return true;
    } catch {
      return false;
    }
  }

  // Normalize the ws library's RawData union into a UTF-8 string.
  function decodeRawData(raw: RawData): string {
    if (typeof raw === 'string') return raw;
    if (Buffer.isBuffer(raw)) return raw.toString('utf-8');
    if (raw instanceof ArrayBuffer) return Buffer.from(raw).toString('utf-8');
    if (Array.isArray(raw)) return Buffer.concat(raw).toString('utf-8');
    return '';
  }

  // Write a minimal HTTP error response on the raw socket and destroy it.
  function rejectUpgrade(socket: Duplex, statusLine: string): void {
    if (socket.destroyed) return;
    try {
      socket.write(`${statusLine}\r\nConnection: close\r\n\r\n`);
    } catch {
      // ignore write errors on broken sockets
    } finally {
      socket.destroy();
    }
  }

  // Send to one client; drop it from the set if the socket is unusable.
  function sendEvent(client: ClientState, event: ServerEvent): void {
    if (!sendData(client.ws, serializeEvent(event))) {
      clients.delete(client);
    }
  }

  // Serialize once, fan out to every client; prune dead sockets as we go.
  function broadcast(event: ServerEvent): void {
    const data = serializeEvent(event);
    for (const client of clients) {
      if (!sendData(client.ws, data)) {
        clients.delete(client);
      }
    }
  }

  function handleCommand(client: ClientState, command: ClientCommand): void {
    switch (command.type) {
      case 'ping':
        sendEvent(client, { type: 'pong' });
        break;

      case 'terminal:subscribe':
        client.subscribedAgents.add(command.agentId);
        break;

      case 'terminal:unsubscribe':
        client.subscribedAgents.delete(command.agentId);
        break;

      case 'terminal:input':
        // Both sync throws and async rejections map to the same client error.
        if (onTerminalInput) {
          try {
            Promise.resolve(onTerminalInput(command.agentId, command.data)).catch(() => {
              sendEvent(client, {
                type: 'error',
                code: 'TERMINAL_INPUT_FAILED',
                message: `Failed to send input to agent ${command.agentId}`,
              });
            });
          } catch {
            sendEvent(client, {
              type: 'error',
              code: 'TERMINAL_INPUT_FAILED',
              message: `Failed to send input to agent ${command.agentId}`,
            });
          }
        }
        break;
    }
  }

  function onUpgrade(request: IncomingMessage, socket: Duplex, head: Buffer): void {
    let url: URL;
    try {
      // The path/query in request.url is all we need; avoid trusting Host header.
      url = new URL(request.url ?? '/', 'http://localhost');
    } catch {
      rejectUpgrade(socket, 'HTTP/1.1 400 Bad Request');
      return;
    }

    if (url.pathname !== '/ws') {
      rejectUpgrade(socket, 'HTTP/1.1 404 Not Found');
      return;
    }

    const token = url.searchParams.get('token');
    if (!token) {
      rejectUpgrade(socket, 'HTTP/1.1 401 Unauthorized');
      return;
    }

    Promise.resolve(validateToken(token))
      .then((valid) => {
        if (socket.destroyed) return;
        if (!valid) {
          rejectUpgrade(socket, 'HTTP/1.1 401 Unauthorized');
          return;
        }

        try {
          wss.handleUpgrade(request, socket, head, (ws) => {
            wss.emit('connection', ws, request);
          });
        } catch {
          rejectUpgrade(socket, 'HTTP/1.1 500 Internal Server Error');
        }
      })
      .catch(() => {
        rejectUpgrade(socket, 'HTTP/1.1 500 Internal Server Error');
      });
  }

  server.on('upgrade', onUpgrade);

  wss.on('connection', (ws: WebSocket) => {
    const client: ClientState = {
      ws,
      subscribedAgents: new Set(),
    };
    clients.add(client);

    ws.on('message', (raw: RawData) => {
      const data = decodeRawData(raw);
      const command = parseCommand(data);

      if (!command) {
        sendEvent(client, {
          type: 'error',
          code: 'INVALID_COMMAND',
          message: 'Could not parse command',
        });
        return;
      }

      handleCommand(client, command);
    });

    ws.on('close', () => {
      clients.delete(client);
    });

    ws.on('error', () => {
      clients.delete(client);
    });
  });

  async function close(): Promise<void> {
    server.removeListener('upgrade', onUpgrade);
    for (const client of clients) {
      client.ws.close(1001, 'Server shutting down');
    }
    // FIX: restored `<void>` — without it `resolve()` with no argument
    // fails to type-check against Promise<unknown>.
    await new Promise<void>((resolve, reject) => {
      wss.close((err) => (err ? reject(err) : resolve()));
    });
    clients.clear();
  }

  return { wss, clients, broadcast, sendEvent, close };
}
diff --git a/src/server/ws/terminal.test.ts b/src/server/ws/terminal.test.ts
new file mode 100644
index 0000000..125e022
--- /dev/null
+++ b/src/server/ws/terminal.test.ts
@@ -0,0 +1,347 @@
import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest';
import { diffLines, TerminalStreamer } from './terminal.js';
import type { TerminalData, TerminalError } from './terminal.js';

// ---------------------------------------------------------------------------
// diffLines — longest common suffix algorithm
// ---------------------------------------------------------------------------

describe('diffLines', () => {
  test('given empty prev, should return all of curr', () => {
    const result = diffLines([], ['line1', 'line2']);
    expect(result).toEqual(['line1', 'line2']);
  });

  test('given empty curr, should return empty', () => {
    const result = diffLines(['line1', 'line2'], []);
    expect(result).toEqual([]);
  });

  test('given identical buffers, should return empty', () => {
    const lines = ['a', 'b', 'c'];
    const result = diffLines(lines, [...lines]);
    expect(result).toEqual([]);
  });

  test('given appended lines, should return only new lines', () => {
    const prev = ['line1', 'line2'];
    const curr = ['line1', 'line2', 'line3', 'line4'];
    const result = diffLines(prev, curr);
    expect(result).toEqual(['line3', 'line4']);
  });

  test('given scrolled buffer with new lines, should return new lines', () => {
    // Terminal scrolled: line1 is gone, lines 2-3 remain, line4 is new
    const prev = ['line1', 'line2', 'line3'];
    const curr = ['line2', 'line3', 'line4'];
    const result = diffLines(prev, curr);
    expect(result).toEqual(['line4']);
  });

  test('given completely different content, should return all of curr', () => {
    const prev = ['aaa', 'bbb'];
    const curr = ['xxx', 'yyy'];
    const result = diffLines(prev,
curr); + expect(result).toEqual(['xxx', 'yyy']); + }); + + test('given partial overlap in scrolled buffer, should detect suffix match', () => { + const prev = ['a', 'b', 'c', 'd']; + const curr = ['c', 'd', 'e', 'f']; + const result = diffLines(prev, curr); + expect(result).toEqual(['e', 'f']); + }); + + test('given single line overlap, should return new lines after overlap', () => { + const prev = ['x', 'y', 'z']; + const curr = ['z', 'new1', 'new2']; + const result = diffLines(prev, curr); + expect(result).toEqual(['new1', 'new2']); + }); + + test('given prev longer than curr with overlap, should return new lines', () => { + const prev = ['a', 'b', 'c', 'd', 'e']; + const curr = ['d', 'e', 'f']; + const result = diffLines(prev, curr); + expect(result).toEqual(['f']); + }); + + test('given trailing empty lines from tmux, should handle correctly', () => { + // capturePane often returns "line1\nline2\n" → split gives trailing '' + const prev = ['line1', 'line2', '']; + const curr = ['line1', 'line2', '', 'line3', '']; + const result = diffLines(prev, curr); + expect(result).toEqual(['line3', '']); + }); +}); + +// --------------------------------------------------------------------------- +// TerminalStreamer +// --------------------------------------------------------------------------- + +describe('TerminalStreamer', () => { + let streamer: TerminalStreamer; + let mockCapture: ReturnType; + + beforeEach(() => { + vi.useFakeTimers(); + mockCapture = vi.fn<(target: string, lines?: number) => Promise>(); + streamer = new TerminalStreamer({ + pollIntervalMs: 500, + capture: mockCapture, + }); + }); + + afterEach(() => { + streamer.destroy(); + vi.useRealTimers(); + }); + + // -- Subscription lifecycle ----------------------------------------------- + + describe('subscription lifecycle', () => { + test('given first subscriber, should start polling', () => { + mockCapture.mockResolvedValue('hello'); + const send = vi.fn(); + + streamer.subscribe('ag-001', 'ppg:1.0', 
send); + + expect(streamer.subscriberCount('ag-001')).toBe(1); + expect(streamer.isPolling('ag-001')).toBe(true); + }); + + test('given second subscriber, should share timer', () => { + mockCapture.mockResolvedValue('hello'); + const send1 = vi.fn(); + const send2 = vi.fn(); + + streamer.subscribe('ag-001', 'ppg:1.0', send1); + streamer.subscribe('ag-001', 'ppg:1.0', send2); + + expect(streamer.subscriberCount('ag-001')).toBe(2); + expect(streamer.isPolling('ag-001')).toBe(true); + }); + + test('given unsubscribe of one, should keep timer for remaining', () => { + mockCapture.mockResolvedValue('hello'); + const send1 = vi.fn(); + const send2 = vi.fn(); + + const unsub1 = streamer.subscribe('ag-001', 'ppg:1.0', send1); + streamer.subscribe('ag-001', 'ppg:1.0', send2); + + unsub1(); + + expect(streamer.subscriberCount('ag-001')).toBe(1); + expect(streamer.isPolling('ag-001')).toBe(true); + }); + + test('given all unsubscribed, should stop polling and cleanup', () => { + mockCapture.mockResolvedValue('hello'); + const send = vi.fn(); + + const unsub = streamer.subscribe('ag-001', 'ppg:1.0', send); + unsub(); + + expect(streamer.subscriberCount('ag-001')).toBe(0); + expect(streamer.isPolling('ag-001')).toBe(false); + }); + + test('given double unsubscribe, should be idempotent', () => { + mockCapture.mockResolvedValue('hello'); + const send = vi.fn(); + + const unsub = streamer.subscribe('ag-001', 'ppg:1.0', send); + unsub(); + unsub(); // second call should not throw + + expect(streamer.subscriberCount('ag-001')).toBe(0); + expect(streamer.isPolling('ag-001')).toBe(false); + }); + + test('given multiple agents, should track independently', () => { + mockCapture.mockResolvedValue('hello'); + const send1 = vi.fn(); + const send2 = vi.fn(); + + streamer.subscribe('ag-001', 'ppg:1.0', send1); + streamer.subscribe('ag-002', 'ppg:1.1', send2); + + expect(streamer.subscriberCount('ag-001')).toBe(1); + expect(streamer.subscriberCount('ag-002')).toBe(1); + 
expect(streamer.isPolling('ag-001')).toBe(true); + expect(streamer.isPolling('ag-002')).toBe(true); + }); + }); + + // -- Polling & diff ------------------------------------------------------- + + describe('polling and diff', () => { + test('given initial content, should send all lines on first poll', async () => { + mockCapture.mockResolvedValue('line1\nline2\nline3'); + const send = vi.fn(); + + streamer.subscribe('ag-001', 'ppg:1.0', send); + + await vi.advanceTimersByTimeAsync(500); + + expect(mockCapture).toHaveBeenCalledWith('ppg:1.0'); + expect(send).toHaveBeenCalledTimes(1); + + const msg: TerminalData = JSON.parse(send.mock.calls[0][0]); + expect(msg.type).toBe('terminal'); + expect(msg.agentId).toBe('ag-001'); + expect(msg.lines).toEqual(['line1', 'line2', 'line3']); + }); + + test('given unchanged content, should not send', async () => { + mockCapture.mockResolvedValue('line1\nline2'); + const send = vi.fn(); + + streamer.subscribe('ag-001', 'ppg:1.0', send); + + await vi.advanceTimersByTimeAsync(500); + expect(send).toHaveBeenCalledTimes(1); + + // Same content on next poll + await vi.advanceTimersByTimeAsync(500); + expect(send).toHaveBeenCalledTimes(1); // No new call + }); + + test('given new lines appended, should send only diff', async () => { + mockCapture.mockResolvedValueOnce('line1\nline2'); + const send = vi.fn(); + + streamer.subscribe('ag-001', 'ppg:1.0', send); + await vi.advanceTimersByTimeAsync(500); + + // New lines appended + mockCapture.mockResolvedValueOnce('line1\nline2\nline3\nline4'); + await vi.advanceTimersByTimeAsync(500); + + expect(send).toHaveBeenCalledTimes(2); + const msg: TerminalData = JSON.parse(send.mock.calls[1][0]); + expect(msg.lines).toEqual(['line3', 'line4']); + }); + + test('given content broadcast to multiple subscribers, should send to all', async () => { + mockCapture.mockResolvedValue('hello'); + const send1 = vi.fn(); + const send2 = vi.fn(); + + streamer.subscribe('ag-001', 'ppg:1.0', send1); + 
streamer.subscribe('ag-001', 'ppg:1.0', send2); + + await vi.advanceTimersByTimeAsync(500); + + expect(send1).toHaveBeenCalledTimes(1); + expect(send2).toHaveBeenCalledTimes(1); + expect(send1.mock.calls[0][0]).toBe(send2.mock.calls[0][0]); + }); + + test('given 500ms interval, should not poll before interval', async () => { + mockCapture.mockResolvedValue('hello'); + const send = vi.fn(); + + streamer.subscribe('ag-001', 'ppg:1.0', send); + + await vi.advanceTimersByTimeAsync(200); + expect(mockCapture).not.toHaveBeenCalled(); + + await vi.advanceTimersByTimeAsync(300); + expect(mockCapture).toHaveBeenCalledTimes(1); + }); + }); + + // -- Error handling ------------------------------------------------------- + + describe('error handling', () => { + test('given pane capture fails, should send error and cleanup', async () => { + const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + mockCapture.mockRejectedValue(new Error('pane not found')); + const send = vi.fn(); + + streamer.subscribe('ag-001', 'ppg:1.0', send); + + await vi.advanceTimersByTimeAsync(500); + + expect(send).toHaveBeenCalledTimes(1); + const msg: TerminalError = JSON.parse(send.mock.calls[0][0]); + expect(msg.type).toBe('terminal:error'); + expect(msg.agentId).toBe('ag-001'); + expect(msg.error).toBe('Pane no longer available'); + + // Original error should be logged + expect(consoleSpy).toHaveBeenCalledWith( + expect.stringContaining('pane not found'), + ); + + // Stream should be cleaned up + expect(streamer.subscriberCount('ag-001')).toBe(0); + expect(streamer.isPolling('ag-001')).toBe(false); + consoleSpy.mockRestore(); + }); + + test('given dead subscriber send throws, should remove subscriber', async () => { + mockCapture.mockResolvedValue('line1'); + const goodSend = vi.fn(); + const badSend = vi.fn().mockImplementation(() => { + throw new Error('connection closed'); + }); + + streamer.subscribe('ag-001', 'ppg:1.0', badSend); + streamer.subscribe('ag-001', 'ppg:1.0', 
goodSend); + + await vi.advanceTimersByTimeAsync(500); + + // Good subscriber got the message + expect(goodSend).toHaveBeenCalledTimes(1); + // Bad subscriber was removed + expect(streamer.subscriberCount('ag-001')).toBe(1); + }); + }); + + // -- Shared timer --------------------------------------------------------- + + describe('shared timer', () => { + test('given shared timer, should only call capture once per interval', async () => { + mockCapture.mockResolvedValue('data'); + const send1 = vi.fn(); + const send2 = vi.fn(); + const send3 = vi.fn(); + + streamer.subscribe('ag-001', 'ppg:1.0', send1); + streamer.subscribe('ag-001', 'ppg:1.0', send2); + streamer.subscribe('ag-001', 'ppg:1.0', send3); + + await vi.advanceTimersByTimeAsync(500); + + // Only one capture call despite three subscribers + expect(mockCapture).toHaveBeenCalledTimes(1); + }); + }); + + // -- destroy -------------------------------------------------------------- + + describe('destroy', () => { + test('given active streams, should clean up everything', async () => { + mockCapture.mockResolvedValue('data'); + const send1 = vi.fn(); + const send2 = vi.fn(); + + streamer.subscribe('ag-001', 'ppg:1.0', send1); + streamer.subscribe('ag-002', 'ppg:1.1', send2); + + streamer.destroy(); + + expect(streamer.subscriberCount('ag-001')).toBe(0); + expect(streamer.subscriberCount('ag-002')).toBe(0); + expect(streamer.isPolling('ag-001')).toBe(false); + expect(streamer.isPolling('ag-002')).toBe(false); + + // No more polling after destroy + await vi.advanceTimersByTimeAsync(1000); + expect(mockCapture).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/src/server/ws/terminal.ts b/src/server/ws/terminal.ts new file mode 100644 index 0000000..1d9defd --- /dev/null +++ b/src/server/ws/terminal.ts @@ -0,0 +1,240 @@ +import { capturePane } from '../../core/tmux.js'; + +// --------------------------------------------------------------------------- +// Types +// 
---------------------------------------------------------------------------
+
+/** A function that sends a message to a connected client. */
+export type SendFn = (message: string) => void;
+
+/** Wire format for terminal data pushed to subscribers. */
+export interface TerminalData {
+  type: 'terminal';
+  agentId: string;
+  lines: string[];
+}
+
+/** Wire format for terminal errors pushed to subscribers. */
+export interface TerminalError {
+  type: 'terminal:error';
+  agentId: string;
+  error: string;
+}
+
+/** Internal state for a single subscriber. */
+interface Subscriber {
+  id: number;
+  send: SendFn;
+}
+
+/** Shared polling state for all subscribers watching the same agent. */
+interface AgentStream {
+  tmuxTarget: string;
+  subscribers: Map<number, Subscriber>;
+  timer: ReturnType<typeof setTimeout> | null;
+  /** Previous captured lines, used by the diff algorithm. */
+  lastLines: string[];
+}
+
+// ---------------------------------------------------------------------------
+// Diff algorithm — longest common suffix
+// ---------------------------------------------------------------------------
+
+/**
+ * Given the previous set of lines and the current set, return only the new
+ * lines that were appended to the terminal buffer.
+ *
+ * Strategy: find the longest suffix of `prev` that is also a prefix of `curr`.
+ * Everything in `curr` after that shared region is new output.
+ *
+ * This handles the common terminal pattern where existing content scrolls up
+ * and new content appears at the bottom. It degrades gracefully when content
+ * is rewritten (e.g. TUI redraw) — in that case the full buffer is sent.
+ */
+export function diffLines(prev: string[], curr: string[]): string[] {
+  if (prev.length === 0) return curr;
+  if (curr.length === 0) return [];
+
+  // Find the longest suffix of prev that matches a prefix of curr.
+  // We search from the longest possible overlap downward.
+  const maxOverlap = Math.min(prev.length, curr.length);
+
+  for (let overlap = maxOverlap; overlap > 0; overlap--) {
+    const prevStart = prev.length - overlap;
+    let match = true;
+    for (let i = 0; i < overlap; i++) {
+      if (prev[prevStart + i] !== curr[i]) {
+        match = false;
+        break;
+      }
+    }
+    if (match) {
+      return curr.slice(overlap);
+    }
+  }
+
+  // No shared suffix/prefix — full content is "new"
+  return curr;
+}
+
+// ---------------------------------------------------------------------------
+// TerminalStreamer — manages per-agent subscriptions and shared polling
+// ---------------------------------------------------------------------------
+
+const POLL_INTERVAL_MS = 500;
+
+export class TerminalStreamer {
+  private streams = new Map<string, AgentStream>();
+  private nextSubscriberId = 1;
+  private readonly pollIntervalMs: number;
+  /** Injectable capture function — defaults to tmux capturePane. */
+  private readonly capture: (target: string, lines?: number) => Promise<string>;
+
+  constructor(options?: {
+    pollIntervalMs?: number;
+    capture?: (target: string, lines?: number) => Promise<string>;
+  }) {
+    this.pollIntervalMs = options?.pollIntervalMs ?? POLL_INTERVAL_MS;
+    this.capture = options?.capture ?? capturePane;
+  }
+
+  /**
+   * Subscribe a client to terminal output for an agent.
+   * Returns an unsubscribe function.
+   */
+  subscribe(
+    agentId: string,
+    tmuxTarget: string,
+    send: SendFn,
+  ): () => void {
+    const subId = this.nextSubscriberId++;
+
+    let stream = this.streams.get(agentId);
+    if (!stream) {
+      stream = {
+        tmuxTarget,
+        subscribers: new Map(),
+        timer: null,
+        lastLines: [],
+      };
+      this.streams.set(agentId, stream);
+    }
+
+    stream.subscribers.set(subId, { id: subId, send });
+
+    // Lazy init: start polling only when the first subscriber arrives
+    if (stream.timer === null) {
+      this.scheduleNextPoll(agentId, stream);
+    }
+
+    // Return unsubscribe function
+    return () => {
+      this.unsubscribe(agentId, subId);
+    };
+  }
+
+  /** Number of active subscribers for an agent.
*/ + subscriberCount(agentId: string): number { + return this.streams.get(agentId)?.subscribers.size ?? 0; + } + + /** Whether a polling timer is active for an agent. */ + isPolling(agentId: string): boolean { + const stream = this.streams.get(agentId); + return stream !== undefined && stream.timer !== null; + } + + /** Tear down all streams and timers. */ + destroy(): void { + for (const stream of this.streams.values()) { + if (stream.timer !== null) { + clearTimeout(stream.timer); + stream.timer = null; + } + stream.subscribers.clear(); + } + this.streams.clear(); + } + + // ----------------------------------------------------------------------- + // Private + // ----------------------------------------------------------------------- + + private unsubscribe(agentId: string, subId: number): void { + const stream = this.streams.get(agentId); + if (!stream) return; + + stream.subscribers.delete(subId); + + // Auto-cleanup: stop polling when no subscribers remain + if (stream.subscribers.size === 0) { + if (stream.timer !== null) { + clearTimeout(stream.timer); + stream.timer = null; + } + this.streams.delete(agentId); + } + } + + private scheduleNextPoll(agentId: string, stream: AgentStream): void { + stream.timer = setTimeout(() => { + void this.poll(agentId, stream); + }, this.pollIntervalMs); + } + + private async poll(agentId: string, stream: AgentStream): Promise { + try { + const raw = await this.capture(stream.tmuxTarget); + const currentLines = raw.split('\n'); + + const newLines = diffLines(stream.lastLines, currentLines); + stream.lastLines = currentLines; + + if (newLines.length > 0) { + const message = JSON.stringify({ + type: 'terminal', + agentId, + lines: newLines, + } satisfies TerminalData); + + for (const sub of stream.subscribers.values()) { + try { + sub.send(message); + } catch { + // Dead client — remove immediately + stream.subscribers.delete(sub.id); + } + } + } + + // Schedule next poll only after this one completes + if 
(stream.subscribers.size > 0) { + this.scheduleNextPoll(agentId, stream); + } + } catch (err) { + // Pane gone / tmux error — notify subscribers and clean up + const errorMsg = JSON.stringify({ + type: 'terminal:error', + agentId, + error: 'Pane no longer available', + } satisfies TerminalError); + + if (err instanceof Error) { + console.error(`[ppg] terminal poll failed for ${agentId}: ${err.message}`); + } + + for (const sub of stream.subscribers.values()) { + try { + sub.send(errorMsg); + } catch { + // ignore + } + } + + // Stop polling — pane is dead + stream.timer = null; + stream.subscribers.clear(); + this.streams.delete(agentId); + } + } +} diff --git a/src/server/ws/watcher.test.ts b/src/server/ws/watcher.test.ts new file mode 100644 index 0000000..f246ddc --- /dev/null +++ b/src/server/ws/watcher.test.ts @@ -0,0 +1,391 @@ +import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest'; +import { makeAgent, makeManifest, makeWorktree } from '../../test-fixtures.js'; +import type { WsEvent } from './watcher.js'; + +// Mock fs (synchronous watch API) +vi.mock('node:fs', () => ({ + default: { + watch: vi.fn((_path: string, _cb: (...args: unknown[]) => void) => ({ + on: vi.fn(), + close: vi.fn(), + })), + }, +})); + +// Mock core modules +vi.mock('../../core/manifest.js', () => ({ + readManifest: vi.fn(), +})); + +vi.mock('../../core/agent.js', () => ({ + checkAgentStatus: vi.fn(), +})); + +vi.mock('../../core/tmux.js', () => ({ + listSessionPanes: vi.fn(), +})); + +vi.mock('../../lib/paths.js', () => ({ + manifestPath: vi.fn(() => '/tmp/project/.ppg/manifest.json'), + ppgDir: vi.fn(() => '/tmp/project/.ppg'), +})); + +import nodefs from 'node:fs'; +import { readManifest } from '../../core/manifest.js'; +import { checkAgentStatus } from '../../core/agent.js'; +import { listSessionPanes } from '../../core/tmux.js'; +import { startManifestWatcher } from './watcher.js'; + +const mockedReadManifest = vi.mocked(readManifest); +const 
mockedCheckAgentStatus = vi.mocked(checkAgentStatus); +const mockedListSessionPanes = vi.mocked(listSessionPanes); +const mockedFsWatch = vi.mocked(nodefs.watch); + +const PROJECT_ROOT = '/tmp/project'; + +/** Trigger the most recent fs.watch callback (simulates file change) */ +function triggerFsWatch(): void { + const calls = mockedFsWatch.mock.calls; + if (calls.length > 0) { + const cb = calls[calls.length - 1][1] as () => void; + cb(); + } +} + +beforeEach(() => { + vi.useFakeTimers(); + vi.clearAllMocks(); + mockedListSessionPanes.mockResolvedValue(new Map()); +}); + +afterEach(() => { + vi.useRealTimers(); +}); + +describe('startManifestWatcher', () => { + describe('fs.watch debounce', () => { + test('given file change, should broadcast manifest:updated after debounce', async () => { + const agent = makeAgent({ id: 'ag-aaa11111', status: 'running' }); + const wt = makeWorktree({ id: 'wt-abc123', agents: { [agent.id]: agent } }); + const manifest = makeManifest({ projectRoot: PROJECT_ROOT, worktrees: { [wt.id]: wt } }); + mockedReadManifest.mockResolvedValue(manifest); + mockedCheckAgentStatus.mockResolvedValue({ status: 'running' }); + + const events: WsEvent[] = []; + const watcher = startManifestWatcher(PROJECT_ROOT, (e) => events.push(e), { + debounceMs: 300, + pollIntervalMs: 60_000, // effectively disable polling for this test + }); + + triggerFsWatch(); + + // Before debounce fires — no event yet + expect(events).toHaveLength(0); + + // Advance past debounce + await vi.advanceTimersByTimeAsync(350); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('manifest:updated'); + expect(events[0].payload).toEqual(manifest); + + watcher.stop(); + }); + + test('given rapid file changes, should debounce to single broadcast', async () => { + const manifest = makeManifest({ projectRoot: PROJECT_ROOT }); + mockedReadManifest.mockResolvedValue(manifest); + + const events: WsEvent[] = []; + const watcher = startManifestWatcher(PROJECT_ROOT, (e) => 
events.push(e), { + debounceMs: 300, + pollIntervalMs: 60_000, + }); + + // Three rapid changes + triggerFsWatch(); + await vi.advanceTimersByTimeAsync(100); + triggerFsWatch(); + await vi.advanceTimersByTimeAsync(100); + triggerFsWatch(); + + // Advance past debounce from last trigger + await vi.advanceTimersByTimeAsync(350); + + expect(events).toHaveLength(1); + expect(events[0].type).toBe('manifest:updated'); + + watcher.stop(); + }); + + test('given manifest read error during file change, should not broadcast', async () => { + mockedReadManifest.mockRejectedValue(new SyntaxError('Unexpected end of JSON')); + + const errors: unknown[] = []; + const events: WsEvent[] = []; + const watcher = startManifestWatcher(PROJECT_ROOT, (e) => events.push(e), { + debounceMs: 300, + pollIntervalMs: 60_000, + onError: (err) => errors.push(err), + }); + + triggerFsWatch(); + await vi.advanceTimersByTimeAsync(350); + + expect(events).toHaveLength(0); + expect(errors).toHaveLength(1); + expect(errors[0]).toBeInstanceOf(SyntaxError); + + watcher.stop(); + }); + }); + + describe('status polling', () => { + test('given agent status change, should broadcast agent:status', async () => { + const agent = makeAgent({ id: 'ag-aaa11111', status: 'running' }); + const wt = makeWorktree({ id: 'wt-abc123', agents: { [agent.id]: agent } }); + const manifest = makeManifest({ projectRoot: PROJECT_ROOT, worktrees: { [wt.id]: wt } }); + mockedReadManifest.mockResolvedValue(manifest); + + // First poll: running, second poll: idle + mockedCheckAgentStatus + .mockResolvedValueOnce({ status: 'running' }) + .mockResolvedValueOnce({ status: 'idle' }); + + const events: WsEvent[] = []; + const watcher = startManifestWatcher(PROJECT_ROOT, (e) => events.push(e), { + debounceMs: 300, + pollIntervalMs: 1000, + }); + + // First poll — establishes baseline, no change event + await vi.advanceTimersByTimeAsync(1000); + expect(events).toHaveLength(0); + + // Second poll — status changed from running → idle + 
await vi.advanceTimersByTimeAsync(1000); + expect(events).toHaveLength(1); + expect(events[0]).toEqual({ + type: 'agent:status', + payload: { + agentId: 'ag-aaa11111', + worktreeId: 'wt-abc123', + status: 'idle', + previousStatus: 'running', + }, + }); + + watcher.stop(); + }); + + test('given multiple agents across worktrees, should broadcast each change', async () => { + const agent1 = makeAgent({ id: 'ag-aaa11111', status: 'running', tmuxTarget: 'ppg:1.0' }); + const agent2 = makeAgent({ id: 'ag-bbb22222', status: 'running', tmuxTarget: 'ppg:2.0' }); + const wt1 = makeWorktree({ id: 'wt-aaa111', name: 'auth', agents: { [agent1.id]: agent1 } }); + const wt2 = makeWorktree({ id: 'wt-bbb222', name: 'api', agents: { [agent2.id]: agent2 } }); + const manifest = makeManifest({ + projectRoot: PROJECT_ROOT, + worktrees: { [wt1.id]: wt1, [wt2.id]: wt2 }, + }); + mockedReadManifest.mockResolvedValue(manifest); + + // First poll: both running. Second poll: agent1 idle, agent2 gone + mockedCheckAgentStatus + .mockResolvedValueOnce({ status: 'running' }) + .mockResolvedValueOnce({ status: 'running' }) + .mockResolvedValueOnce({ status: 'idle' }) + .mockResolvedValueOnce({ status: 'gone' }); + + const events: WsEvent[] = []; + const watcher = startManifestWatcher(PROJECT_ROOT, (e) => events.push(e), { + debounceMs: 300, + pollIntervalMs: 1000, + }); + + // First poll — baseline + await vi.advanceTimersByTimeAsync(1000); + expect(events).toHaveLength(0); + + // Second poll — both changed + await vi.advanceTimersByTimeAsync(1000); + expect(events).toHaveLength(2); + + const statusEvents = events.filter((e) => e.type === 'agent:status'); + expect(statusEvents).toHaveLength(2); + + const payloads = statusEvents.map((e) => e.payload); + expect(payloads).toContainEqual({ + agentId: 'ag-aaa11111', + worktreeId: 'wt-aaa111', + status: 'idle', + previousStatus: 'running', + }); + expect(payloads).toContainEqual({ + agentId: 'ag-bbb22222', + worktreeId: 'wt-bbb222', + status: 'gone', + 
previousStatus: 'running', + }); + + watcher.stop(); + }); + + test('given agent removed between polls, should not emit stale event', async () => { + const agent = makeAgent({ id: 'ag-aaa11111', status: 'running' }); + const wt = makeWorktree({ id: 'wt-abc123', agents: { [agent.id]: agent } }); + const manifestWithAgent = makeManifest({ projectRoot: PROJECT_ROOT, worktrees: { [wt.id]: wt } }); + const manifestEmpty = makeManifest({ projectRoot: PROJECT_ROOT, worktrees: {} }); + + mockedCheckAgentStatus.mockResolvedValue({ status: 'running' }); + + // First poll sees agent, second poll agent's worktree is gone + mockedReadManifest + .mockResolvedValueOnce(manifestWithAgent) + .mockResolvedValueOnce(manifestEmpty); + + const events: WsEvent[] = []; + const watcher = startManifestWatcher(PROJECT_ROOT, (e) => events.push(e), { + debounceMs: 300, + pollIntervalMs: 1000, + }); + + // First poll — baseline with agent + await vi.advanceTimersByTimeAsync(1000); + expect(events).toHaveLength(0); + + // Second poll — agent gone from manifest, no stale event emitted + await vi.advanceTimersByTimeAsync(1000); + expect(events).toHaveLength(0); + + watcher.stop(); + }); + + test('given no status change, should not broadcast', async () => { + const agent = makeAgent({ id: 'ag-aaa11111', status: 'running' }); + const wt = makeWorktree({ id: 'wt-abc123', agents: { [agent.id]: agent } }); + const manifest = makeManifest({ projectRoot: PROJECT_ROOT, worktrees: { [wt.id]: wt } }); + mockedReadManifest.mockResolvedValue(manifest); + mockedCheckAgentStatus.mockResolvedValue({ status: 'running' }); + + const events: WsEvent[] = []; + const watcher = startManifestWatcher(PROJECT_ROOT, (e) => events.push(e), { + debounceMs: 300, + pollIntervalMs: 1000, + }); + + // Two polls — same status each time + await vi.advanceTimersByTimeAsync(1000); + await vi.advanceTimersByTimeAsync(1000); + + expect(events).toHaveLength(0); + + watcher.stop(); + }); + + test('given manifest read failure during 
poll, should skip cycle and report error', async () => { + const readError = new Error('ENOENT'); + mockedReadManifest.mockRejectedValue(readError); + + const errors: unknown[] = []; + const events: WsEvent[] = []; + const watcher = startManifestWatcher(PROJECT_ROOT, (e) => events.push(e), { + debounceMs: 300, + pollIntervalMs: 1000, + onError: (err) => errors.push(err), + }); + + await vi.advanceTimersByTimeAsync(1000); + expect(events).toHaveLength(0); + expect(errors).toHaveLength(1); + expect(errors[0]).toBe(readError); + + watcher.stop(); + }); + + test('given tmux unavailable during poll, should skip cycle and report error', async () => { + const manifest = makeManifest({ projectRoot: PROJECT_ROOT }); + mockedReadManifest.mockResolvedValue(manifest); + const tmuxError = new Error('tmux not found'); + mockedListSessionPanes.mockRejectedValue(tmuxError); + + const errors: unknown[] = []; + const events: WsEvent[] = []; + const watcher = startManifestWatcher(PROJECT_ROOT, (e) => events.push(e), { + debounceMs: 300, + pollIntervalMs: 1000, + onError: (err) => errors.push(err), + }); + + await vi.advanceTimersByTimeAsync(1000); + expect(events).toHaveLength(0); + expect(errors).toHaveLength(1); + expect(errors[0]).toBe(tmuxError); + + watcher.stop(); + }); + }); + + describe('overlap guard', () => { + test('given slow poll, should skip overlapping tick', async () => { + const agent = makeAgent({ id: 'ag-aaa11111', status: 'running' }); + const wt = makeWorktree({ id: 'wt-abc123', agents: { [agent.id]: agent } }); + const manifest = makeManifest({ projectRoot: PROJECT_ROOT, worktrees: { [wt.id]: wt } }); + + // readManifest takes 1500ms on first call (longer than pollInterval) + let callCount = 0; + mockedReadManifest.mockImplementation(() => { + callCount++; + if (callCount === 1) { + return new Promise((resolve) => setTimeout(() => resolve(manifest), 1500)); + } + return Promise.resolve(manifest); + }); + mockedCheckAgentStatus.mockResolvedValue({ status: 
'running' }); + + const events: WsEvent[] = []; + const watcher = startManifestWatcher(PROJECT_ROOT, (e) => events.push(e), { + debounceMs: 300, + pollIntervalMs: 1000, + }); + + // First tick at 1000ms starts a slow poll + await vi.advanceTimersByTimeAsync(1000); + // Second tick at 2000ms — poll still running, should be skipped + await vi.advanceTimersByTimeAsync(1000); + // Finish slow poll at 2500ms + await vi.advanceTimersByTimeAsync(500); + + // readManifest called once for the slow poll, second tick was skipped + expect(callCount).toBe(1); + + watcher.stop(); + }); + }); + + describe('cleanup', () => { + test('stop should clear all timers and close watcher', async () => { + const manifest = makeManifest({ projectRoot: PROJECT_ROOT }); + mockedReadManifest.mockResolvedValue(manifest); + + const events: WsEvent[] = []; + const watcher = startManifestWatcher(PROJECT_ROOT, (e) => events.push(e), { + debounceMs: 300, + pollIntervalMs: 1000, + }); + + watcher.stop(); + + // Trigger fs.watch and advance timers — nothing should fire + triggerFsWatch(); + await vi.advanceTimersByTimeAsync(5000); + + expect(events).toHaveLength(0); + + // Verify fs.watch close was called + const watchResults = mockedFsWatch.mock.results; + expect(watchResults.length).toBeGreaterThan(0); + const fsWatcher = watchResults[0].value as { close: ReturnType }; + expect(fsWatcher.close).toHaveBeenCalled(); + }); + }); +}); diff --git a/src/server/ws/watcher.ts b/src/server/ws/watcher.ts new file mode 100644 index 0000000..7e149dd --- /dev/null +++ b/src/server/ws/watcher.ts @@ -0,0 +1,172 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { readManifest } from '../../core/manifest.js'; +import { checkAgentStatus } from '../../core/agent.js'; +import { listSessionPanes, type PaneInfo } from '../../core/tmux.js'; +import { manifestPath, ppgDir } from '../../lib/paths.js'; +import type { AgentStatus, Manifest } from '../../types/manifest.js'; + +export type WsEvent = + | { 
type: 'manifest:updated'; payload: Manifest }
+  | { type: 'agent:status'; payload: { agentId: string; worktreeId: string; status: AgentStatus; previousStatus: AgentStatus } };
+
+export type BroadcastFn = (event: WsEvent) => void;
+
+export type ErrorFn = (error: unknown) => void;
+
+export interface ManifestWatcher {
+  stop(): void;
+}
+
+/**
+ * Start watching manifest.json for changes and polling agent statuses.
+ *
+ * Two sources of change:
+ *  1. `fs.watch` on manifest.json — fires `manifest:updated` (debounced 300ms)
+ *  2. Status poll at `pollIntervalMs` — fires `agent:status` per changed agent
+ *
+ * Note: `manifest:updated` and `agent:status` are independent streams.
+ * A file change that adds/removes agents won't produce `agent:status` events
+ * until the next poll cycle. Consumers needing immediate agent awareness
+ * should derive it from the `manifest:updated` payload.
+ *
+ * The watcher must start after `ppg init` — if manifest.json doesn't exist
+ * at startup, the parent directory is watched and the file watcher is
+ * established once the manifest appears.
+ */
+export function startManifestWatcher(
+  projectRoot: string,
+  broadcast: BroadcastFn,
+  options?: { debounceMs?: number; pollIntervalMs?: number; onError?: ErrorFn },
+): ManifestWatcher {
+  const debounceMs = options?.debounceMs ?? 300;
+  const pollIntervalMs = options?.pollIntervalMs ?? 3000;
+  const onError = options?.onError;
+
+  let debounceTimer: ReturnType<typeof setTimeout> | null = null;
+  let previousStatuses = new Map<string, AgentStatus>();
+  let polling = false;
+  let stopped = false;
+
+  // --- fs.watch on manifest.json (with directory fallback) ---
+  const mPath = manifestPath(projectRoot);
+  let fileWatcher: fs.FSWatcher | null = null;
+  let dirWatcher: fs.FSWatcher | null = null;
+
+  function onFsChange(): void {
+    if (stopped) return;
+    if (debounceTimer) clearTimeout(debounceTimer);
+    debounceTimer = setTimeout(() => {
+      if (stopped) return;
+      onManifestFileChange().catch((err) => onError?.(err));
+    }, debounceMs);
+  }
+
+  function watchManifestFile(): boolean {
+    try {
+      fileWatcher = fs.watch(mPath, onFsChange);
+      fileWatcher.on('error', () => {});
+      return true;
+    } catch {
+      return false;
+    }
+  }
+
+  // Try to watch manifest directly; fall back to watching .ppg/ directory
+  if (!watchManifestFile()) {
+    try {
+      const dir = ppgDir(projectRoot);
+      dirWatcher = fs.watch(dir, (_event, filename) => {
+        if (filename === path.basename(mPath) && !fileWatcher) {
+          if (watchManifestFile()) {
+            dirWatcher?.close();
+            dirWatcher = null;
+          }
+          onFsChange();
+        }
+      });
+      dirWatcher.on('error', () => {});
+    } catch {
+      // .ppg/ doesn't exist yet either — polling still works
+    }
+  }
+
+  async function onManifestFileChange(): Promise<void> {
+    try {
+      const manifest = await readManifest(projectRoot);
+      broadcast({ type: 'manifest:updated', payload: manifest });
+    } catch (err) {
+      onError?.(err);
+    }
+  }
+
+  // --- Status polling ---
+  const pollTimer = setInterval(() => {
+    if (stopped) return;
+    pollStatuses().catch((err) => onError?.(err));
+  }, pollIntervalMs);
+
+  async function pollStatuses(): Promise<void> {
+    if (polling) return;
+    polling = true;
+    try {
+      let manifest: Manifest;
+      try {
+        manifest = await readManifest(projectRoot);
+      } catch (err) {
+        onError?.(err);
+        return;
+      }
+
+      let paneMap: Map<string, PaneInfo>;
+      try {
+        paneMap = await listSessionPanes(manifest.sessionName);
+      } catch (err) {
+        onError?.(err);
+        return;
+      }
+
+      // Collect all agents with their worktree context
+      const agents = Object.values(manifest.worktrees).flatMap((wt) =>
+        Object.values(wt.agents).map((agent) => ({ agent, worktreeId: wt.id })),
+      );
+
+      // Check statuses in parallel (checkAgentStatus does no I/O when paneMap is provided)
+      const results = await Promise.all(
+        agents.map(({ agent }) =>
+          checkAgentStatus(agent, projectRoot, paneMap).catch(() => null),
+        ),
+      );
+
+      const nextStatuses = new Map<string, AgentStatus>();
+      for (let i = 0; i < agents.length; i++) {
+        const result = results[i];
+        if (!result) continue;
+
+        const { agent, worktreeId } = agents[i];
+        nextStatuses.set(agent.id, result.status);
+
+        const prev = previousStatuses.get(agent.id);
+        if (prev !== undefined && prev !== result.status) {
+          broadcast({
+            type: 'agent:status',
+            payload: { agentId: agent.id, worktreeId, status: result.status, previousStatus: prev },
+          });
+        }
+      }
+      previousStatuses = nextStatuses;
+    } finally {
+      polling = false;
+    }
+  }
+
+  return {
+    stop() {
+      stopped = true;
+      if (debounceTimer) clearTimeout(debounceTimer);
+      if (fileWatcher) fileWatcher.close();
+      if (dirWatcher) dirWatcher.close();
+      clearInterval(pollTimer);
+    },
+  };
+}
diff --git a/src/test-fixtures.ts b/src/test-fixtures.ts
index 3a4c12b..38c7c4b 100644
--- a/src/test-fixtures.ts
+++ b/src/test-fixtures.ts
@@ -1,4 +1,4 @@
-import type { AgentEntry, WorktreeEntry } from './types/manifest.js';
+import type { AgentEntry, Manifest, WorktreeEntry } from './types/manifest.js';
 import type { PaneInfo } from './core/tmux.js';
 
 export function makeAgent(overrides?: Partial<AgentEntry>): AgentEntry {
@@ -38,3 +38,15 @@ export function makePaneInfo(overrides?: Partial<PaneInfo>): PaneInfo {
     ...overrides,
   };
 }
+
+export function makeManifest(overrides?: Partial<Manifest>): Manifest {
+  return {
+    version: 1,
+    projectRoot: '/tmp/project',
+    sessionName: 'ppg',
+    worktrees: {},
+    createdAt: '2026-01-01T00:00:00.000Z',
+    updatedAt: '2026-01-01T00:00:00.000Z',
+    ...overrides,
+  };
+}