From 9932a61ef4de5eb31e79360c93536e6bcfe118e0 Mon Sep 17 00:00:00 2001 From: salmonumbrella <182032677+salmonumbrella@users.noreply.github.com> Date: Tue, 31 Mar 2026 07:30:58 -0700 Subject: [PATCH 1/2] chore: add .worktrees and .specstory to gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index c84f53be62..16692784c6 100644 --- a/.gitignore +++ b/.gitignore @@ -19,6 +19,8 @@ target target-agent .cursorrules .github/hooks +.worktrees +.specstory # Editor directories and files .vscode/* From bcd9397665a7a08eae9ca2e908afbab29da42a0b Mon Sep 17 00:00:00 2001 From: salmonumbrella <182032677+salmonumbrella@users.noreply.github.com> Date: Tue, 31 Mar 2026 17:55:05 -0700 Subject: [PATCH 2/2] feat(cli): add cap CLI with upload, record, settings, and video management --- Cargo.lock | 202 ++++++-- Cargo.toml | 1 + apps/cli/Cargo.toml | 45 +- apps/cli/src/auth.rs | 198 ++++++++ apps/cli/src/config.rs | 460 +++++++++++++++++++ apps/cli/src/daemon/client.rs | 44 ++ apps/cli/src/daemon/mod.rs | 4 + apps/cli/src/daemon/protocol.rs | 89 ++++ apps/cli/src/daemon/server.rs | 109 +++++ apps/cli/src/daemon/state.rs | 90 ++++ apps/cli/src/feedback.rs | 131 ++++++ apps/cli/src/main.rs | 166 ++++--- apps/cli/src/orgs.rs | 45 ++ apps/cli/src/record.rs | 271 ++++++++++- apps/cli/src/s3.rs | 133 ++++++ apps/cli/src/system_info.rs | 140 ++++++ apps/cli/src/upload_cmd.rs | 183 ++++++++ apps/cli/src/videos.rs | 286 ++++++++++++ apps/web/app/api/desktop/[...route]/video.ts | 174 ++++++- crates/upload/Cargo.toml | 23 + crates/upload/src/auth.rs | 192 ++++++++ crates/upload/src/client.rs | 376 +++++++++++++++ crates/upload/src/error.rs | 60 +++ crates/upload/src/lib.rs | 17 + crates/upload/src/thumbnail.rs | 93 ++++ crates/upload/src/types.rs | 242 ++++++++++ crates/upload/src/upload.rs | 342 ++++++++++++++ crates/upload/tests/integration.rs | 121 +++++ 28 files changed, 4092 insertions(+), 145 deletions(-) create mode 100644 
apps/cli/src/auth.rs create mode 100644 apps/cli/src/config.rs create mode 100644 apps/cli/src/daemon/client.rs create mode 100644 apps/cli/src/daemon/mod.rs create mode 100644 apps/cli/src/daemon/protocol.rs create mode 100644 apps/cli/src/daemon/server.rs create mode 100644 apps/cli/src/daemon/state.rs create mode 100644 apps/cli/src/feedback.rs create mode 100644 apps/cli/src/orgs.rs create mode 100644 apps/cli/src/s3.rs create mode 100644 apps/cli/src/system_info.rs create mode 100644 apps/cli/src/upload_cmd.rs create mode 100644 apps/cli/src/videos.rs create mode 100644 crates/upload/Cargo.toml create mode 100644 crates/upload/src/auth.rs create mode 100644 crates/upload/src/client.rs create mode 100644 crates/upload/src/error.rs create mode 100644 crates/upload/src/lib.rs create mode 100644 crates/upload/src/thumbnail.rs create mode 100644 crates/upload/src/types.rs create mode 100644 crates/upload/src/upload.rs create mode 100644 crates/upload/tests/integration.rs diff --git a/Cargo.lock b/Cargo.lock index 6775f75e7b..83e0cad4fa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -184,6 +184,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "aligned" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee4508988c62edf04abd8d92897fca0c2995d907ce1dfeaf369dac3716a40685" +dependencies = [ + "as-slice", +] + [[package]] name = "aligned-vec" version = "0.6.4" @@ -327,7 +336,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0348a1c054491f4bfe6ab86a7b6ab1e44e45d899005de92f58b3df180b36ddaf" dependencies = [ "clipboard-win", - "image 0.25.8", + "image 0.25.10", "log", "objc2 0.6.2", "objc2-app-kit", @@ -364,6 +373,15 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +[[package]] +name = "as-slice" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "516b6b4f0e40d50dcda9365d53964ec74560ad4284da2e7fc97122cd83174516" +dependencies = [ + "stable_deref_trait", +] + [[package]] name = "ash" version = "0.38.0+1.3.281" @@ -582,6 +600,26 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "av-scenechange" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f321d77c20e19b92c39e7471cf986812cbb46659d2af674adc4331ef3f18394" +dependencies = [ + "aligned", + "anyhow", + "arg_enum_proc_macro", + "arrayvec", + "log", + "num-rational", + "num-traits", + "pastey", + "rayon", + "thiserror 2.0.16", + "v_frame", + "y4m", +] + [[package]] name = "av1-grain" version = "0.2.4" @@ -851,9 +889,12 @@ dependencies = [ [[package]] name = "bitstream-io" -version = "2.6.0" +version = "4.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6099cdc01846bc367c4e7dd630dc5966dccf36b652fae7a74e17b640411a91b2" +checksum = "60d4bd9d1db2c6bdf285e223a7fa369d5ce98ec767dec949c6ca62863ce61757" +dependencies = [ + "core2", +] [[package]] name = "block" @@ -937,9 +978,9 @@ dependencies = [ [[package]] name = "built" -version = "0.7.7" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ed6191a7e78c36abdb16ab65341eefd73d64d303fffccdbb00d51e4205967b" +checksum = "f4ad8f11f288f48ca24471bbd51ac257aaeaaa07adae295591266b792902ae64" [[package]] name = "bumpalo" @@ -1043,11 +1084,18 @@ dependencies = [ "cap-project", "cap-recording", "cap-rendering", + "cap-upload", "cap-utils", + "chrono", "cidre", "clap", + "dirs 5.0.1", "ffmpeg-next", "flume", + "indicatif", + "libc", + "open", + "percent-encoding", "scap-targets", "serde", "serde_json", @@ -1204,7 +1252,7 @@ dependencies = [ [[package]] name = "cap-desktop" -version = "0.4.81" +version = "0.4.82" dependencies = [ "aho-corasick", "anyhow", @@ 
-1241,7 +1289,7 @@ dependencies = [ "futures", "futures-intrusive", "global-hotkey", - "image 0.25.8", + "image 0.25.10", "kameo", "keyed_priority_queue", "lazy_static", @@ -1417,7 +1465,7 @@ dependencies = [ "ffmpeg-next", "futures", "gifski", - "image 0.25.8", + "image 0.25.10", "imgref", "inquire", "mp4", @@ -1579,7 +1627,7 @@ dependencies = [ "foreign-types-shared 0.3.1", "futures", "hex", - "image 0.25.8", + "image 0.25.10", "indexmap 2.11.4", "inquire", "kameo", @@ -1634,7 +1682,7 @@ dependencies = [ "futures", "futures-intrusive", "glyphon", - "image 0.25.8", + "image 0.25.10", "log", "metal 0.31.0", "objc2 0.6.2", @@ -1720,6 +1768,23 @@ dependencies = [ "workspace-hack", ] +[[package]] +name = "cap-upload" +version = "0.1.0" +dependencies = [ + "chrono", + "dirs 5.0.1", + "image 0.25.10", + "reqwest 0.12.24", + "serde", + "serde_json", + "tempfile", + "thiserror 1.0.69", + "tokio", + "toml 0.8.2", + "tracing", +] + [[package]] name = "cap-utils" version = "0.1.0" @@ -1956,7 +2021,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afede46921767868c5c7f8f55202bdd8bec0bab6bc9605174200f45924f93c62" dependencies = [ "clipboard-win", - "image 0.25.8", + "image 0.25.10", "objc2 0.6.2", "objc2-app-kit", "objc2-foundation 0.3.1", @@ -2250,6 +2315,15 @@ dependencies = [ "thiserror 2.0.16", ] +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + [[package]] name = "core_maths" version = "0.1.1" @@ -3058,9 +3132,9 @@ dependencies = [ [[package]] name = "exr" -version = "1.73.0" +version = "1.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83197f59927b46c04a183a619b7c29df34e63e63c7869320862268c0ef687e0" +checksum = "4300e043a56aa2cb633c01af81ca8f699a321879a7854d3896a0ba89056363be" dependencies = [ "bit_field", "half", @@ -3643,13 
+3717,23 @@ dependencies = [ "weezl", ] +[[package]] +name = "gif" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5df2ba84018d80c213569363bdcd0c64e6933c67fe4c1d60ecf822971a3c35e" +dependencies = [ + "color_quant", + "weezl", +] + [[package]] name = "gif-dispose" version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e1aa07391f3d9c279f388cea6faf291555dd891df59bed01d4378583df946ac" dependencies = [ - "gif", + "gif 0.13.3", "imgref", "rgb", ] @@ -3664,7 +3748,7 @@ dependencies = [ "crossbeam-channel", "crossbeam-utils", "dunce", - "gif", + "gif 0.13.3", "gif-dispose", "imagequant", "imgref", @@ -4458,7 +4542,7 @@ dependencies = [ "byteorder", "color_quant", "exr", - "gif", + "gif 0.13.3", "jpeg-decoder", "num-traits", "png 0.17.16", @@ -4468,15 +4552,15 @@ dependencies = [ [[package]] name = "image" -version = "0.25.8" +version = "0.25.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "529feb3e6769d234375c4cf1ee2ce713682b8e76538cb13f9fc23e1400a591e7" +checksum = "85ab80394333c02fe689eaf900ab500fbd0c2213da414687ebf995a65d5a6104" dependencies = [ "bytemuck", "byteorder-lite", "color_quant", "exr", - "gif", + "gif 0.14.1", "image-webp", "moxcms", "num-traits", @@ -4485,9 +4569,9 @@ dependencies = [ "ravif", "rayon", "rgb", - "tiff 0.10.3", - "zune-core", - "zune-jpeg", + "tiff 0.11.3", + "zune-core 0.5.1", + "zune-jpeg 0.5.15", ] [[package]] @@ -4521,9 +4605,9 @@ checksum = "edcd27d72f2f071c64249075f42e205ff93c9a4c5f6c6da53e79ed9f9832c285" [[package]] name = "imgref" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0263a3d970d5c054ed9312c0057b4f3bde9c0b33836d3637361d4a9e6e7a408" +checksum = "e7c5cedc30da3a610cac6b4ba17597bdf7152cf974e8aab3afb3d54455e371c8" [[package]] name = "impl-more" @@ -4695,6 +4779,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version 
= "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.15" @@ -5430,9 +5523,9 @@ dependencies = [ [[package]] name = "moxcms" -version = "0.7.5" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddd32fa8935aeadb8a8a6b6b351e40225570a37c43de67690383d87ef170cd08" +checksum = "bb85c154ba489f01b25c0d36ae69a87e4a1c73a72631fc6c0eb6dde34a73e44b" dependencies = [ "num-traits", "pxfm", @@ -6492,6 +6585,12 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +[[package]] +name = "pastey" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec" + [[package]] name = "pathdiff" version = "0.2.3" @@ -7248,19 +7347,21 @@ checksum = "f93e7e49bb0bf967717f7bd674458b3d6b0c5f48ec7e3038166026a69fc22223" [[package]] name = "rav1e" -version = "0.7.1" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd87ce80a7665b1cce111f8a16c1f3929f6547ce91ade6addf4ec86a8dda5ce9" +checksum = "43b6dd56e85d9483277cde964fd1bdb0428de4fec5ebba7540995639a21cb32b" dependencies = [ + "aligned-vec", "arbitrary", "arg_enum_proc_macro", "arrayvec", + "av-scenechange", "av1-grain", "bitstream-io", "built", "cfg-if", "interpolate_name", - "itertools 0.12.1", + "itertools 0.14.0", "libc", "libfuzzer-sys", "log", @@ -7269,23 +7370,21 @@ dependencies = [ "noop_proc_macro", "num-derive", "num-traits", - "once_cell", "paste", "profiling", - "rand 0.8.5", - "rand_chacha 0.3.1", + "rand 0.9.2", + "rand_chacha 0.9.0", "simd_helpers", - "system-deps", - "thiserror 1.0.69", + "thiserror 2.0.16", "v_frame", "wasm-bindgen", ] [[package]] name = 
"ravif" -version = "0.11.20" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5825c26fddd16ab9f515930d49028a630efec172e903483c94796cfe31893e6b" +checksum = "e52310197d971b0f5be7fe6b57530dcd27beb35c1b013f29d66c1ad73fbbcc45" dependencies = [ "avif-serialize", "imgref", @@ -7577,7 +7676,7 @@ version = "0.45.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8928798c0a55e03c9ca6c4c6846f76377427d2c1e1f7e6de3c06ae57942df43" dependencies = [ - "gif", + "gif 0.13.3", "image-webp", "log", "pico-args", @@ -7585,7 +7684,7 @@ dependencies = [ "svgtypes", "tiny-skia", "usvg", - "zune-jpeg", + "zune-jpeg 0.4.21", ] [[package]] @@ -9209,7 +9308,7 @@ dependencies = [ "heck 0.5.0", "http 1.3.1", "http-range", - "image 0.25.8", + "image 0.25.10", "jni", "libc", "log", @@ -9906,16 +10005,16 @@ dependencies = [ [[package]] name = "tiff" -version = "0.10.3" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af9605de7fee8d9551863fd692cce7637f548dbd9db9180fcc07ccc6d26c336f" +checksum = "b63feaf3343d35b6ca4d50483f94843803b0f51634937cc2ec519fc32232bc52" dependencies = [ "fax", "flate2", "half", "quick-error", "weezl", - "zune-jpeg", + "zune-jpeg 0.5.15", ] [[package]] @@ -12157,7 +12256,7 @@ dependencies = [ "futures-task", "futures-util", "getrandom 0.2.16", - "gif", + "gif 0.13.3", "hashbrown 0.15.5", "idna", "itertools 0.12.1", @@ -12543,6 +12642,12 @@ version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a" +[[package]] +name = "zune-core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb8a0807f7c01457d0379ba880ba6322660448ddebc890ce29bb64da71fb40f9" + [[package]] name = "zune-inflate" version = "0.2.54" @@ -12558,7 +12663,16 @@ version = "0.4.21" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "29ce2c8a9384ad323cf564b67da86e21d3cfdff87908bc1223ed5c99bc792713" dependencies = [ - "zune-core", + "zune-core 0.4.12", +] + +[[package]] +name = "zune-jpeg" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27bc9d5b815bc103f142aa054f561d9187d191692ec7c2d1e2b4737f8dbd7296" +dependencies = [ + "zune-core 0.5.1", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 44097ca757..87caec5e5b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,6 +4,7 @@ members = [ "apps/cli", "apps/desktop/src-tauri", "crates/*", + "crates/upload", "crates/workspace-hack", ] diff --git a/apps/cli/Cargo.toml b/apps/cli/Cargo.toml index 0b3fdf9240..ff1cdedad6 100644 --- a/apps/cli/Cargo.toml +++ b/apps/cli/Cargo.toml @@ -3,30 +3,53 @@ name = "cap" version = "0.1.0" edition = "2024" +[features] +default = [] +record = [ + "dep:cap-project", + "dep:cap-rendering", + "dep:cap-editor", + "dep:cap-media", + "dep:cap-recording", + "dep:cap-export", + "dep:cap-camera", + "dep:scap-targets", + "dep:ffmpeg", + "dep:flume", + "dep:libc", +] + [dependencies] clap = { version = "4.5.23", features = ["derive"] } cap-utils = { path = "../../crates/utils" } -cap-project = { path = "../../crates/project" } -cap-rendering = { path = "../../crates/rendering" } -cap-editor = { path = "../../crates/editor" } -cap-media = { path = "../../crates/media" } +cap-project = { path = "../../crates/project", optional = true } +cap-rendering = { path = "../../crates/rendering", optional = true } +cap-editor = { path = "../../crates/editor", optional = true } +cap-media = { path = "../../crates/media", optional = true } cap-flags = { path = "../../crates/flags" } -cap-recording = { path = "../../crates/recording" } -cap-export = { path = "../../crates/export" } -cap-camera = { path = "../../crates/camera" } -scap-targets = { path = "../../crates/scap-targets" } +cap-recording = { path = 
"../../crates/recording", optional = true } +cap-export = { path = "../../crates/export", optional = true } +cap-camera = { path = "../../crates/camera", optional = true } +cap-upload = { path = "../../crates/upload" } +scap-targets = { path = "../../crates/scap-targets", optional = true } serde = { workspace = true } serde_json = "1.0.133" tokio.workspace = true uuid = { version = "1.11.1", features = ["v4"] } -ffmpeg = { workspace = true } +ffmpeg = { workspace = true, optional = true } tracing.workspace = true tracing-subscriber = "0.3.19" -flume.workspace = true +flume = { workspace = true, optional = true } +indicatif = "0.17" +open = "5" +percent-encoding = { workspace = true } +libc = { version = "0.2", optional = true } +dirs = "5" +chrono = { version = "0.4", features = ["serde"] } workspace-hack = { version = "0.1", path = "../../crates/workspace-hack" } [target.'cfg(target_os = "macos")'.dependencies] -cidre = { workspace = true } +cidre = { workspace = true, optional = true } [lints] workspace = true diff --git a/apps/cli/src/auth.rs b/apps/cli/src/auth.rs new file mode 100644 index 0000000000..08cec21b8b --- /dev/null +++ b/apps/cli/src/auth.rs @@ -0,0 +1,198 @@ +use cap_upload::AuthConfig; +use clap::{Args, Subcommand}; +use tokio::io::{AsyncReadExt, AsyncWriteExt}; +use tokio::net::TcpListener; + +#[derive(Args)] +pub struct AuthArgs { + #[command(subcommand)] + command: AuthCommands, +} + +#[derive(Subcommand)] +enum AuthCommands { + Login(LoginArgs), + Logout, + Status, +} + +#[derive(Args)] +struct LoginArgs { + #[arg(long, default_value = "https://cap.so")] + server: String, + #[arg(long)] + api_key: Option, +} + +impl AuthArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + match self.command { + AuthCommands::Login(args) => login(args, json).await, + AuthCommands::Logout => logout(json), + AuthCommands::Status => status(json), + } + } +} + +async fn login(args: LoginArgs, json: bool) -> Result<(), String> { + let server_url = 
args.server.trim_end_matches('/').to_string(); + + if let Some(api_key) = args.api_key { + let path = + AuthConfig::save(&server_url, &api_key).map_err(|e| format!("Failed to save: {e}"))?; + + if json { + println!( + "{}", + serde_json::json!({ + "status": "logged_in", + "server_url": server_url, + "config_path": path.display().to_string() + }) + ); + } else { + eprintln!("Logged in to {server_url}"); + eprintln!("Config saved to {}", path.display()); + } + return Ok(()); + } + + let listener = TcpListener::bind("127.0.0.1:0") + .await + .map_err(|e| format!("Failed to bind local listener: {e}"))?; + let port = listener + .local_addr() + .map_err(|e| format!("Failed to get port: {e}"))? + .port(); + + let auth_url = format!( + "{}/api/desktop/session/request?type=api_key&port={}&platform=web", + server_url, port + ); + + eprintln!("Opening browser for login..."); + eprintln!("If the browser does not open, visit: {auth_url}"); + + if open::that(&auth_url).is_err() { + eprintln!("Could not open browser automatically."); + } + + eprintln!("Waiting for authentication..."); + + let (mut stream, _addr) = + tokio::time::timeout(std::time::Duration::from_secs(300), listener.accept()) + .await + .map_err(|_| "Login timed out after 5 minutes. Please try again.".to_string())? + .map_err(|e| format!("Failed to accept connection: {e}"))?; + + let mut buf = vec![0u8; 4096]; + let n = stream + .read(&mut buf) + .await + .map_err(|e| format!("Failed to read: {e}"))?; + let request = String::from_utf8_lossy(&buf[..n]); + + let api_key = extract_query_param(&request, "api_key"); + let user_id = extract_query_param(&request, "user_id"); + + let response_body = if api_key.is_some() { + "Authentication successful! You can close this tab." + } else { + "Authentication failed. Please try again." 
+ }; + + let response = format!( + "HTTP/1.1 200 OK\r\nContent-Type: text/html\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{}", + response_body.len(), + response_body + ); + + stream + .write_all(response.as_bytes()) + .await + .map_err(|e| format!("Failed to write response: {e}"))?; + + let api_key = api_key.ok_or("Server did not return an API key")?; + + let path = AuthConfig::save(&server_url, &api_key) + .map_err(|e| format!("Failed to save credentials: {e}"))?; + + if json { + println!( + "{}", + serde_json::json!({ + "status": "logged_in", + "server_url": server_url, + "user_id": user_id, + "config_path": path.display().to_string() + }) + ); + } else { + eprintln!("Logged in to {server_url}"); + if let Some(uid) = user_id { + eprintln!("User: {uid}"); + } + eprintln!("Config saved to {}", path.display()); + } + + Ok(()) +} + +fn logout(json: bool) -> Result<(), String> { + AuthConfig::remove().map_err(|e| format!("Failed to remove credentials: {e}"))?; + + if json { + println!("{}", serde_json::json!({"status": "logged_out"})); + } else { + eprintln!("Logged out successfully."); + } + Ok(()) +} + +fn status(json: bool) -> Result<(), String> { + match AuthConfig::resolve() { + Ok(config) => { + if json { + println!( + "{}", + serde_json::json!({ + "status": "logged_in", + "server_url": config.server_url, + }) + ); + } else { + eprintln!("Logged in to {}", config.server_url); + } + Ok(()) + } + Err(_) => { + if json { + println!("{}", serde_json::json!({"status": "not_logged_in"})); + } else { + eprintln!( + "Not logged in. Run \"cap auth login --server URL\" or set CAP_API_KEY and CAP_SERVER_URL environment variables." 
+ ); + } + Ok(()) + } + } +} + +fn extract_query_param(request: &str, param: &str) -> Option { + let first_line = request.lines().next()?; + let path = first_line.split_whitespace().nth(1)?; + let query = path.split('?').nth(1)?; + for pair in query.split('&') { + let mut kv = pair.splitn(2, '='); + if let (Some(key), Some(value)) = (kv.next(), kv.next()) { + if key == param { + return Some( + percent_encoding::percent_decode_str(value) + .decode_utf8_lossy() + .to_string(), + ); + } + } + } + None +} diff --git a/apps/cli/src/config.rs b/apps/cli/src/config.rs new file mode 100644 index 0000000000..9254810153 --- /dev/null +++ b/apps/cli/src/config.rs @@ -0,0 +1,460 @@ +use clap::{Args, Subcommand}; +use serde::{Deserialize, Serialize}; +use std::path::{Path, PathBuf}; + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +pub struct CliSettings { + #[serde(skip_serializing_if = "Option::is_none")] + pub auto_zoom_on_clicks: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub capture_keyboard_events: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub max_fps: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub excluded_windows: Option>, +} + +pub fn config_dir() -> PathBuf { + dirs::config_dir() + .unwrap_or_else(|| PathBuf::from(".")) + .join("cap") +} + +pub fn config_path() -> PathBuf { + config_dir().join("settings.json") +} + +pub fn load_cli_settings() -> CliSettings { + let path = config_path(); + load_cli_settings_from(&path) +} + +pub fn load_cli_settings_from(path: &Path) -> CliSettings { + match std::fs::read_to_string(path) { + Ok(contents) => serde_json::from_str(&contents).unwrap_or_default(), + Err(_) => CliSettings::default(), + } +} + +pub fn save_cli_settings(settings: &CliSettings) -> Result<(), String> { + let path = config_path(); + save_cli_settings_to(settings, &path) +} + +pub fn save_cli_settings_to(settings: &CliSettings, path: &Path) -> Result<(), String> { + if let Some(parent) = 
path.parent() { + std::fs::create_dir_all(parent) + .map_err(|e| format!("Failed to create config directory: {e}"))?; + } + let json = serde_json::to_string_pretty(settings) + .map_err(|e| format!("Failed to serialize settings: {e}"))?; + std::fs::write(path, json).map_err(|e| format!("Failed to write config: {e}")) +} + +fn tauri_store_path() -> Option { + #[cfg(target_os = "macos")] + { + dirs::data_dir().map(|d| d.join("so.cap.desktop").join("store.json")) + } + #[cfg(target_os = "windows")] + { + dirs::config_dir().map(|d| d.join("so.cap.desktop").join("store.json")) + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] + { + dirs::config_dir().map(|d| d.join("Cap").join("store.json")) + } +} + +pub fn load_tauri_settings() -> CliSettings { + tauri_store_path() + .and_then(|p| load_tauri_settings_from(&p)) + .unwrap_or_default() +} + +pub fn load_tauri_settings_from(path: &Path) -> Option { + let contents = std::fs::read_to_string(path).ok()?; + let store: serde_json::Value = serde_json::from_str(&contents).ok()?; + let general = store.get("general_settings")?; + + Some(CliSettings { + auto_zoom_on_clicks: general.get("autoZoomOnClicks").and_then(|v| v.as_bool()), + capture_keyboard_events: general + .get("captureKeyboardEvents") + .and_then(|v| v.as_bool()), + max_fps: general + .get("maxFps") + .and_then(|v| v.as_u64()) + .map(|v| v as u32), + excluded_windows: general.get("excludedWindows").and_then(|v| { + v.as_array().map(|arr| { + arr.iter() + .filter_map(|item| { + item.get("windowTitle") + .or_else(|| item.get("window_title")) + .and_then(|t| t.as_str()) + .map(|s| s.to_string()) + }) + .collect() + }) + }), + }) +} + +const DEFAULT_EXCLUDED_WINDOWS: &[&str] = &[ + "Cap", + "Cap Settings", + "Cap Recording Controls", + "Cap Camera", +]; + +#[derive(Debug, Clone)] +pub struct ResolvedSettings { + pub auto_zoom_on_clicks: bool, + pub capture_keyboard_events: bool, + pub max_fps: u32, + pub excluded_windows: Vec, +} + +impl ResolvedSettings { 
+ pub fn resolve(layers: &[&CliSettings]) -> Self { + let mut auto_zoom: Option = None; + let mut capture_keys: Option = None; + let mut fps: Option = None; + let mut excluded: Option> = None; + + for layer in layers { + if auto_zoom.is_none() { + auto_zoom = layer.auto_zoom_on_clicks; + } + if capture_keys.is_none() { + capture_keys = layer.capture_keyboard_events; + } + if fps.is_none() { + fps = layer.max_fps; + } + if excluded.is_none() { + excluded = layer.excluded_windows.clone(); + } + } + + Self { + auto_zoom_on_clicks: auto_zoom.unwrap_or(false), + capture_keyboard_events: capture_keys.unwrap_or(true), + max_fps: fps.unwrap_or(60), + excluded_windows: excluded.unwrap_or_else(|| { + DEFAULT_EXCLUDED_WINDOWS + .iter() + .map(|s| s.to_string()) + .collect() + }), + } + } + + pub fn resolve_with_tauri(cli_flags: &CliSettings) -> Self { + let cli_config = load_cli_settings(); + let tauri_config = load_tauri_settings(); + Self::resolve(&[cli_flags, &cli_config, &tauri_config]) + } +} + +#[derive(Args)] +pub struct ConfigArgs { + #[command(subcommand)] + command: ConfigCommands, +} + +#[derive(Subcommand)] +enum ConfigCommands { + Get(ConfigGetArgs), + Set(ConfigSetArgs), +} + +#[derive(Args)] +struct ConfigGetArgs { + #[arg(long)] + json: Option, +} + +#[derive(Args)] +struct ConfigSetArgs { + #[arg(long)] + auto_zoom: Option, + #[arg(long)] + capture_keys: Option, + #[arg(long, value_parser = clap::value_parser!(u32).range(1..=240))] + fps: Option, + #[arg(long)] + exclude_add: Vec, + #[arg(long)] + exclude_remove: Vec, + #[arg(long)] + exclude_reset: bool, +} + +impl ConfigArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + match self.command { + ConfigCommands::Get(args) => config_get(args.json.unwrap_or(json)), + ConfigCommands::Set(args) => config_set(args, json), + } + } +} + +fn config_get(json: bool) -> Result<(), String> { + let resolved = ResolvedSettings::resolve_with_tauri(&CliSettings::default()); + + if json { + let output = 
serde_json::json!({ + "auto_zoom_on_clicks": resolved.auto_zoom_on_clicks, + "capture_keyboard_events": resolved.capture_keyboard_events, + "max_fps": resolved.max_fps, + "excluded_windows": resolved.excluded_windows, + "config_path": config_path().display().to_string(), + }); + println!("{}", serde_json::to_string_pretty(&output).unwrap()); + } else { + eprintln!("Auto zoom on clicks: {}", resolved.auto_zoom_on_clicks); + eprintln!( + "Capture keyboard: {}", + resolved.capture_keyboard_events + ); + eprintln!("Max FPS: {}", resolved.max_fps); + eprintln!( + "Excluded windows: {}", + if resolved.excluded_windows.is_empty() { + "(none)".to_string() + } else { + resolved.excluded_windows.join(", ") + } + ); + eprintln!("\nConfig file: {}", config_path().display()); + } + Ok(()) +} + +fn config_set(args: ConfigSetArgs, json: bool) -> Result<(), String> { + let mut settings = load_cli_settings(); + + if let Some(val) = args.auto_zoom { + settings.auto_zoom_on_clicks = Some(val); + } + if let Some(val) = args.capture_keys { + settings.capture_keyboard_events = Some(val); + } + if let Some(val) = args.fps { + settings.max_fps = Some(val); + } + + if args.exclude_reset { + settings.excluded_windows = None; + } else if !args.exclude_add.is_empty() || !args.exclude_remove.is_empty() { + let mut current = settings.excluded_windows.clone().unwrap_or_else(|| { + DEFAULT_EXCLUDED_WINDOWS + .iter() + .map(|s| s.to_string()) + .collect() + }); + + for window in &args.exclude_add { + if !current.contains(window) { + current.push(window.clone()); + } + } + + for window in &args.exclude_remove { + current.retain(|w| w != window); + } + + settings.excluded_windows = Some(current); + } + + save_cli_settings(&settings)?; + + if json { + println!("{}", serde_json::json!({"status": "saved"})); + } else { + eprintln!("Settings saved to {}", config_path().display()); + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn round_trip_full_settings() { + let dir = 
std::env::temp_dir().join("cap-test-config-rt"); + let path = dir.join("settings.json"); + let _ = std::fs::remove_dir_all(&dir); + + let settings = CliSettings { + auto_zoom_on_clicks: Some(true), + capture_keyboard_events: Some(false), + max_fps: Some(30), + excluded_windows: Some(vec!["Terminal".to_string(), "Finder".to_string()]), + }; + + save_cli_settings_to(&settings, &path).unwrap(); + let loaded = load_cli_settings_from(&path); + + assert_eq!(loaded.auto_zoom_on_clicks, Some(true)); + assert_eq!(loaded.capture_keyboard_events, Some(false)); + assert_eq!(loaded.max_fps, Some(30)); + assert_eq!( + loaded.excluded_windows, + Some(vec!["Terminal".to_string(), "Finder".to_string()]) + ); + + let _ = std::fs::remove_dir_all(&dir); + } + + #[test] + fn round_trip_partial_settings() { + let dir = std::env::temp_dir().join("cap-test-config-partial"); + let path = dir.join("settings.json"); + let _ = std::fs::remove_dir_all(&dir); + + let settings = CliSettings { + max_fps: Some(120), + ..Default::default() + }; + + save_cli_settings_to(&settings, &path).unwrap(); + let loaded = load_cli_settings_from(&path); + + assert_eq!(loaded.auto_zoom_on_clicks, None); + assert_eq!(loaded.max_fps, Some(120)); + + let _ = std::fs::remove_dir_all(&dir); + } + + #[test] + fn load_missing_file_returns_defaults() { + let loaded = load_cli_settings_from(Path::new("/tmp/cap-nonexistent-dir/settings.json")); + assert_eq!(loaded.auto_zoom_on_clicks, None); + assert_eq!(loaded.capture_keyboard_events, None); + assert_eq!(loaded.max_fps, None); + assert_eq!(loaded.excluded_windows, None); + } + + #[test] + fn load_malformed_json_returns_defaults() { + let dir = std::env::temp_dir().join("cap-test-config-malformed"); + let path = dir.join("settings.json"); + let _ = std::fs::remove_dir_all(&dir); + std::fs::create_dir_all(&dir).unwrap(); + std::fs::write(&path, "not valid json{{{").unwrap(); + + let loaded = load_cli_settings_from(&path); + assert_eq!(loaded.auto_zoom_on_clicks, None); + 
+ let _ = std::fs::remove_dir_all(&dir); + } + + #[test] + fn tauri_store_parses_general_settings() { + let dir = std::env::temp_dir().join("cap-test-tauri-store"); + let path = dir.join("store.json"); + let _ = std::fs::remove_dir_all(&dir); + std::fs::create_dir_all(&dir).unwrap(); + + let store_json = r#"{ + "general_settings": { + "autoZoomOnClicks": true, + "captureKeyboardEvents": false, + "maxFps": 30, + "excludedWindows": [ + {"windowTitle": "Cap", "ownerName": "Cap"}, + {"windowTitle": "Terminal", "ownerName": "Terminal"} + ] + } + }"#; + std::fs::write(&path, store_json).unwrap(); + + let settings = load_tauri_settings_from(&path).unwrap(); + assert_eq!(settings.auto_zoom_on_clicks, Some(true)); + assert_eq!(settings.capture_keyboard_events, Some(false)); + assert_eq!(settings.max_fps, Some(30)); + assert_eq!( + settings.excluded_windows, + Some(vec!["Cap".to_string(), "Terminal".to_string()]) + ); + + let _ = std::fs::remove_dir_all(&dir); + } + + #[test] + fn tauri_store_missing_key_returns_none_fields() { + let dir = std::env::temp_dir().join("cap-test-tauri-nokey"); + let path = dir.join("store.json"); + let _ = std::fs::remove_dir_all(&dir); + std::fs::create_dir_all(&dir).unwrap(); + std::fs::write(&path, r#"{"some_other_key": {}}"#).unwrap(); + + let result = load_tauri_settings_from(&path); + assert!(result.is_none()); + + let _ = std::fs::remove_dir_all(&dir); + } + + #[test] + fn tauri_store_missing_file_returns_none() { + let result = load_tauri_settings_from(Path::new("/tmp/cap-no-such-store/store.json")); + assert!(result.is_none()); + } + + #[test] + fn resolver_cli_flags_win_over_config() { + let flags = CliSettings { + max_fps: Some(120), + ..Default::default() + }; + let config = CliSettings { + max_fps: Some(30), + auto_zoom_on_clicks: Some(true), + ..Default::default() + }; + + let resolved = ResolvedSettings::resolve(&[&flags, &config]); + assert_eq!(resolved.max_fps, 120); + assert!(resolved.auto_zoom_on_clicks); + } + + #[test] + fn 
resolver_falls_through_to_defaults() { + let empty = CliSettings::default(); + let resolved = ResolvedSettings::resolve(&[&empty]); + assert!(!resolved.auto_zoom_on_clicks); + assert!(resolved.capture_keyboard_events); + assert_eq!(resolved.max_fps, 60); + assert_eq!(resolved.excluded_windows.len(), 4); + assert_eq!(resolved.excluded_windows[0], "Cap"); + } + + #[test] + fn resolver_three_layers() { + let flags = CliSettings { + max_fps: Some(30), + ..Default::default() + }; + let cli_config = CliSettings { + auto_zoom_on_clicks: Some(true), + capture_keyboard_events: Some(false), + ..Default::default() + }; + let tauri = CliSettings { + auto_zoom_on_clicks: Some(false), + max_fps: Some(120), + excluded_windows: Some(vec!["Firefox".to_string()]), + ..Default::default() + }; + + let resolved = ResolvedSettings::resolve(&[&flags, &cli_config, &tauri]); + assert_eq!(resolved.max_fps, 30); + assert!(resolved.auto_zoom_on_clicks); + assert!(!resolved.capture_keyboard_events); + assert_eq!(resolved.excluded_windows, vec!["Firefox".to_string()]); + } +} diff --git a/apps/cli/src/daemon/client.rs b/apps/cli/src/daemon/client.rs new file mode 100644 index 0000000000..381c15427e --- /dev/null +++ b/apps/cli/src/daemon/client.rs @@ -0,0 +1,44 @@ +use super::{ + protocol::{DaemonCommand, DaemonResponse}, + state, +}; +use tokio::{ + io::{AsyncBufReadExt, AsyncWriteExt, BufReader}, + net::UnixStream, +}; + +pub async fn send_command(command: DaemonCommand) -> Result { + let sock_path = state::socket_path(); + if !sock_path.exists() { + return Err("No active recording found.".to_string()); + } + + let stream = UnixStream::connect(&sock_path) + .await + .map_err(|e| format!("Failed to connect to recording daemon: {e}"))?; + + let (reader, mut writer) = stream.into_split(); + + let msg = serde_json::to_string(&command).map_err(|e| format!("Serialize error: {e}"))?; + writer + .write_all(format!("{msg}\n").as_bytes()) + .await + .map_err(|e| format!("Write error: {e}"))?; + + let 
mut reader = BufReader::new(reader); + let mut line = String::new(); + reader + .read_line(&mut line) + .await + .map_err(|e| format!("Read error: {e}"))?; + + serde_json::from_str(line.trim()).map_err(|e| format!("Parse error: {e}")) +} + +pub async fn stop_recording() -> Result { + send_command(DaemonCommand::Stop).await +} + +pub async fn get_status() -> Result { + send_command(DaemonCommand::Status).await +} diff --git a/apps/cli/src/daemon/mod.rs b/apps/cli/src/daemon/mod.rs new file mode 100644 index 0000000000..8d6f215f2d --- /dev/null +++ b/apps/cli/src/daemon/mod.rs @@ -0,0 +1,4 @@ +pub mod client; +pub mod protocol; +pub mod server; +pub mod state; diff --git a/apps/cli/src/daemon/protocol.rs b/apps/cli/src/daemon/protocol.rs new file mode 100644 index 0000000000..10b5708427 --- /dev/null +++ b/apps/cli/src/daemon/protocol.rs @@ -0,0 +1,89 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "command")] +pub enum DaemonCommand { + #[serde(rename = "stop")] + Stop, + #[serde(rename = "status")] + Status, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "status")] +pub enum DaemonResponse { + #[serde(rename = "ok")] + Ok { + project_path: String, + #[serde(skip_serializing_if = "Option::is_none")] + duration_secs: Option, + }, + #[serde(rename = "recording")] + Recording { + duration_secs: f64, + project_path: String, + screen: Option, + }, + #[serde(rename = "error")] + Error { message: String }, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn serialize_stop_command() { + let cmd = DaemonCommand::Stop; + let json = serde_json::to_string(&cmd).unwrap(); + assert_eq!(json, r#"{"command":"stop"}"#); + } + + #[test] + fn serialize_status_command() { + let cmd = DaemonCommand::Status; + let json = serde_json::to_string(&cmd).unwrap(); + assert_eq!(json, r#"{"command":"status"}"#); + } + + #[test] + fn deserialize_stop_command() { + let cmd: DaemonCommand = 
serde_json::from_str(r#"{"command":"stop"}"#).unwrap(); + assert!(matches!(cmd, DaemonCommand::Stop)); + } + + #[test] + fn serialize_ok_response() { + let resp = DaemonResponse::Ok { + project_path: "/tmp/rec.cap".to_string(), + duration_secs: Some(42.5), + }; + let json = serde_json::to_string(&resp).unwrap(); + assert!(json.contains("\"status\":\"ok\"")); + assert!(json.contains("42.5")); + } + + #[test] + fn serialize_recording_response() { + let resp = DaemonResponse::Recording { + duration_secs: 10.0, + project_path: "/tmp/rec.cap".to_string(), + screen: Some("1".to_string()), + }; + let json = serde_json::to_string(&resp).unwrap(); + assert!(json.contains("\"status\":\"recording\"")); + } + + #[test] + fn round_trip_error_response() { + let resp = DaemonResponse::Error { + message: "something broke".to_string(), + }; + let json = serde_json::to_string(&resp).unwrap(); + let restored: DaemonResponse = serde_json::from_str(&json).unwrap(); + match restored { + DaemonResponse::Error { message } => assert_eq!(message, "something broke"), + _ => panic!("expected Error variant"), + } + } +} diff --git a/apps/cli/src/daemon/server.rs b/apps/cli/src/daemon/server.rs new file mode 100644 index 0000000000..34e7c49517 --- /dev/null +++ b/apps/cli/src/daemon/server.rs @@ -0,0 +1,109 @@ +use super::{ + protocol::{DaemonCommand, DaemonResponse}, + state::{self, RecordingState}, +}; +use cap_recording::studio_recording; +use tokio::{ + io::{AsyncBufReadExt, AsyncWriteExt, BufReader}, + net::UnixListener, +}; +use tracing::info; + +pub struct RecordingDaemon { + handle: studio_recording::ActorHandle, + state: RecordingState, +} + +impl RecordingDaemon { + pub fn new(handle: studio_recording::ActorHandle, state: RecordingState) -> Self { + Self { handle, state } + } + + pub async fn run(self) -> Result<(), String> { + let sock_path = state::socket_path(); + + if sock_path.exists() { + let _ = std::fs::remove_file(&sock_path); + } + + let listener = + 
UnixListener::bind(&sock_path).map_err(|e| format!("Failed to bind socket: {e}"))?; + + info!(socket = %sock_path.display(), "Recording daemon listening"); + + let start_time = std::time::Instant::now(); + + loop { + let (stream, _) = listener + .accept() + .await + .map_err(|e| format!("Accept error: {e}"))?; + + let (reader, mut writer) = stream.into_split(); + let mut reader = BufReader::new(reader); + let mut line = String::new(); + + if reader.read_line(&mut line).await.is_err() { + continue; + } + + let command: DaemonCommand = match serde_json::from_str(line.trim()) { + Ok(cmd) => cmd, + Err(e) => { + let resp = DaemonResponse::Error { + message: format!("Invalid command: {e}"), + }; + let _ = writer + .write_all( + format!("{}\n", serde_json::to_string(&resp).unwrap()).as_bytes(), + ) + .await; + continue; + } + }; + + match command { + DaemonCommand::Status => { + let elapsed = start_time.elapsed().as_secs_f64(); + let resp = DaemonResponse::Recording { + duration_secs: elapsed, + project_path: self.state.project_path.display().to_string(), + screen: self.state.screen.clone(), + }; + let _ = writer + .write_all( + format!("{}\n", serde_json::to_string(&resp).unwrap()).as_bytes(), + ) + .await; + } + DaemonCommand::Stop => { + info!("Stop command received, finalizing recording"); + let elapsed = start_time.elapsed().as_secs_f64(); + + let stop_result = self.handle.stop().await; + + let resp = match stop_result { + Ok(_) => DaemonResponse::Ok { + project_path: self.state.project_path.display().to_string(), + duration_secs: Some(elapsed), + }, + Err(e) => DaemonResponse::Error { + message: format!("Failed to stop recording: {e}"), + }, + }; + + let _ = writer + .write_all( + format!("{}\n", serde_json::to_string(&resp).unwrap()).as_bytes(), + ) + .await; + + RecordingState::remove().ok(); + let _ = std::fs::remove_file(&sock_path); + info!("Recording daemon shutting down"); + return Ok(()); + } + } + } + } +} diff --git a/apps/cli/src/daemon/state.rs 
b/apps/cli/src/daemon/state.rs new file mode 100644 index 0000000000..5bfb797caa --- /dev/null +++ b/apps/cli/src/daemon/state.rs @@ -0,0 +1,90 @@ +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RecordingState { + pub pid: u32, + pub recording_id: String, + pub project_path: PathBuf, + pub started_at: String, + pub screen: Option, +} + +fn state_dir() -> PathBuf { + dirs::config_dir() + .unwrap_or_else(|| PathBuf::from("/tmp")) + .join("cap") +} + +pub fn state_path() -> PathBuf { + state_dir().join("recording.json") +} + +pub fn socket_path() -> PathBuf { + state_dir().join("recording.sock") +} + +impl RecordingState { + pub fn save(&self) -> Result<(), String> { + let path = state_path(); + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent) + .map_err(|e| format!("Failed to create state dir: {e}"))?; + } + let json = + serde_json::to_string_pretty(self).map_err(|e| format!("Failed to serialize: {e}"))?; + std::fs::write(&path, json).map_err(|e| format!("Failed to write state: {e}"))?; + Ok(()) + } + + pub fn load() -> Result, String> { + let path = state_path(); + if !path.exists() { + return Ok(None); + } + let contents = + std::fs::read_to_string(&path).map_err(|e| format!("Failed to read state: {e}"))?; + let state = + serde_json::from_str(&contents).map_err(|e| format!("Failed to parse state: {e}"))?; + Ok(Some(state)) + } + + pub fn remove() -> Result<(), String> { + let path = state_path(); + if path.exists() { + std::fs::remove_file(&path).map_err(|e| format!("Failed to remove state: {e}"))?; + } + let sock = socket_path(); + if sock.exists() { + let _ = std::fs::remove_file(&sock); + } + Ok(()) + } + + pub fn is_process_alive(&self) -> bool { + unsafe { libc::kill(self.pid as i32, 0) == 0 } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn round_trip_state() { + let state = RecordingState { + pid: 12345, + recording_id: 
"abc123".to_string(), + project_path: PathBuf::from("/tmp/test.cap"), + started_at: "2026-03-31T13:00:00Z".to_string(), + screen: Some("1".to_string()), + }; + + let json = serde_json::to_string(&state).unwrap(); + let restored: RecordingState = serde_json::from_str(&json).unwrap(); + + assert_eq!(restored.pid, 12345); + assert_eq!(restored.recording_id, "abc123"); + assert_eq!(restored.project_path, PathBuf::from("/tmp/test.cap")); + } +} diff --git a/apps/cli/src/feedback.rs b/apps/cli/src/feedback.rs new file mode 100644 index 0000000000..08440b20fc --- /dev/null +++ b/apps/cli/src/feedback.rs @@ -0,0 +1,131 @@ +use cap_upload::{AuthConfig, CapClient}; +use clap::{Args, Subcommand}; + +#[derive(Args)] +pub struct FeedbackArgs { + message: String, +} + +#[derive(Args)] +pub struct DebugArgs { + #[command(subcommand)] + command: DebugCommands, +} + +#[derive(Subcommand)] +enum DebugCommands { + Upload, + Logs, +} + +pub fn log_dir() -> std::path::PathBuf { + dirs::config_dir() + .unwrap_or_else(|| std::path::PathBuf::from(".")) + .join("cap") + .join("logs") +} + +pub fn log_path() -> std::path::PathBuf { + log_dir().join("cap-cli.log") +} + +impl FeedbackArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + let os = std::env::consts::OS; + let version = env!("CARGO_PKG_VERSION"); + + client + .submit_feedback(&self.message, os, version) + .await + .map_err(|e| format!("Failed to submit feedback: {e}"))?; + + if json { + println!("{}", serde_json::json!({"status": "submitted"})); + } else { + eprintln!("Feedback submitted. 
Thank you!"); + } + Ok(()) + } +} + +impl DebugArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + match self.command { + DebugCommands::Upload => debug_upload(json).await, + DebugCommands::Logs => debug_logs(json), + } + } +} + +async fn debug_upload(json: bool) -> Result<(), String> { + let path = log_path(); + + let log_data = match std::fs::read(&path) { + Ok(data) => { + let max_size = 1024 * 1024; + if data.len() > max_size { + data[data.len() - max_size..].to_vec() + } else { + data + } + } + Err(_) => { + return Err(format!( + "No log file found at {}. Run a command first to generate logs.", + path.display() + )); + } + }; + + let os = std::env::consts::OS; + let version = env!("CARGO_PKG_VERSION"); + + let auth = AuthConfig::resolve().ok(); + let client = match auth { + Some(auth) => CapClient::new(auth).map_err(|e| e.to_string())?, + None => { + return Err("Authentication required. Run 'cap auth login' first.".to_string()); + } + }; + + client + .upload_debug_logs(log_data, os, version, "{}") + .await + .map_err(|e| format!("Failed to upload logs: {e}"))?; + + if json { + println!("{}", serde_json::json!({"status": "uploaded"})); + } else { + eprintln!("Debug logs uploaded successfully."); + } + Ok(()) +} + +fn debug_logs(json: bool) -> Result<(), String> { + let path = log_path(); + let exists = path.exists(); + + if json { + println!( + "{}", + serde_json::json!({ + "path": path.display().to_string(), + "exists": exists, + }) + ); + } else { + eprintln!("Log file: {}", path.display()); + if exists { + let meta = std::fs::metadata(&path).ok(); + if let Some(meta) = meta { + eprintln!("Size: {} bytes", meta.len()); + } + } else { + eprintln!("(file does not exist yet)"); + } + } + Ok(()) +} diff --git a/apps/cli/src/main.rs b/apps/cli/src/main.rs index f02a7c31ac..61d9bd1f13 100644 --- a/apps/cli/src/main.rs +++ b/apps/cli/src/main.rs @@ -1,35 +1,71 @@ +mod auth; +mod config; +#[cfg(feature = "record")] +mod daemon; +mod feedback; +mod 
orgs; +#[cfg(feature = "record")] mod record; +mod s3; +#[cfg(feature = "record")] +mod system_info; +mod upload_cmd; +mod videos; +#[cfg(feature = "record")] use std::{ io::{Write, stdout}, path::PathBuf, }; -use cap_export::ExporterBase; -use cap_project::XY; -use clap::{Args, Parser, Subcommand}; +#[cfg(feature = "record")] +use clap::Args; +use clap::{Parser, Subcommand}; +#[cfg(feature = "record")] use record::RecordStart; +#[cfg(feature = "record")] use serde_json::json; +#[cfg(feature = "record")] use tracing::*; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; #[derive(Parser)] +#[command(name = "cap", about = "Screen recording and sharing")] struct Cli { + #[arg(long, global = true)] + json: bool, + #[command(subcommand)] command: Commands, } #[derive(Subcommand)] enum Commands { - /// Export a '.cap' project to an mp4 file + #[cfg(feature = "record")] Export(Export), - /// Start a recording or list available capture targets and devices + #[cfg(feature = "record")] Record(RecordArgs), + Auth(auth::AuthArgs), + Upload(upload_cmd::UploadArgs), + Config(config::ConfigArgs), + Feedback(feedback::FeedbackArgs), + Debug(feedback::DebugArgs), + #[cfg(feature = "record")] + SystemInfo(system_info::SystemInfoArgs), + List(videos::ListArgs), + Get(videos::GetArgs), + Delete(videos::DeleteArgs), + Open(videos::OpenArgs), + Info(videos::InfoArgs), + Transcript(videos::TranscriptArgs), + Password(videos::PasswordArgs), + Orgs(orgs::OrgsArgs), + S3(s3::S3Args), } +#[cfg(feature = "record")] #[derive(Args)] #[command(args_conflicts_with_subcommands = true)] -// #[command(flatten_help = true)] struct RecordArgs { #[command(subcommand)] command: Option, @@ -38,42 +74,61 @@ struct RecordArgs { args: RecordStart, } +#[cfg(feature = "record")] #[derive(Subcommand)] enum RecordCommands { - /// List screens available for capturing Screens, - /// List windows available for capturing Windows, - /// List cameras available for capturing Cameras, - // Mics, + 
Start(record::RecordStart), + Stop, + Status, } #[tokio::main] async fn main() -> Result<(), String> { - // let (layer, handle) = tracing_subscriber::reload::Layer::new(None::); - let registry = tracing_subscriber::registry().with(tracing_subscriber::filter::filter_fn( - (|v| v.target().starts_with("cap_")) as fn(&tracing::Metadata) -> bool, + (|v| v.target().starts_with("cap_") || v.target().starts_with("cap")) + as fn(&tracing::Metadata) -> bool, )); + let log_dir = feedback::log_dir(); + let file_layer = std::fs::create_dir_all(&log_dir) + .ok() + .and_then(|_| { + std::fs::OpenOptions::new() + .create(true) + .append(true) + .open(feedback::log_path()) + .ok() + }) + .map(|file| { + tracing_subscriber::fmt::layer() + .with_ansi(false) + .with_target(true) + .with_writer(std::sync::Mutex::new(file)) + }); + registry - // .with(layer) .with( tracing_subscriber::fmt::layer() .with_ansi(true) .with_target(true), ) + .with(file_layer) .init(); let cli = Cli::parse(); + let json_output = cli.json; match cli.command { + #[cfg(feature = "record")] Commands::Export(e) => { if let Err(e) = e.run().await { eprint!("Export failed: {e}") } } + #[cfg(feature = "record")] Commands::Record(RecordArgs { command, args }) => match command { Some(RecordCommands::Screens) => { let screens = cap_recording::screen_capture::list_displays(); @@ -114,49 +169,58 @@ window {}: let mut info = vec![]; for camera_info in cameras { - // let format = RequestedFormat::new::( - // RequestedFormatType::AbsoluteHighestFrameRate, - // ); - - // let Ok(mut camera) = Camera::new(camera_info.index().clone(), format) else { - // continue; - // }; - info.push(json!({ - // "model_id": camera_info.model_id().to_string(), "display_name": camera_info.display_name() - // "index": camera_info.index().to_string(), - // "name": camera_info.human_name(), - // "pixel_format": camera.frame_format(), - // "formats": camera - // .compatible_camera_formats() - // .unwrap() - // .into_iter() - // .map(|f| 
format!("{}x{}@{}fps", f.resolution().x(), f.resolution().y(), f.frame_rate())) - // .collect::>() })); } println!("{}", serde_json::to_string_pretty(&info).unwrap()); } + Some(RecordCommands::Start(start_args)) => { + record::start_daemon(start_args, json_output).await?; + } + Some(RecordCommands::Stop) => { + record::stop_recording(json_output).await?; + } + Some(RecordCommands::Status) => { + record::recording_status(json_output).await?; + } None => { args.run().await?; } }, + Commands::Auth(a) => a.run(json_output).await?, + Commands::Upload(u) => u.run(json_output).await?, + Commands::Config(c) => c.run(json_output).await?, + Commands::Feedback(f) => f.run(json_output).await?, + Commands::Debug(d) => d.run(json_output).await?, + #[cfg(feature = "record")] + Commands::SystemInfo(s) => s.run(json_output).await?, + Commands::List(l) => l.run(json_output).await?, + Commands::Get(g) => g.run(json_output).await?, + Commands::Delete(d) => d.run(json_output).await?, + Commands::Open(o) => o.run(json_output).await?, + Commands::Info(i) => i.run(json_output).await?, + Commands::Transcript(t) => t.run(json_output).await?, + Commands::Password(p) => p.run(json_output).await?, + Commands::Orgs(o) => o.run(json_output).await?, + Commands::S3(s) => s.run(json_output).await?, } Ok(()) } +#[cfg(feature = "record")] #[derive(Args)] struct Export { project_path: PathBuf, output_path: Option, } +#[cfg(feature = "record")] impl Export { async fn run(self) -> Result<(), String> { - let exporter_base = ExporterBase::builder(self.project_path) + let exporter_base = cap_export::ExporterBase::builder(self.project_path) .build() .await .map_err(|v| format!("Exporter build error: {v}"))?; @@ -165,14 +229,12 @@ impl Export { let exporter_output_path = cap_export::mp4::Mp4ExportSettings { fps: 60, - resolution_base: XY::new(1920, 1080), + resolution_base: cap_project::XY::new(1920, 1080), compression: cap_export::mp4::ExportCompression::Maximum, custom_bpp: None, force_ffmpeg_decoder: 
false, } .export(exporter_base, move |_f| { - // print!("\rrendered frame {f}"); - stdout.flush().unwrap(); true }) @@ -191,35 +253,3 @@ impl Export { Ok(()) } } - -// fn ffmpeg_callback_experiment() { -// unsafe { -// unsafe extern "C" fn ffmpeg_log_callback( -// arg1: *mut std::ffi::c_void, -// arg2: std::ffi::c_int, -// arg3: *const std::ffi::c_char, -// arg4: *mut std::ffi::c_char, -// ) { -// // ffmpeg::sys::AVClass; - -// if !arg1.is_null() { -// let arg1_ptr = arg1; -// let arg1 = **(arg1 as *mut *mut AVClass); -// dbg!(CStr::from_ptr(arg1.class_name)); -// if let Some(item_name_fn) = arg1.item_name { -// dbg!(CStr::from_ptr(item_name_fn(arg1_ptr))); -// } -// } - -// // let class_name = if !arg1.is_null() { -// // CStr::from_ptr((*arg1).class_name) -// // } else { -// // "unknown".to_string() -// // }; - -// // println!("[{class_name}] {arg2} {s:?}",); -// } - -// ffmpeg::sys::av_log_set_callback(Some(ffmpeg_log_callback)); -// } -// } diff --git a/apps/cli/src/orgs.rs b/apps/cli/src/orgs.rs new file mode 100644 index 0000000000..1bb391a44c --- /dev/null +++ b/apps/cli/src/orgs.rs @@ -0,0 +1,45 @@ +use cap_upload::{AuthConfig, CapClient}; +use clap::{Args, Subcommand}; + +#[derive(Args)] +pub struct OrgsArgs { + #[command(subcommand)] + command: OrgsCommands, +} + +#[derive(Subcommand)] +enum OrgsCommands { + List, +} + +impl OrgsArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + match self.command { + OrgsCommands::List => list(json).await, + } + } +} + +async fn list(json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + let orgs = client + .list_organizations() + .await + .map_err(|e| e.to_string())?; + + if json { + println!("{}", serde_json::to_string_pretty(&orgs).unwrap()); + } else { + if orgs.is_empty() { + eprintln!("No organizations found."); + return Ok(()); + } + for org in &orgs { + println!("{}\t{}", org.id, 
org.name); + } + } + + Ok(()) +} diff --git a/apps/cli/src/record.rs b/apps/cli/src/record.rs index f6e90b08e8..d543d67124 100644 --- a/apps/cli/src/record.rs +++ b/apps/cli/src/record.rs @@ -1,33 +1,79 @@ use cap_recording::{screen_capture::ScreenCaptureTarget, studio_recording}; use clap::Args; use scap_targets::{DisplayId, WindowId}; -use std::{env::current_dir, path::PathBuf}; +use std::{env::current_dir, path::PathBuf, process}; + +use crate::config::{CliSettings, ResolvedSettings}; use tokio::io::AsyncBufReadExt; use uuid::Uuid; +use crate::daemon::{ + client as daemon_client, protocol::DaemonResponse, server::RecordingDaemon, + state::RecordingState, +}; + #[derive(Args)] pub struct RecordStart { #[command(flatten)] target: RecordTargets, - /// Index of the camera to record #[arg(long)] camera: Option, - /// ID of the microphone to record #[arg(long)] mic: Option, - /// Whether to capture system audio #[arg(long)] system_audio: bool, - /// Path to save the '.cap' project to #[arg(long)] path: Option, - /// Maximum fps to record at (max 60) #[arg(long)] fps: Option, + #[arg(long, conflicts_with = "no_auto_zoom")] + auto_zoom: bool, + #[arg(long, conflicts_with = "auto_zoom")] + no_auto_zoom: bool, + #[arg(long, conflicts_with = "no_capture_keys")] + capture_keys: bool, + #[arg(long, conflicts_with = "capture_keys")] + no_capture_keys: bool, + #[arg(long = "exclude")] + exclude_windows: Vec, +} + +impl RecordStart { + fn override_settings(&self) -> CliSettings { + let auto_zoom = if self.auto_zoom { + Some(true) + } else if self.no_auto_zoom { + Some(false) + } else { + None + }; + + let capture_keys = if self.capture_keys { + Some(true) + } else if self.no_capture_keys { + Some(false) + } else { + None + }; + + CliSettings { + auto_zoom_on_clicks: auto_zoom, + capture_keyboard_events: capture_keys, + max_fps: self.fps, + excluded_windows: if self.exclude_windows.is_empty() { + None + } else { + Some(self.exclude_windows.clone()) + }, + } + } } impl 
RecordStart { pub async fn run(self) -> Result<(), String> { + let overrides = self.override_settings(); + let settings = ResolvedSettings::resolve_with_tauri(&overrides); + let target_info = match (self.target.screen, self.target.window) { (Some(id), _) => cap_recording::screen_capture::list_displays() .into_iter() @@ -42,25 +88,16 @@ impl RecordStart { _ => Err("No target specified".to_string()), }?; - // let camera = if let Some(model_id) = self.camera { - // let _model_id: ModelID = model_id - // .try_into() - // .map_err(|_| "Invalid model ID".to_string())?; - - // todo!() - // // Some(CameraFeed::init(model_id).await.unwrap()) - // } else { - // None - // }; - let id = Uuid::new_v4().to_string(); let path = self .path .unwrap_or_else(|| current_dir().unwrap().join(format!("{id}.cap"))); - let actor = studio_recording::Actor::builder(path, target_info) + let handle = studio_recording::Actor::builder(path.clone(), target_info) .with_system_audio(self.system_audio) .with_custom_cursor(false) + .with_max_fps(settings.max_fps) + .with_keyboard_capture(settings.capture_keyboard_events) .build( #[cfg(target_os = "macos")] Some(cap_recording::SendableShareableContent::from( @@ -77,18 +114,210 @@ impl RecordStart { .await .unwrap(); - actor.stop().await.unwrap(); + handle.stop().await.map_err(|e| e.to_string())?; Ok(()) } } +pub async fn start_daemon(args: RecordStart, json: bool) -> Result<(), String> { + if let Some(existing) = RecordingState::load()? { + if existing.is_process_alive() { + return Err( + "A recording is already in progress. 
Run \"cap record stop\" first.".to_string(), + ); + } + RecordingState::remove()?; + } + + let target_info = match (args.target.screen, args.target.window) { + (Some(id), _) => cap_recording::screen_capture::list_displays() + .into_iter() + .find(|s| s.0.id == id) + .map(|(s, _)| ScreenCaptureTarget::Display { id: s.id }) + .ok_or(format!("Screen with id '{id}' not found")), + (_, Some(id)) => cap_recording::screen_capture::list_windows() + .into_iter() + .find(|s| s.0.id == id) + .map(|(s, _)| ScreenCaptureTarget::Window { id: s.id }) + .ok_or(format!("Window with id '{id}' not found")), + _ => Err("No target specified. Use --screen ID or --window ID.".to_string()), + }?; + + let recording_id = Uuid::new_v4().to_string(); + let project_path = args + .path + .unwrap_or_else(|| current_dir().unwrap().join(format!("{recording_id}.cap"))); + + let screen_label = match &target_info { + ScreenCaptureTarget::Display { id } => Some(id.to_string()), + ScreenCaptureTarget::Window { id } => Some(id.to_string()), + _ => None, + }; + + let overrides = args.override_settings(); + let settings = ResolvedSettings::resolve_with_tauri(&overrides); + + let handle = studio_recording::Actor::builder(project_path.clone(), target_info) + .with_system_audio(args.system_audio) + .with_custom_cursor(false) + .with_max_fps(settings.max_fps) + .with_keyboard_capture(settings.capture_keyboard_events) + .build( + #[cfg(target_os = "macos")] + Some(cap_recording::SendableShareableContent::from( + cidre::sc::ShareableContent::current().await.unwrap(), + )), + ) + .await + .map_err(|e| e.to_string())?; + + let state = RecordingState { + pid: process::id(), + recording_id: recording_id.clone(), + project_path: project_path.clone(), + started_at: chrono::Utc::now().to_rfc3339(), + screen: screen_label, + }; + state.save()?; + + if json { + println!( + "{}", + serde_json::json!({ + "status": "recording", + "recording_id": recording_id, + "project_path": project_path.display().to_string(), + }) + ); 
+ } else { + eprintln!("Recording started (id: {recording_id})"); + eprintln!("Project: {}", project_path.display()); + eprintln!("Stop with: cap record stop"); + } + + let daemon = RecordingDaemon::new(handle, state); + daemon.run().await?; + + Ok(()) +} + +pub async fn stop_recording(json: bool) -> Result<(), String> { + let state = RecordingState::load()?.ok_or("No active recording found.")?; + + if !state.is_process_alive() { + RecordingState::remove()?; + return Err("Recording process is no longer running. State cleaned up.".to_string()); + } + + let response = daemon_client::stop_recording().await?; + + match response { + DaemonResponse::Ok { + project_path, + duration_secs, + } => { + if json { + println!( + "{}", + serde_json::json!({ + "status": "stopped", + "project_path": project_path, + "duration_secs": duration_secs, + }) + ); + } else { + if let Some(dur) = duration_secs { + let mins = dur as u64 / 60; + let secs = dur as u64 % 60; + eprintln!("Recording stopped (duration: {mins}m {secs}s)"); + } else { + eprintln!("Recording stopped."); + } + eprintln!("Project: {project_path}"); + } + } + DaemonResponse::Error { message } => { + return Err(format!("Stop failed: {message}")); + } + DaemonResponse::Recording { .. 
} => { + return Err("Unexpected response from daemon".to_string()); + } + } + + Ok(()) +} + +pub async fn recording_status(json: bool) -> Result<(), String> { + let state = RecordingState::load()?; + + let Some(state) = state else { + if json { + println!("{}", serde_json::json!({"status": "idle"})); + } else { + eprintln!("No active recording."); + } + return Ok(()); + }; + + if !state.is_process_alive() { + RecordingState::remove()?; + if json { + println!( + "{}", + serde_json::json!({"status": "idle", "note": "stale state cleaned up"}) + ); + } else { + eprintln!("No active recording (cleaned up stale state)."); + } + return Ok(()); + } + + match daemon_client::get_status().await { + Ok(DaemonResponse::Recording { + duration_secs, + project_path, + screen, + }) => { + if json { + println!( + "{}", + serde_json::json!({ + "status": "recording", + "duration_secs": duration_secs, + "project_path": project_path, + "screen": screen, + }) + ); + } else { + let mins = duration_secs as u64 / 60; + let secs = duration_secs as u64 % 60; + eprintln!("Recording in progress ({mins}m {secs}s)"); + eprintln!("Project: {project_path}"); + if let Some(scr) = screen { + eprintln!("Screen: {scr}"); + } + } + } + Ok(other) => { + if json { + println!("{}", serde_json::to_string(&other).unwrap()); + } else { + eprintln!("Unexpected status: {other:?}"); + } + } + Err(e) => { + return Err(format!("Failed to query daemon: {e}")); + } + } + + Ok(()) +} + #[derive(Args)] struct RecordTargets { - /// ID of the screen to capture #[arg(long, group = "target")] screen: Option, - /// ID of the window to capture #[arg(long, group = "target")] window: Option, } diff --git a/apps/cli/src/s3.rs b/apps/cli/src/s3.rs new file mode 100644 index 0000000000..e0fe46eaca --- /dev/null +++ b/apps/cli/src/s3.rs @@ -0,0 +1,133 @@ +use cap_upload::{AuthConfig, CapClient, S3ConfigInput}; +use clap::{Args, Subcommand}; + +#[derive(Args)] +pub struct S3Args { + #[command(subcommand)] + command: S3Commands, +} 
+ +#[derive(Subcommand)] +enum S3Commands { + Config(S3BucketArgs), + Test(S3BucketArgs), + Get, + Delete, +} + +#[derive(Args)] +struct S3BucketArgs { + #[arg(long)] + provider: String, + #[arg(long)] + bucket: String, + #[arg(long)] + region: String, + #[arg(long)] + endpoint: String, + #[arg(long)] + access_key_id: String, + #[arg(long)] + secret_access_key: String, +} + +impl S3BucketArgs { + fn build_s3_input(&self) -> S3ConfigInput { + S3ConfigInput { + provider: self.provider.clone(), + access_key_id: self.access_key_id.clone(), + secret_access_key: self.secret_access_key.clone(), + endpoint: self.endpoint.clone(), + bucket_name: self.bucket.clone(), + region: self.region.clone(), + } + } +} + +impl S3Args { + pub async fn run(self, json: bool) -> Result<(), String> { + match self.command { + S3Commands::Config(args) => set_config(args, json).await, + S3Commands::Test(args) => test_config(args, json).await, + S3Commands::Get => get_config(json).await, + S3Commands::Delete => delete_config(json).await, + } + } +} + +async fn set_config(args: S3BucketArgs, json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + let input = args.build_s3_input(); + client + .set_s3_config(&input) + .await + .map_err(|e| e.to_string())?; + + if json { + println!("{}", serde_json::json!({"status": "saved"})); + } else { + eprintln!("S3 configuration saved."); + } + Ok(()) +} + +async fn test_config(args: S3BucketArgs, json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + let input = args.build_s3_input(); + client + .test_s3_config(&input) + .await + .map_err(|e| e.to_string())?; + + if json { + println!("{}", serde_json::json!({"status": "ok"})); + } else { + eprintln!("S3 connectivity test passed."); + } + Ok(()) +} + +async fn get_config(json: bool) -> 
Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + let config = client.get_s3_config().await.map_err(|e| e.to_string())?; + + if json { + println!( + "{}", + serde_json::json!({ + "provider": config.provider, + "bucket_name": config.bucket_name, + "region": config.region, + "endpoint": config.endpoint, + "access_key_id": config.access_key_id, + }) + ); + } else { + println!("Provider: {}", config.provider); + println!("Bucket: {}", config.bucket_name); + println!("Region: {}", config.region); + println!("Endpoint: {}", config.endpoint); + println!("Access Key: {}", config.access_key_id); + } + Ok(()) +} + +async fn delete_config(json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + client.delete_s3_config().await.map_err(|e| e.to_string())?; + + if json { + println!("{}", serde_json::json!({"status": "deleted"})); + } else { + eprintln!("S3 configuration deleted."); + } + Ok(()) +} diff --git a/apps/cli/src/system_info.rs b/apps/cli/src/system_info.rs new file mode 100644 index 0000000000..3312bcf67b --- /dev/null +++ b/apps/cli/src/system_info.rs @@ -0,0 +1,140 @@ +use clap::Args; + +#[derive(Args)] +pub struct SystemInfoArgs { + #[arg(long)] + json: Option, +} + +impl SystemInfoArgs { + pub async fn run(self, json_default: bool) -> Result<(), String> { + let json = self.json.unwrap_or(json_default); + let diagnostics = cap_recording::diagnostics::collect_diagnostics(); + let hardware = cap_recording::diagnostics::collect_hardware_info(); + let displays = cap_recording::diagnostics::collect_displays(); + + if json { + let output = serde_json::json!({ + "diagnostics": diagnostics, + "hardware": hardware, + "displays": displays, + }); + println!("{}", serde_json::to_string_pretty(&output).unwrap()); + } else { + print_human_readable(&diagnostics, 
&hardware, &displays); + } + Ok(()) + } +} + +#[cfg(target_os = "macos")] +fn print_human_readable( + diag: &cap_recording::diagnostics::SystemDiagnostics, + hw: &cap_recording::diagnostics::HardwareInfo, + displays: &[cap_recording::diagnostics::DisplayDiagnostics], +) { + if let Some(ref ver) = diag.macos_version { + eprintln!( + "Operating System: macOS {}.{}.{}{}", + ver.major, + ver.minor, + ver.patch, + if ver.is_apple_silicon { + " (Apple Silicon)" + } else { + "" + } + ); + } + eprintln!( + "Capture Support: {}", + if diag.screen_capture_supported { + "Screen Capture: Supported" + } else { + "Screen Capture: Not Supported" + } + ); + if let Some(ref gpu) = diag.gpu_name { + eprintln!( + "GPU: {}{}", + gpu, + if diag.metal_supported { + " (Metal supported)" + } else { + "" + } + ); + } + eprintln!( + "CPU: {} ({} cores)", + hw.cpu_brand, hw.cpu_cores + ); + eprintln!("Memory: {} MB", hw.total_memory_mb); + + if !diag.available_encoders.is_empty() { + eprintln!("\nAvailable Encoders:"); + eprintln!(" {}", diag.available_encoders.join(" ")); + } + + if !displays.is_empty() { + eprintln!("\nDisplays:"); + for d in displays { + eprintln!( + " {} {}x{} @{}Hz ({}x scale{})", + d.name, + d.width, + d.height, + d.refresh_rate, + d.scale_factor, + if d.is_primary { ", primary" } else { "" } + ); + } + } +} + +#[cfg(target_os = "windows")] +fn print_human_readable( + diag: &cap_recording::diagnostics::SystemDiagnostics, + hw: &cap_recording::diagnostics::HardwareInfo, + displays: &[cap_recording::diagnostics::DisplayDiagnostics], +) { + if let Some(ref ver) = diag.windows_version { + eprintln!("Operating System: {}", ver.display_name); + } + eprintln!( + "Capture Support: {}", + if diag.graphics_capture_supported { + "Graphics Capture: Supported" + } else { + "Graphics Capture: Not Supported" + } + ); + if let Some(ref gpu) = diag.gpu_info { + eprintln!("GPU: {} ({})", gpu.description, gpu.vendor); + } + eprintln!( + "CPU: {} ({} cores)", + hw.cpu_brand, hw.cpu_cores 
+ ); + eprintln!("Memory: {} MB", hw.total_memory_mb); + + if !diag.available_encoders.is_empty() { + eprintln!("\nAvailable Encoders:"); + eprintln!(" {}", diag.available_encoders.join(" ")); + } + + if !displays.is_empty() { + eprintln!("\nDisplays:"); + for d in displays { + eprintln!( + " {} {}x{} @{}Hz ({}x scale{})", + d.name, + d.width, + d.height, + d.refresh_rate, + d.scale_factor, + if d.is_primary { ", primary" } else { "" } + ); + } + } +} diff --git a/apps/cli/src/upload_cmd.rs b/apps/cli/src/upload_cmd.rs new file mode 100644 index 0000000000..c3f3ed3036 --- /dev/null +++ b/apps/cli/src/upload_cmd.rs @@ -0,0 +1,183 @@ +use cap_upload::{AuthConfig, CapClient, UploadEngine, UploadProgress, VideoMetadata}; +use clap::Args; +use indicatif::{ProgressBar, ProgressStyle}; +use std::path::PathBuf; + +#[derive(Args)] +pub struct UploadArgs { + path: PathBuf, + #[arg(long)] + password: Option, + #[arg(long)] + org: Option, +} + +struct CliProgress { + bar: ProgressBar, +} + +impl CliProgress { + fn new(total: u64, filename: &str) -> Self { + let bar = ProgressBar::new(total); + bar.set_style( + ProgressStyle::with_template( + "Uploading {msg} [{bar:40.cyan/blue}] {percent}% ({bytes}/{total_bytes}) {bytes_per_sec}", + ) + .unwrap() + .progress_chars("=>-"), + ); + bar.set_message(filename.to_string()); + Self { bar } + } +} + +impl UploadProgress for CliProgress { + fn on_chunk_uploaded(&self, bytes_uploaded: u64, _total_bytes: u64) { + self.bar.set_position(bytes_uploaded); + } + + fn on_complete(&self) { + self.bar.finish_with_message("done"); + } + + fn on_error(&self, error: &str) { + self.bar.abandon_with_message(format!("error: {error}")); + } +} + +impl UploadArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + let engine = UploadEngine::new(&client); + + let path = &self.path; + if !path.exists() { + return 
Err(format!("File not found: {}", path.display())); + } + + let metadata = extract_metadata(path)?; + + let file_size = std::fs::metadata(path) + .map_err(|e| format!("Cannot read file: {e}"))? + .len(); + let filename = path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("video") + .to_string(); + + let progress: Option> = if json { + None + } else { + Some(Box::new(CliProgress::new(file_size, &filename))) + }; + + let result = engine + .upload_file(path, metadata, progress.as_deref(), self.org.as_deref()) + .await + .map_err(|e| e.to_string())?; + + if let Some(ref pw) = self.password { + match client.set_video_password(&result.video_id, Some(pw)).await { + Ok(()) => { + if !json { + eprintln!("Password set on video."); + } + } + Err(e) => { + eprintln!("Warning: failed to set password: {e}"); + } + } + } + + if json { + println!( + "{}", + serde_json::json!({ + "video_id": result.video_id, + "share_url": result.share_url, + "size_bytes": file_size, + "password_set": self.password.is_some(), + }) + ); + } else { + println!("{}", result.share_url); + } + + Ok(()) + } +} + +fn extract_metadata(path: &std::path::Path) -> Result { + let output = std::process::Command::new("ffprobe") + .args([ + "-v", + "quiet", + "-print_format", + "json", + "-show_format", + "-show_streams", + ]) + .arg(path) + .output() + .map_err(|e| format!("Failed to run ffprobe (is ffmpeg installed?): {e}"))?; + + if !output.status.success() { + return Err(format!( + "ffprobe failed: {}", + String::from_utf8_lossy(&output.stderr) + )); + } + + let probe: serde_json::Value = serde_json::from_slice(&output.stdout) + .map_err(|e| format!("Failed to parse ffprobe output: {e}"))?; + + let video_stream = probe["streams"] + .as_array() + .and_then(|streams| { + streams + .iter() + .find(|s| s["codec_type"].as_str() == Some("video")) + }) + .ok_or_else(|| "No video stream found in file".to_string())?; + + let duration_secs = probe["format"]["duration"] + .as_str() + .and_then(|d| 
d.parse::().ok()) + .unwrap_or(0.0); + + let width = video_stream["width"].as_u64().unwrap_or(0) as u32; + let height = video_stream["height"].as_u64().unwrap_or(0) as u32; + + let fps = video_stream["r_frame_rate"].as_str().and_then(|rate| { + let parts: Vec<&str> = rate.split('/').collect(); + if parts.len() == 2 { + let num = parts[0].parse::().ok()?; + let den = parts[1].parse::().ok()?; + if den > 0.0 { Some(num / den) } else { None } + } else { + rate.parse::().ok() + } + }); + + Ok(VideoMetadata { + duration_secs, + width, + height, + fps, + }) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn extract_metadata_missing_file() { + let result = extract_metadata(std::path::Path::new("/tmp/nonexistent_cap_test_video.mp4")); + assert!(result.is_err()); + let err = result.unwrap_err(); + assert!(err.contains("ffprobe")); + } +} diff --git a/apps/cli/src/videos.rs b/apps/cli/src/videos.rs new file mode 100644 index 0000000000..fd138aa32e --- /dev/null +++ b/apps/cli/src/videos.rs @@ -0,0 +1,286 @@ +use cap_upload::{AuthConfig, CapClient}; +use clap::Args; + +#[derive(Args)] +pub struct ListArgs { + #[arg(long)] + org: Option, + #[arg(long, default_value = "20")] + limit: u32, + #[arg(long, default_value = "0")] + offset: u32, +} + +#[derive(Args)] +pub struct GetArgs { + video_id: String, +} + +#[derive(Args)] +pub struct DeleteArgs { + video_id: String, +} + +#[derive(Args)] +pub struct OpenArgs { + video_id: String, +} + +#[derive(Args)] +pub struct InfoArgs { + video_id: String, +} + +#[derive(Args)] +pub struct TranscriptArgs { + video_id: String, +} + +#[derive(Args)] +pub struct PasswordArgs { + video_id: String, + #[arg(long)] + remove: bool, + #[arg(long)] + set: Option, +} + +impl ListArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + let resp = client + .list_videos(self.org.as_deref(), 
self.limit, self.offset) + .await + .map_err(|e| e.to_string())?; + + if json { + let enriched: Vec<_> = resp + .data + .iter() + .map(|v| { + let mut obj = serde_json::to_value(v).unwrap(); + obj.as_object_mut() + .unwrap() + .insert("share_url".into(), client.share_url(&v.id).into()); + obj + }) + .collect(); + println!("{}", serde_json::to_string_pretty(&enriched).unwrap()); + } else if resp.data.is_empty() { + eprintln!("No videos found."); + } else { + for v in &resp.data { + let dur = v.duration.map_or("--".to_string(), |d| { + format!("{}m{}s", d as u64 / 60, d as u64 % 60) + }); + println!( + "{} {} {} {}", + v.id, + dur, + v.name.as_deref().unwrap_or("(untitled)"), + client.share_url(&v.id) + ); + } + eprintln!("Showing {}/{} videos", resp.data.len(), resp.total); + } + Ok(()) + } +} + +impl GetArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + let args = InfoArgs { + video_id: self.video_id, + }; + args.run(json).await + } +} + +impl DeleteArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + client + .delete_video(&self.video_id) + .await + .map_err(|e| e.to_string())?; + + if json { + println!( + "{}", + serde_json::json!({ + "status": "deleted", + "video_id": self.video_id, + }) + ); + } else { + eprintln!("Video {} deleted.", self.video_id); + } + Ok(()) + } +} + +impl OpenArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + let url = client.share_url(&self.video_id); + + if json { + println!( + "{}", + serde_json::json!({ + "video_id": self.video_id, + "share_url": url, + }) + ); + } else { + eprintln!("Opening {url}"); + if open::that(&url).is_err() { + eprintln!("Could not open browser. 
Visit: {url}"); + } + } + Ok(()) + } +} + +impl InfoArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + let info = client + .get_video_info(&self.video_id) + .await + .map_err(|e| e.to_string())?; + + let url = client.share_url(&self.video_id); + + if json { + let mut obj = serde_json::to_value(&info).unwrap(); + obj.as_object_mut() + .unwrap() + .insert("share_url".into(), url.clone().into()); + println!("{}", serde_json::to_string_pretty(&obj).unwrap()); + } else { + println!("{}", info.name.as_deref().unwrap_or("(untitled)")); + println!(" URL: {url}"); + if let Some(dur) = info.duration { + let mins = dur as u64 / 60; + let secs = dur as u64 % 60; + println!(" Duration: {mins}m {secs}s"); + } + if let (Some(w), Some(h)) = (info.width, info.height) { + println!(" Resolution: {w}x{h}"); + } + if let Some(status) = &info.transcription_status { + println!(" Transcription: {status}"); + } + if info.has_password { + println!(" Password: set"); + } + if let Some(title) = &info.ai_title { + println!(" AI Title: {title}"); + } + if let Some(summary) = &info.summary { + println!(" Summary: {summary}"); + } + if let Some(chapters) = &info.chapters { + println!(" Chapters:"); + for ch in chapters { + let mins = ch.start as u64 / 60; + let secs = ch.start as u64 % 60; + println!(" [{mins}:{secs:02}] {}", ch.title); + } + } + } + Ok(()) + } +} + +impl TranscriptArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + let vtt = client + .get_transcript(&self.video_id) + .await + .map_err(|e| e.to_string())?; + + if json { + println!( + "{}", + serde_json::json!({ + "video_id": self.video_id, + "format": "vtt", + "content": vtt, + }) + ); + } else { + println!("{vtt}"); + } + Ok(()) + } +} 
+ +impl PasswordArgs { + pub async fn run(self, json: bool) -> Result<(), String> { + let auth = AuthConfig::resolve().map_err(|e| e.to_string())?; + let client = CapClient::new(auth).map_err(|e| e.to_string())?; + + if self.remove { + client + .set_video_password(&self.video_id, None) + .await + .map_err(|e| e.to_string())?; + if json { + println!( + "{}", + serde_json::json!({"status": "removed", "video_id": self.video_id}) + ); + } else { + eprintln!("Password removed from video {}.", self.video_id); + } + } else if let Some(ref pw) = self.set { + client + .set_video_password(&self.video_id, Some(pw)) + .await + .map_err(|e| e.to_string())?; + if json { + println!( + "{}", + serde_json::json!({"status": "set", "video_id": self.video_id}) + ); + } else { + eprintln!("Password set on video {}.", self.video_id); + } + } else { + return Err( + "Specify --set to set a password or --remove to remove it.".to_string(), + ); + } + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn password_args_requires_set_or_remove() { + let args = PasswordArgs { + video_id: "test123".to_string(), + remove: false, + set: None, + }; + let result = args.run(false).await; + assert!(result.is_err()); + assert!(result.unwrap_err().contains("--set")); + } +} diff --git a/apps/web/app/api/desktop/[...route]/video.ts b/apps/web/app/api/desktop/[...route]/video.ts index 168d1b5589..1c754f711c 100644 --- a/apps/web/app/api/desktop/[...route]/video.ts +++ b/apps/web/app/api/desktop/[...route]/video.ts @@ -1,4 +1,5 @@ import { db } from "@cap/database"; +import { hashPassword } from "@cap/database/crypto"; import { sendEmail } from "@cap/database/emails/config"; import { FirstShareableLink } from "@cap/database/emails/first-shareable-link"; import { nanoId } from "@cap/database/helpers"; @@ -15,7 +16,7 @@ import { dub, userIsPro } from "@cap/utils"; import { S3Buckets } from "@cap/web-backend"; import { Organisation, Video } from "@cap/web-domain"; import { 
zValidator } from "@hono/zod-validator"; -import { and, count, eq, lte, or } from "drizzle-orm"; +import { and, count, desc, eq, lte, or } from "drizzle-orm"; import { Effect, Option } from "effect"; import { Hono } from "hono"; import { z } from "zod"; @@ -23,6 +24,7 @@ import { runPromise } from "@/lib/server"; import { isFromDesktopSemver, UPLOAD_PROGRESS_VERSION } from "@/utils/desktop"; import { stringOrNumberOptional } from "@/utils/zod"; import { withAuth } from "../../utils"; +import type { VideoMetadata } from "@cap/database/types/metadata"; export const app = new Hono().use(withAuth); @@ -382,3 +384,173 @@ app.post( } }, ); + +app.get( + "/info", + zValidator("query", z.object({ videoId: z.string() })), + async (c) => { + const videoId = Video.VideoId.make(c.req.valid("query").videoId); + const user = c.get("user"); + + const [result] = await db() + .select() + .from(videos) + .where(and(eq(videos.id, videoId), eq(videos.ownerId, user.id))); + + if (!result) + return c.json({ error: "Video not found" }, { status: 404 }); + + const meta = (result.metadata ?? {}) as VideoMetadata; + + return c.json({ + id: result.id, + name: result.name, + createdAt: result.createdAt, + duration: result.duration, + width: result.width, + height: result.height, + public: result.public, + hasPassword: result.password !== null, + transcriptionStatus: result.transcriptionStatus, + aiTitle: meta.aiTitle ?? null, + summary: meta.summary ?? null, + chapters: meta.chapters ?? 
null, + }); + }, +); + +app.get( + "/transcript", + zValidator("query", z.object({ videoId: z.string() })), + async (c) => { + const videoId = Video.VideoId.make(c.req.valid("query").videoId); + const user = c.get("user"); + + const [result] = await db() + .select({ video: videos, bucket: s3Buckets }) + .from(videos) + .leftJoin(s3Buckets, eq(videos.bucket, s3Buckets.id)) + .where(and(eq(videos.id, videoId), eq(videos.ownerId, user.id))); + + if (!result?.video) + return c.json({ error: "Video not found" }, { status: 404 }); + + if (result.video.transcriptionStatus !== "COMPLETE") + return c.json({ + error: "Transcript not ready", + status: result.video.transcriptionStatus, + }, { status: 404 }); + + try { + const vttContent = await Effect.gen(function* () { + const [bucket] = yield* S3Buckets.getBucketAccess( + Option.fromNullable(result.bucket?.id), + ); + return yield* bucket.getObject( + `${result.video.ownerId}/${videoId}/transcription.vtt`, + ); + }).pipe(runPromise); + + if (Option.isNone(vttContent)) + return c.json({ error: "Transcript file not found" }, { status: 404 }); + + return c.json({ content: vttContent.value }); + } catch (error) { + console.error("Error fetching transcript:", error); + return c.json({ error: "Failed to fetch transcript" }, { status: 500 }); + } + }, +); + +app.post( + "/password", + zValidator( + "json", + z.object({ + videoId: z.string(), + password: z.string().nullable(), + }), + ), + async (c) => { + const { videoId: videoIdRaw, password } = c.req.valid("json"); + const videoId = Video.VideoId.make(videoIdRaw); + const user = c.get("user"); + + const [video] = await db() + .select() + .from(videos) + .where(and(eq(videos.id, videoId), eq(videos.ownerId, user.id))); + + if (!video) + return c.json({ error: "Video not found" }, { status: 404 }); + + if (password === null) { + await db() + .update(videos) + .set({ password: null }) + .where(eq(videos.id, videoId)); + return c.json({ success: true, message: "Password removed" }); 
+ } + + const hashed = await hashPassword(password); + await db() + .update(videos) + .set({ password: hashed }) + .where(eq(videos.id, videoId)); + + return c.json({ success: true, message: "Password set" }); + }, +); + +app.get( + "/list", + zValidator( + "query", + z.object({ + orgId: z.string().optional(), + limit: z.coerce.number().int().min(1).max(100).default(20), + offset: z.coerce.number().int().min(0).default(0), + }), + ), + async (c) => { + const { orgId, limit, offset } = c.req.valid("query"); + const user = c.get("user"); + + const conditions = [eq(videos.ownerId, user.id)]; + + if (orgId) { + conditions.push(eq(videos.orgId, Organisation.OrganisationId.make(orgId))); + } + + const whereClause = and(...conditions); + + const [data, countResult] = await Promise.all([ + db() + .select({ + id: videos.id, + name: videos.name, + createdAt: videos.createdAt, + duration: videos.duration, + hasPassword: videos.password, + transcriptionStatus: videos.transcriptionStatus, + }) + .from(videos) + .where(whereClause) + .orderBy(desc(videos.createdAt)) + .limit(limit) + .offset(offset), + db() + .select({ total: count() }) + .from(videos) + .where(whereClause), + ]); + + return c.json({ + data: data.map((v) => ({ + ...v, + hasPassword: v.hasPassword !== null, + })), + total: countResult[0]?.total ?? 
0, + }); + }, +); diff --git a/crates/upload/Cargo.toml b/crates/upload/Cargo.toml new file mode 100644 index 0000000000..16b4ed3897 --- /dev/null +++ b/crates/upload/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "cap-upload" +version = "0.1.0" +edition = "2021" + +[dependencies] +reqwest = { version = "0.12", features = ["rustls-tls", "json", "stream", "multipart"], default-features = false } +chrono = { version = "0.4", features = ["serde"] } +tokio = { workspace = true } +serde = { workspace = true } +serde_json = "1" +toml = "0.8" +dirs = "5" +thiserror = { workspace = true } +tracing = { workspace = true } +image = { version = "0.25.10", default-features = false, features = ["jpeg", "png"] } + +[dev-dependencies] +tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } +tempfile = "3" + +[lints] +workspace = true diff --git a/crates/upload/src/auth.rs b/crates/upload/src/auth.rs new file mode 100644 index 0000000000..1409cffcd8 --- /dev/null +++ b/crates/upload/src/auth.rs @@ -0,0 +1,192 @@ +use crate::error::AuthError; +use serde::{Deserialize, Serialize}; +use std::path::{Path, PathBuf}; + +#[derive(Debug, Clone)] +pub struct AuthConfig { + pub server_url: String, + pub api_key: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct ConfigFile { + auth: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct AuthSection { + server_url: String, + api_key: String, +} + +fn config_dir() -> Result { + dirs::config_dir() + .map(|d| d.join("cap")) + .ok_or(AuthError::NoConfigDir) +} + +fn config_path() -> Result { + config_dir().map(|d| d.join("config.toml")) +} + +impl AuthConfig { + pub fn resolve() -> Result { + if let Some(config) = Self::from_env() { + return Ok(config); + } + if let Some(config) = Self::from_config_file()? 
{ + return Ok(config); + } + Err(AuthError::NotConfigured) + } + + pub fn from_env() -> Option { + let api_key = std::env::var("CAP_API_KEY").ok()?; + let server_url = std::env::var("CAP_SERVER_URL").ok()?; + Some(Self { + server_url, + api_key, + }) + } + + pub fn from_config_file() -> Result, AuthError> { + let path = config_path()?; + Self::from_config_file_at(&path) + } + + fn from_config_file_at(path: &Path) -> Result, AuthError> { + if !path.exists() { + return Ok(None); + } + let contents = std::fs::read_to_string(path).map_err(|e| AuthError::ConfigRead { + path: path.to_path_buf(), + source: e, + })?; + let config: ConfigFile = toml::from_str(&contents).map_err(|e| AuthError::ConfigParse { + path: path.to_path_buf(), + source: e, + })?; + Ok(config.auth.map(|a| Self { + server_url: a.server_url, + api_key: a.api_key, + })) + } + + pub fn save(server_url: &str, api_key: &str) -> Result { + let path = config_path()?; + Self::save_to(&path, server_url, api_key) + } + + fn save_to(path: &Path, server_url: &str, api_key: &str) -> Result { + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent).map_err(|e| AuthError::ConfigWrite { + path: path.to_path_buf(), + source: e, + })?; + } + let config = ConfigFile { + auth: Some(AuthSection { + server_url: server_url.to_string(), + api_key: api_key.to_string(), + }), + }; + let contents = toml::to_string_pretty(&config).expect("AuthSection is always serializable"); + std::fs::write(path, contents).map_err(|e| AuthError::ConfigWrite { + path: path.to_path_buf(), + source: e, + })?; + Ok(path.to_path_buf()) + } + + pub fn remove() -> Result<(), AuthError> { + let path = config_path()?; + Self::remove_at(&path) + } + + fn remove_at(path: &Path) -> Result<(), AuthError> { + if !path.exists() { + return Ok(()); + } + let contents = std::fs::read_to_string(path).map_err(|e| AuthError::ConfigRead { + path: path.to_path_buf(), + source: e, + })?; + let mut config: ConfigFile = + 
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::{Mutex, OnceLock};
    use tempfile::TempDir;

    /// Tests in this module mutate process-wide environment variables.
    /// Rust's test harness runs tests on multiple threads by default, so
    /// unsynchronized `set_var`/`remove_var` calls race with each other and
    /// with `from_env()` reads — serialize them behind a single lock.
    fn env_lock() -> &'static Mutex<()> {
        static LOCK: OnceLock<Mutex<()>> = OnceLock::new();
        LOCK.get_or_init(|| Mutex::new(()))
    }

    #[test]
    fn env_vars_override_config_file() {
        let _guard = env_lock().lock().unwrap();
        let tmp = TempDir::new().unwrap();
        let config_path = tmp.path().join("config.toml");
        AuthConfig::save_to(&config_path, "https://file.example.com", "file-key").unwrap();

        std::env::set_var("CAP_API_KEY", "env-key");
        std::env::set_var("CAP_SERVER_URL", "https://env.example.com");

        let from_env = AuthConfig::from_env().unwrap();
        assert_eq!(from_env.api_key, "env-key");
        assert_eq!(from_env.server_url, "https://env.example.com");

        std::env::remove_var("CAP_API_KEY");
        std::env::remove_var("CAP_SERVER_URL");
    }

    #[test]
    fn config_file_round_trip() {
        let tmp = TempDir::new().unwrap();
        let path = tmp.path().join("config.toml");

        AuthConfig::save_to(&path, "https://cap.example.com", "test-key-123").unwrap();

        let loaded = AuthConfig::from_config_file_at(&path).unwrap().unwrap();
        assert_eq!(loaded.server_url, "https://cap.example.com");
        assert_eq!(loaded.api_key, "test-key-123");
    }

    #[test]
    fn missing_config_returns_none() {
        let tmp = TempDir::new().unwrap();
        let path = tmp.path().join("nonexistent.toml");
        let result = AuthConfig::from_config_file_at(&path).unwrap();
        assert!(result.is_none());
    }

    #[test]
    fn remove_clears_auth_section() {
        let tmp = TempDir::new().unwrap();
        let path = tmp.path().join("config.toml");

        AuthConfig::save_to(&path, "https://cap.example.com", "key").unwrap();
        AuthConfig::remove_at(&path).unwrap();

        let loaded = AuthConfig::from_config_file_at(&path).unwrap();
        assert!(loaded.is_none());
    }

    // Renamed from `missing_both_sources_returns_not_configured`: the original
    // never called `resolve()`, so it did not actually exercise the
    // `NotConfigured` path its name claimed. It verifies that each individual
    // source yields nothing when absent.
    #[test]
    fn missing_sources_yield_no_config() {
        let _guard = env_lock().lock().unwrap();
        std::env::remove_var("CAP_API_KEY");
        std::env::remove_var("CAP_SERVER_URL");
        assert!(AuthConfig::from_env().is_none());

        let tmp = TempDir::new().unwrap();
        let path = tmp.path().join("nonexistent.toml");
        assert!(AuthConfig::from_config_file_at(&path).unwrap().is_none());
    }
}
url.push_str(&format!("&orgId={org}")); + } + let resp = self.authed(self.http.get(&url)).send().await.map_err(|e| { + if e.is_connect() { + return ApiError::Unreachable { + url: self.auth.server_url.clone(), + }; + } + if e.is_timeout() { + return ApiError::Timeout { + timeout_secs: API_TIMEOUT.as_secs(), + }; + } + ApiError::Http(e) + })?; + let resp = Self::check_response(resp).await?; + Ok(resp.json().await?) + } + + pub async fn delete_video(&self, video_id: &str) -> Result<(), ApiError> { + let url = self.url("/api/desktop/video/delete"); + let resp = self + .authed(self.http.delete(&url).query(&[("videoId", video_id)])) + .send() + .await?; + Self::check_response(resp).await?; + Ok(()) + } + + pub async fn initiate_multipart( + &self, + video_id: &str, + content_type: &str, + ) -> Result { + let url = self.url("/api/upload/multipart/initiate"); + let resp = self + .authed( + self.http + .post(&url) + .header("Content-Type", "application/json") + .json(&serde_json::json!({ + "videoId": video_id, + "contentType": content_type + })), + ) + .send() + .await?; + let resp = Self::check_response(resp).await?; + let data: InitiateMultipartResponse = resp.json().await?; + Ok(data.upload_id) + } + + pub async fn presign_part( + &self, + video_id: &str, + upload_id: &str, + part_number: u32, + ) -> Result { + let url = self.url("/api/upload/multipart/presign-part"); + let resp = self + .authed( + self.http + .post(&url) + .header("Content-Type", "application/json") + .json(&serde_json::json!({ + "videoId": video_id, + "uploadId": upload_id, + "partNumber": part_number + })), + ) + .send() + .await?; + let resp = Self::check_response(resp).await?; + let data: PresignPartResponse = resp.json().await?; + Ok(data.presigned_url) + } + + pub async fn complete_multipart( + &self, + req: &CompleteMultipartRequest, + ) -> Result { + let url = self.url("/api/upload/multipart/complete"); + let resp = self + .authed( + self.http + .post(&url) + .header("Content-Type", 
"application/json") + .json(req), + ) + .send() + .await?; + let resp = Self::check_response(resp).await?; + Ok(resp.json().await?) + } + + pub async fn abort_multipart(&self, video_id: &str, upload_id: &str) -> Result<(), ApiError> { + let url = self.url("/api/upload/multipart/abort"); + let resp = self + .authed( + self.http + .post(&url) + .header("Content-Type", "application/json") + .json(&serde_json::json!({ + "videoId": video_id, + "uploadId": upload_id + })), + ) + .send() + .await?; + Self::check_response(resp).await?; + Ok(()) + } + + pub async fn list_organizations(&self) -> Result, ApiError> { + let url = self.url("/api/desktop/organizations"); + let resp = self.authed(self.http.get(&url)).send().await?; + let resp = Self::check_response(resp).await?; + Ok(resp.json().await?) + } + + pub async fn get_s3_config(&self) -> Result { + let url = self.url("/api/desktop/s3/config/get"); + let resp = self.authed(self.http.get(&url)).send().await?; + let resp = Self::check_response(resp).await?; + let wrapper: S3ConfigResponse = resp.json().await?; + Ok(wrapper.config) + } + + pub async fn set_s3_config(&self, config: &S3ConfigInput) -> Result<(), ApiError> { + let url = self.url("/api/desktop/s3/config"); + let resp = self + .authed( + self.http + .post(&url) + .header("Content-Type", "application/json") + .json(config), + ) + .send() + .await?; + Self::check_response(resp).await?; + Ok(()) + } + + pub async fn test_s3_config(&self, config: &S3ConfigInput) -> Result<(), ApiError> { + let url = self.url("/api/desktop/s3/config/test"); + let resp = self + .authed( + self.http + .post(&url) + .header("Content-Type", "application/json") + .json(config), + ) + .send() + .await?; + Self::check_response(resp).await?; + Ok(()) + } + + pub async fn delete_s3_config(&self) -> Result<(), ApiError> { + let url = self.url("/api/desktop/s3/config/delete"); + let resp = self.authed(self.http.delete(&url)).send().await?; + Self::check_response(resp).await?; + Ok(()) + } + + 
pub async fn upload_signed( + &self, + video_id: &str, + subpath: &str, + data: Vec, + ) -> Result<(), ApiError> { + let url = self.url("/api/upload/signed"); + let resp = self + .authed( + self.http + .post(&url) + .header("Content-Type", "application/json") + .json(&serde_json::json!({ + "videoId": video_id, + "subpath": subpath, + "method": "put" + })), + ) + .send() + .await?; + let resp = Self::check_response(resp).await?; + let body: serde_json::Value = resp.json().await?; + let presigned_url = + body["presignedPutData"]["url"] + .as_str() + .ok_or_else(|| ApiError::ServerError { + status: 500, + body: "Missing presignedPutData.url in response".to_string(), + })?; + self.http + .put(presigned_url) + .header("Content-Type", "image/jpeg") + .header("Content-Length", data.len()) + .body(data) + .timeout(Duration::from_secs(30)) + .send() + .await?; + Ok(()) + } + + pub async fn get_video_info(&self, video_id: &str) -> Result { + let url = self.url("/api/desktop/video/info"); + let resp = self + .authed(self.http.get(&url).query(&[("videoId", video_id)])) + .send() + .await?; + let resp = Self::check_response(resp).await?; + Ok(resp.json().await?) 
+ } + + pub async fn get_transcript(&self, video_id: &str) -> Result { + let url = self.url("/api/desktop/video/transcript"); + let resp = self + .authed(self.http.get(&url).query(&[("videoId", video_id)])) + .send() + .await?; + let resp = Self::check_response(resp).await?; + let data: TranscriptResponse = resp.json().await?; + Ok(data.content) + } + + pub async fn set_video_password( + &self, + video_id: &str, + password: Option<&str>, + ) -> Result<(), ApiError> { + let url = self.url("/api/desktop/video/password"); + let resp = self + .authed( + self.http + .post(&url) + .header("Content-Type", "application/json") + .json(&serde_json::json!({ + "videoId": video_id, + "password": password + })), + ) + .send() + .await?; + Self::check_response(resp).await?; + Ok(()) + } + + pub async fn list_videos( + &self, + org_id: Option<&str>, + limit: u32, + offset: u32, + ) -> Result { + let url = self.url("/api/desktop/video/list"); + let mut query: Vec<(&str, String)> = + vec![("limit", limit.to_string()), ("offset", offset.to_string())]; + if let Some(org) = org_id { + query.push(("orgId", org.to_string())); + } + let resp = self + .authed(self.http.get(&url).query(&query)) + .send() + .await?; + let resp = Self::check_response(resp).await?; + Ok(resp.json().await?) 
+ } + + pub fn share_url(&self, video_id: &str) -> String { + format!( + "{}/s/{}", + self.auth.server_url.trim_end_matches('/'), + video_id + ) + } + + pub async fn submit_feedback( + &self, + feedback: &str, + os: &str, + version: &str, + ) -> Result<(), ApiError> { + let resp = self + .authed(self.http.post(self.url("/api/desktop/feedback"))) + .form(&[("feedback", feedback), ("os", os), ("version", version)]) + .send() + .await?; + Self::check_response(resp).await?; + Ok(()) + } + + pub async fn upload_debug_logs( + &self, + log_data: Vec, + os: &str, + version: &str, + diagnostics_json: &str, + ) -> Result<(), ApiError> { + let filename = format!( + "cap-cli-{}-{}-{}.log", + os, + version, + chrono::Utc::now().format("%Y%m%d%H%M%S") + ); + let log_part = reqwest::multipart::Part::bytes(log_data) + .file_name(filename) + .mime_str("text/plain") + .map_err(|e| ApiError::Other(e.to_string()))?; + + let form = reqwest::multipart::Form::new() + .part("file", log_part) + .text("os", os.to_string()) + .text("version", version.to_string()) + .text("diagnostics", diagnostics_json.to_string()); + + let resp = self + .authed(self.http.post(self.url("/api/desktop/logs"))) + .multipart(form) + .send() + .await?; + Self::check_response(resp).await?; + Ok(()) + } +} diff --git a/crates/upload/src/error.rs b/crates/upload/src/error.rs new file mode 100644 index 0000000000..5d243b293d --- /dev/null +++ b/crates/upload/src/error.rs @@ -0,0 +1,60 @@ +use std::path::PathBuf; + +#[derive(Debug, thiserror::Error)] +pub enum AuthError { + #[error("Not logged in. 
Run \"cap auth login --server URL\" or set CAP_API_KEY and CAP_SERVER_URL environment variables.")] + NotConfigured, + #[error("Failed to read config file at {path}: {source}")] + ConfigRead { + path: PathBuf, + source: std::io::Error, + }, + #[error("Failed to parse config file at {path}: {source}")] + ConfigParse { + path: PathBuf, + source: toml::de::Error, + }, + #[error("Failed to write config file at {path}: {source}")] + ConfigWrite { + path: PathBuf, + source: std::io::Error, + }, + #[error("Could not determine config directory")] + NoConfigDir, +} + +#[derive(Debug, thiserror::Error)] +pub enum ApiError { + #[error("Authentication failed. Check your API key or run \"cap auth login\".")] + Unauthorized, + #[error("Cannot reach {url}. Check the URL and your network connection.")] + Unreachable { url: String }, + #[error("Request timed out after {timeout_secs}s")] + Timeout { timeout_secs: u64 }, + #[error("Server returned {status}: {body}")] + ServerError { status: u16, body: String }, + #[error("HTTP error: {0}")] + Http(#[from] reqwest::Error), + #[error("JSON error: {0}")] + Json(#[from] serde_json::Error), + #[error("{0}")] + Other(String), +} + +#[derive(Debug, thiserror::Error)] +pub enum UploadError { + #[error("File not found: {0}")] + FileNotFound(PathBuf), + #[error("Unsupported format: {extension}. 
Supported: mp4, webm, mov, mkv, avi")] + UnsupportedFormat { extension: String }, + #[error("Failed to read file: {0}")] + IoError(#[from] std::io::Error), + #[error("API error during upload: {0}")] + Api(#[from] ApiError), + #[error("All {max_retries} retries exhausted for chunk {part_number}")] + ChunkFailed { part_number: u32, max_retries: u32 }, + #[error("Upload aborted: {reason}")] + Aborted { reason: String }, + #[error("Authentication error: {0}")] + Auth(#[from] AuthError), +} diff --git a/crates/upload/src/lib.rs b/crates/upload/src/lib.rs new file mode 100644 index 0000000000..cea83a9a4c --- /dev/null +++ b/crates/upload/src/lib.rs @@ -0,0 +1,17 @@ +pub mod auth; +pub mod client; +pub mod error; +pub mod thumbnail; +pub mod types; +pub mod upload; + +pub use auth::AuthConfig; +pub use client::CapClient; +pub use error::{ApiError, AuthError, UploadError}; +pub use thumbnail::generate_and_upload_thumbnail; +pub use types::{ + Chapter, CompleteMultipartRequest, CompleteMultipartResponse, ListVideosResponse, Organization, + S3ConfigData, S3ConfigInput, S3ConfigResponse, UploadResult, UploadedPart, Video, VideoInfo, + VideoMetadata, VideoSummary, +}; +pub use upload::{detect_content_type, UploadEngine, UploadProgress}; diff --git a/crates/upload/src/thumbnail.rs b/crates/upload/src/thumbnail.rs new file mode 100644 index 0000000000..8af7e920fb --- /dev/null +++ b/crates/upload/src/thumbnail.rs @@ -0,0 +1,93 @@ +use crate::client::CapClient; +use image::codecs::jpeg::JpegEncoder; +use image::ImageReader; +use std::path::Path; +use std::process::Command; +use tracing::debug; + +pub fn extract_first_frame(video_path: &Path, output_path: &Path) -> Result<(), String> { + let status = Command::new("ffmpeg") + .args([ + "-i", + video_path.to_str().unwrap_or_default(), + "-vframes", + "1", + "-q:v", + "2", + "-y", + output_path.to_str().unwrap_or_default(), + ]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status() + .map_err(|e| 
format!("Failed to run ffmpeg: {e}"))?; + + if !status.success() { + return Err("ffmpeg failed to extract frame".to_string()); + } + Ok(()) +} + +pub fn compress_image(path: &Path) -> Result, String> { + let img = ImageReader::open(path) + .map_err(|e| format!("Failed to open image: {e}"))? + .decode() + .map_err(|e| format!("Failed to decode image: {e}"))?; + + let resized = img.resize( + img.width() / 2, + img.height() / 2, + image::imageops::FilterType::Nearest, + ); + + let mut buffer = Vec::new(); + let mut encoder = JpegEncoder::new_with_quality(&mut buffer, 30); + encoder + .encode( + resized.as_bytes(), + resized.width(), + resized.height(), + resized.color().into(), + ) + .map_err(|e| format!("Failed to encode JPEG: {e}"))?; + + Ok(buffer) +} + +pub async fn generate_and_upload_thumbnail( + client: &CapClient, + video_id: &str, + video_path: &Path, +) -> Result<(), String> { + let temp_dir = std::env::temp_dir(); + let frame_path = temp_dir.join(format!("cap-thumb-{video_id}.png")); + + extract_first_frame(video_path, &frame_path)?; + let jpeg_result = compress_image(&frame_path); + std::fs::remove_file(&frame_path).ok(); + let jpeg_data = jpeg_result?; + + debug!( + video_id, + size_bytes = jpeg_data.len(), + "Uploading thumbnail" + ); + + client + .upload_signed(video_id, "screenshot/screen-capture.jpg", jpeg_data) + .await + .map_err(|e| format!("Failed to upload thumbnail: {e}"))?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::path::PathBuf; + + #[test] + fn compress_nonexistent_file_returns_error() { + let result = compress_image(&PathBuf::from("/tmp/nonexistent-cap-test.png")); + assert!(result.is_err()); + } +} diff --git a/crates/upload/src/types.rs b/crates/upload/src/types.rs new file mode 100644 index 0000000000..151303e6d3 --- /dev/null +++ b/crates/upload/src/types.rs @@ -0,0 +1,242 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Video { + pub id: String, + pub 
user_id: String, + pub aws_region: String, + pub aws_bucket: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct InitiateMultipartResponse { + pub upload_id: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PresignPartResponse { + pub presigned_url: String, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct UploadedPart { + pub part_number: u32, + pub etag: String, + pub size: usize, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct CompleteMultipartRequest { + pub video_id: String, + pub upload_id: String, + pub parts: Vec, + #[serde(rename = "durationInSecs")] + pub duration_in_secs: f64, + pub width: u32, + pub height: u32, + #[serde(skip_serializing_if = "Option::is_none")] + pub fps: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct CompleteMultipartResponse { + pub location: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Organization { + pub id: String, + pub name: String, + #[serde(rename = "ownerId")] + pub owner_id: String, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct S3ConfigInput { + pub provider: String, + pub access_key_id: String, + pub secret_access_key: String, + pub endpoint: String, + pub bucket_name: String, + pub region: String, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct S3ConfigResponse { + pub config: S3ConfigData, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct S3ConfigData { + pub provider: String, + pub access_key_id: String, + pub secret_access_key: String, + pub endpoint: String, + pub bucket_name: String, + pub region: String, +} + +#[derive(Debug, Clone)] +pub struct VideoMetadata { + pub duration_secs: f64, + pub width: u32, + pub height: u32, + pub fps: Option, +} + +pub 
struct UploadResult { + pub video_id: String, + pub share_url: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct VideoInfo { + pub id: String, + pub name: Option, + pub created_at: Option, + pub duration: Option, + pub width: Option, + pub height: Option, + pub public: Option, + pub has_password: bool, + pub transcription_status: Option, + pub ai_title: Option, + pub summary: Option, + pub chapters: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Chapter { + pub title: String, + pub start: f64, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct TranscriptResponse { + pub content: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct VideoSummary { + pub id: String, + pub name: Option, + pub created_at: Option, + pub duration: Option, + pub has_password: bool, + pub transcription_status: Option, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct ListVideosResponse { + pub data: Vec, + pub total: u64, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn deserialize_video_info() { + let json = r#"{ + "id": "abc123", + "name": "My Video", + "createdAt": "2026-03-31T12:00:00Z", + "duration": 15.5, + "width": 1920, + "height": 1080, + "public": true, + "hasPassword": false, + "transcriptionStatus": "COMPLETE", + "aiTitle": "AI Generated Title", + "summary": "A summary of the video", + "chapters": [{"title": "Intro", "start": 0.0}, {"title": "Main", "start": 5.0}] + }"#; + let info: VideoInfo = serde_json::from_str(json).unwrap(); + assert_eq!(info.id, "abc123"); + assert_eq!(info.name.as_deref(), Some("My Video")); + assert_eq!(info.duration, Some(15.5)); + assert!(!info.has_password); + assert_eq!(info.chapters.as_ref().unwrap().len(), 2); + assert_eq!(info.chapters.as_ref().unwrap()[1].title, "Main"); + } + + #[test] + fn deserialize_video_info_nulls() { + let json = r#"{ + "id": "xyz", + "name": null, + 
"createdAt": null, + "duration": null, + "width": null, + "height": null, + "public": null, + "hasPassword": true, + "transcriptionStatus": null, + "aiTitle": null, + "summary": null, + "chapters": null + }"#; + let info: VideoInfo = serde_json::from_str(json).unwrap(); + assert_eq!(info.id, "xyz"); + assert!(info.name.is_none()); + assert!(info.duration.is_none()); + assert!(info.has_password); + assert!(info.chapters.is_none()); + } + + #[test] + fn deserialize_list_videos_response() { + let json = r#"{ + "data": [ + { + "id": "v1", + "name": "Video One", + "createdAt": "2026-03-31T12:00:00Z", + "duration": 30.0, + "hasPassword": false, + "transcriptionStatus": "PROCESSING" + }, + { + "id": "v2", + "name": null, + "createdAt": null, + "duration": null, + "hasPassword": true, + "transcriptionStatus": null + } + ], + "total": 42 + }"#; + let resp: ListVideosResponse = serde_json::from_str(json).unwrap(); + assert_eq!(resp.data.len(), 2); + assert_eq!(resp.total, 42); + assert_eq!(resp.data[0].id, "v1"); + assert!(resp.data[1].has_password); + assert!(resp.data[1].name.is_none()); + } + + #[test] + fn deserialize_chapter() { + let json = r#"{"title": "Getting Started", "start": 12.5}"#; + let ch: Chapter = serde_json::from_str(json).unwrap(); + assert_eq!(ch.title, "Getting Started"); + assert!((ch.start - 12.5).abs() < f64::EPSILON); + } + + #[test] + fn deserialize_transcript_response() { + let json = r#"{"content": "WEBVTT\n\n1\n00:00:00.000 --> 00:00:05.000\nHello world"}"#; + let resp: TranscriptResponse = serde_json::from_str(json).unwrap(); + assert!(resp.content.starts_with("WEBVTT")); + } +} diff --git a/crates/upload/src/upload.rs b/crates/upload/src/upload.rs new file mode 100644 index 0000000000..0ab6892a5a --- /dev/null +++ b/crates/upload/src/upload.rs @@ -0,0 +1,342 @@ +use crate::{ + client::CapClient, + error::UploadError, + types::{CompleteMultipartRequest, UploadResult, UploadedPart, VideoMetadata}, +}; +use std::path::Path; +use 
tokio::io::{AsyncReadExt, AsyncSeekExt}; +use tracing::{debug, info, warn}; + +const CHUNK_SIZE: usize = 5 * 1024 * 1024; +const MAX_CONCURRENCY: usize = 3; +const MAX_RETRIES: u32 = 3; + +pub trait UploadProgress: Send + Sync { + fn on_chunk_uploaded(&self, bytes_uploaded: u64, total_bytes: u64); + fn on_complete(&self); + fn on_error(&self, error: &str); +} + +pub fn detect_content_type(path: &Path) -> Result<&'static str, UploadError> { + let ext = path + .extension() + .and_then(|e| e.to_str()) + .unwrap_or("") + .to_lowercase(); + match ext.as_str() { + "mp4" => Ok("video/mp4"), + "webm" => Ok("video/webm"), + "mov" => Ok("video/quicktime"), + "mkv" => Ok("video/x-matroska"), + "avi" => Ok("video/x-msvideo"), + other => Err(UploadError::UnsupportedFormat { + extension: other.to_string(), + }), + } +} + +fn calculate_parts(file_size: u64, chunk_size: usize) -> Vec<(u32, u64, usize)> { + if file_size == 0 { + return vec![]; + } + let chunk_size_u64 = chunk_size as u64; + let num_parts = file_size.div_ceil(chunk_size_u64) as u32; + (0..num_parts) + .map(|i| { + let offset = i as u64 * chunk_size_u64; + let size = std::cmp::min(chunk_size_u64, file_size - offset) as usize; + (i + 1, offset, size) + }) + .collect() +} + +pub struct UploadEngine<'a> { + client: &'a CapClient, +} + +impl<'a> UploadEngine<'a> { + pub fn new(client: &'a CapClient) -> Self { + Self { client } + } + + pub async fn upload_file( + &self, + path: &Path, + metadata: VideoMetadata, + progress: Option<&dyn UploadProgress>, + org_id: Option<&str>, + ) -> Result { + if !path.exists() { + return Err(UploadError::FileNotFound(path.to_path_buf())); + } + + let content_type = detect_content_type(path)?; + let file_size = tokio::fs::metadata(path).await?.len(); + + info!( + path = %path.display(), + size_mb = file_size as f64 / 1_000_000.0, + content_type, + "Starting upload" + ); + + let video = self.client.create_video(org_id).await?; + let video_id = video.id.clone(); + + let upload_id = self + 
.client + .initiate_multipart(&video_id, content_type) + .await?; + + let parts_plan = calculate_parts(file_size, CHUNK_SIZE); + + let result = self + .upload_all_chunks( + path, + &video_id, + &upload_id, + &parts_plan, + file_size, + progress, + ) + .await; + + match result { + Ok(uploaded_parts) => { + let complete_req = CompleteMultipartRequest { + video_id: video_id.clone(), + upload_id: upload_id.clone(), + parts: uploaded_parts, + duration_in_secs: metadata.duration_secs, + width: metadata.width, + height: metadata.height, + fps: metadata.fps, + }; + + self.client.complete_multipart(&complete_req).await?; + + if let Some(p) = progress { + p.on_complete(); + } + + match crate::thumbnail::generate_and_upload_thumbnail(self.client, &video_id, path) + .await + { + Ok(()) => debug!("Thumbnail uploaded"), + Err(e) => warn!(error = %e, "Thumbnail upload failed (non-fatal)"), + } + + let share_url = self.client.share_url(&video_id); + info!(share_url = %share_url, "Upload complete"); + + Ok(UploadResult { + video_id, + share_url, + }) + } + Err(e) => { + warn!(video_id = %video_id, "Upload failed, aborting multipart upload"); + self.client + .abort_multipart(&video_id, &upload_id) + .await + .ok(); + if let Some(p) = progress { + p.on_error(&e.to_string()); + } + Err(e) + } + } + } + + async fn upload_all_chunks( + &self, + path: &Path, + video_id: &str, + upload_id: &str, + parts_plan: &[(u32, u64, usize)], + total_size: u64, + progress: Option<&dyn UploadProgress>, + ) -> Result, UploadError> { + let mut uploaded_parts: Vec = Vec::with_capacity(parts_plan.len()); + let mut bytes_uploaded: u64 = 0; + + for batch in parts_plan.chunks(MAX_CONCURRENCY) { + let mut handles = tokio::task::JoinSet::new(); + + for &(part_number, offset, size) in batch { + let mut chunk_data = vec![0u8; size]; + let mut file = tokio::fs::File::open(path).await?; + file.seek(std::io::SeekFrom::Start(offset)).await?; + file.read_exact(&mut chunk_data).await?; + + let vid = 
video_id.to_string(); + let uid = upload_id.to_string(); + + let presigned_url = self.client.presign_part(&vid, &uid, part_number).await?; + + handles.spawn({ + let chunk_data = chunk_data; + async move { + let size = chunk_data.len(); + let etag = upload_chunk_with_retry(&presigned_url, chunk_data, part_number) + .await?; + Ok::(UploadedPart { + part_number, + etag, + size, + }) + } + }); + } + + while let Some(result) = handles.join_next().await { + let part = result.expect("chunk upload task panicked")?; + bytes_uploaded += part.size as u64; + if let Some(p) = progress { + p.on_chunk_uploaded(bytes_uploaded, total_size); + } + uploaded_parts.push(part); + } + } + + uploaded_parts.sort_by_key(|p| p.part_number); + Ok(uploaded_parts) + } +} + +async fn upload_chunk_with_retry( + presigned_url: &str, + data: Vec, + part_number: u32, +) -> Result { + let client = reqwest::Client::new(); + + for attempt in 0..MAX_RETRIES { + if attempt > 0 { + let delay = std::time::Duration::from_secs(1 << attempt); + debug!( + part_number, + attempt, + delay_secs = delay.as_secs(), + "Retrying chunk upload" + ); + tokio::time::sleep(delay).await; + } + + match client + .put(presigned_url) + .header("Content-Length", data.len()) + .body(data.clone()) + .timeout(std::time::Duration::from_secs(30)) + .send() + .await + { + Ok(resp) => { + if !resp.status().is_success() { + let status = resp.status(); + let body = resp.text().await.unwrap_or_default(); + warn!(part_number, attempt, %status, body = %body, "Chunk upload HTTP error"); + continue; + } + let etag = resp + .headers() + .get("etag") + .and_then(|v| v.to_str().ok()) + .unwrap_or("") + .to_string(); + return Ok(etag); + } + Err(e) => { + warn!(part_number, attempt, error = %e, "Chunk upload failed"); + } + } + } + + Err(UploadError::ChunkFailed { + part_number, + max_retries: MAX_RETRIES, + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::path::PathBuf; + + #[test] + fn detect_mp4() { + assert_eq!( + 
detect_content_type(&PathBuf::from("video.mp4")).unwrap(), + "video/mp4" + ); + } + + #[test] + fn detect_webm() { + assert_eq!( + detect_content_type(&PathBuf::from("video.webm")).unwrap(), + "video/webm" + ); + } + + #[test] + fn detect_mov() { + assert_eq!( + detect_content_type(&PathBuf::from("recording.MOV")).unwrap(), + "video/quicktime" + ); + } + + #[test] + fn detect_mkv() { + assert_eq!( + detect_content_type(&PathBuf::from("video.mkv")).unwrap(), + "video/x-matroska" + ); + } + + #[test] + fn detect_avi() { + assert_eq!( + detect_content_type(&PathBuf::from("clip.avi")).unwrap(), + "video/x-msvideo" + ); + } + + #[test] + fn detect_unsupported() { + let err = detect_content_type(&PathBuf::from("file.gif")).unwrap_err(); + assert!(err.to_string().contains("Unsupported format")); + } + + #[test] + fn calculate_parts_exact_multiple() { + let parts = calculate_parts(15 * 1024 * 1024, 5 * 1024 * 1024); + assert_eq!(parts.len(), 3); + assert_eq!(parts[0], (1, 0, 5 * 1024 * 1024)); + assert_eq!(parts[1], (2, 5u64 * 1024 * 1024, 5 * 1024 * 1024)); + assert_eq!(parts[2], (3, 10u64 * 1024 * 1024, 5 * 1024 * 1024)); + } + + #[test] + fn calculate_parts_one_byte_over() { + let file_size = 5 * 1024 * 1024 + 1; + let parts = calculate_parts(file_size as u64, 5 * 1024 * 1024); + assert_eq!(parts.len(), 2); + assert_eq!(parts[0], (1, 0, 5 * 1024 * 1024)); + assert_eq!(parts[1], (2, 5u64 * 1024 * 1024, 1)); + } + + #[test] + fn calculate_parts_small_file() { + let parts = calculate_parts(100, 5 * 1024 * 1024); + assert_eq!(parts.len(), 1); + assert_eq!(parts[0], (1, 0, 100)); + } + + #[test] + fn calculate_parts_zero_bytes() { + let parts = calculate_parts(0, 5 * 1024 * 1024); + assert_eq!(parts.len(), 0); + } +} diff --git a/crates/upload/tests/integration.rs b/crates/upload/tests/integration.rs new file mode 100644 index 0000000000..b0191ea84c --- /dev/null +++ b/crates/upload/tests/integration.rs @@ -0,0 +1,121 @@ +use cap_upload::{AuthConfig, CapClient}; + +fn 
skip_unless_integration() -> Option { + let api_key = std::env::var("CAP_API_KEY").ok()?; + let server_url = + std::env::var("CAP_SERVER_URL").unwrap_or_else(|_| "https://cap.so".to_string()); + Some(AuthConfig { + server_url, + api_key, + }) +} + +#[tokio::test] +async fn create_and_delete_video() { + let Some(auth) = skip_unless_integration() else { + eprintln!("Skipping integration test: CAP_API_KEY not set"); + return; + }; + + let client = CapClient::new(auth).unwrap(); + + let video = client.create_video(None).await.unwrap(); + assert!(!video.id.is_empty()); + + client.delete_video(&video.id).await.unwrap(); +} + +#[tokio::test] +async fn list_organizations() { + let Some(auth) = skip_unless_integration() else { + eprintln!("Skipping integration test: CAP_API_KEY not set"); + return; + }; + + let client = CapClient::new(auth).unwrap(); + let orgs = client.list_organizations().await.unwrap(); + assert!(!orgs.is_empty(), "Expected at least one organization"); +} + +#[tokio::test] +async fn get_s3_config() { + let Some(auth) = skip_unless_integration() else { + eprintln!("Skipping integration test: CAP_API_KEY not set"); + return; + }; + + let client = CapClient::new(auth).unwrap(); + let config = client.get_s3_config().await.unwrap(); + assert!(!config.provider.is_empty()); +} + +#[tokio::test] +async fn list_videos() { + let Some(auth) = skip_unless_integration() else { + eprintln!("Skipping integration test: CAP_API_KEY not set"); + return; + }; + + let client = CapClient::new(auth).unwrap(); + let resp = client.list_videos(None, 5, 0).await.unwrap(); + assert!(resp.total >= 0); +} + +#[tokio::test] +async fn get_video_info() { + let Some(auth) = skip_unless_integration() else { + eprintln!("Skipping integration test: CAP_API_KEY not set"); + return; + }; + + let client = CapClient::new(auth).unwrap(); + let video = client.create_video(None).await.unwrap(); + + let info = client.get_video_info(&video.id).await.unwrap(); + assert_eq!(info.id, video.id); + 
assert!(!info.has_password); + + client.delete_video(&video.id).await.unwrap(); +} + +#[tokio::test] +async fn set_and_remove_password() { + let Some(auth) = skip_unless_integration() else { + eprintln!("Skipping integration test: CAP_API_KEY not set"); + return; + }; + + let client = CapClient::new(auth).unwrap(); + let video = client.create_video(None).await.unwrap(); + + client + .set_video_password(&video.id, Some("secret123")) + .await + .unwrap(); + + let info = client.get_video_info(&video.id).await.unwrap(); + assert!(info.has_password); + + client.set_video_password(&video.id, None).await.unwrap(); + + let info = client.get_video_info(&video.id).await.unwrap(); + assert!(!info.has_password); + + client.delete_video(&video.id).await.unwrap(); +} + +#[tokio::test] +async fn get_transcript_not_ready() { + let Some(auth) = skip_unless_integration() else { + eprintln!("Skipping integration test: CAP_API_KEY not set"); + return; + }; + + let client = CapClient::new(auth).unwrap(); + let video = client.create_video(None).await.unwrap(); + + let result = client.get_transcript(&video.id).await; + assert!(result.is_err()); + + client.delete_video(&video.id).await.unwrap(); +}