diff --git a/.github/workflows/node-sdk.yml b/.github/workflows/node-sdk.yml new file mode 100644 index 00000000..45f800da --- /dev/null +++ b/.github/workflows/node-sdk.yml @@ -0,0 +1,102 @@ +name: Node SDK + +on: + push: + branches: [main] + paths: + - 'packages/auths-node/**' + - 'crates/**' + pull_request: + branches: [main] + paths: + - 'packages/auths-node/**' + - 'crates/**' + +permissions: + contents: read + +env: + CARGO_TERM_COLOR: always + +jobs: + build-and-test: + name: Build & Test (${{ matrix.os }}) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-latest + target: x86_64-unknown-linux-gnu + - os: macos-latest + target: aarch64-apple-darwin + steps: + - uses: actions/checkout@v4 + + - uses: dtolnay/rust-toolchain@stable + + - uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + packages/auths-node/target + key: ${{ runner.os }}-node-sdk-${{ hashFiles('packages/auths-node/Cargo.lock') }} + restore-keys: ${{ runner.os }}-node-sdk- + + - uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Install pnpm + run: npm install -g pnpm + + - name: Install dependencies + working-directory: packages/auths-node + run: pnpm install + + - name: Build native module + working-directory: packages/auths-node + run: pnpm build + + - name: Configure Git + run: | + git config --global user.name "CI" + git config --global user.email "ci@auths.dev" + + - name: Run tests + working-directory: packages/auths-node + run: pnpm test + + - name: Upload native module + uses: actions/upload-artifact@v4 + with: + name: bindings-${{ matrix.target }} + path: packages/auths-node/auths.*.node + + lint: + name: Lint (Rust) + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: dtolnay/rust-toolchain@stable + with: + components: rustfmt, clippy + + - uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + packages/auths-node/target + key: ${{ runner.os 
}}-node-lint-${{ hashFiles('packages/auths-node/Cargo.lock') }} + restore-keys: ${{ runner.os }}-node-lint- + + - name: Check formatting + working-directory: packages/auths-node + run: cargo fmt --check + + - name: Clippy + working-directory: packages/auths-node + run: cargo clippy --all-features -- -D warnings diff --git a/.github/workflows/publish-node.yml b/.github/workflows/publish-node.yml new file mode 100644 index 00000000..1ffb9a62 --- /dev/null +++ b/.github/workflows/publish-node.yml @@ -0,0 +1,163 @@ +name: Publish Node SDK + +on: + push: + tags: ["v*"] + workflow_dispatch: + inputs: + target: + description: "Publish target" + required: true + type: choice + options: + - npm + - dry-run + default: dry-run + +permissions: + contents: read + +jobs: + build: + name: Build (${{ matrix.target }}) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-latest + target: x86_64-unknown-linux-gnu + - os: ubuntu-latest + target: x86_64-unknown-linux-musl + use-cross: true + - os: ubuntu-latest + target: aarch64-unknown-linux-gnu + use-cross: true + - os: ubuntu-latest + target: aarch64-unknown-linux-musl + use-cross: true + - os: macos-latest + target: x86_64-apple-darwin + - os: macos-latest + target: aarch64-apple-darwin + - os: windows-latest + target: x86_64-pc-windows-msvc + - os: windows-latest + target: aarch64-pc-windows-msvc + steps: + - uses: actions/checkout@v4 + + - uses: dtolnay/rust-toolchain@stable + with: + targets: ${{ matrix.target }} + + - uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Install pnpm + run: npm install -g pnpm + + - name: Install dependencies + working-directory: packages/auths-node + run: pnpm install + + - name: Install cross + if: matrix.use-cross + run: cargo install cross --git https://github.com/cross-rs/cross + + - name: Build native module + working-directory: packages/auths-node + run: pnpm build -- --target ${{ matrix.target }} + + - uses: 
actions/upload-artifact@v4 + with: + name: bindings-${{ matrix.target }} + path: packages/auths-node/auths.*.node + + test: + name: Test (${{ matrix.os }}) + needs: [build] + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-latest + artifact: bindings-x86_64-unknown-linux-gnu + - os: macos-latest + artifact: bindings-aarch64-apple-darwin + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Install pnpm + run: npm install -g pnpm + + - name: Install dependencies + working-directory: packages/auths-node + run: pnpm install + + - uses: actions/download-artifact@v4 + with: + name: ${{ matrix.artifact }} + path: packages/auths-node + + - name: Configure Git + run: | + git config --global user.name "CI" + git config --global user.email "ci@auths.dev" + + - name: Run tests + working-directory: packages/auths-node + run: pnpm test + + publish: + name: Publish to npm + needs: [build, test] + runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/v') || (github.event_name == 'workflow_dispatch' && github.event.inputs.target == 'npm') + permissions: + id-token: write + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: 22 + registry-url: https://registry.npmjs.org + + - name: Install pnpm + run: npm install -g pnpm + + - name: Install dependencies + working-directory: packages/auths-node + run: pnpm install + + - uses: actions/download-artifact@v4 + with: + path: packages/auths-node/artifacts + merge-multiple: true + + - name: Move artifacts + working-directory: packages/auths-node + run: pnpm artifacts + + - name: Prepare npm packages + working-directory: packages/auths-node + run: pnpm prepublishOnly + + - name: Publish + working-directory: packages/auths-node + run: | + if [ "${{ github.event_name }}" = "workflow_dispatch" ] && [ "${{ github.event.inputs.target }}" = "dry-run" ]; then + echo "Dry run - skipping publish" 
+ npm pack + else + npm publish --provenance --access public + fi + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1cc14046..3d1b0245 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -23,7 +23,7 @@ repos: - id: cargo-fmt name: cargo fmt - entry: cargo fmt --all -- + entry: bash -c 'cargo fmt --all && cargo fmt --all --manifest-path packages/auths-node/Cargo.toml && cargo fmt --all --manifest-path packages/auths-python/Cargo.toml' language: system types: [rust] pass_filenames: false diff --git a/CLAUDE.md b/CLAUDE.md index 49e2f4e0..7437785b 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -146,3 +146,13 @@ When the user is getting errors locally, don't forget to remind them to reinstal ``` The existing SDK error types (`SetupError`, `DeviceError`, `RegistrationError` in `crates/auths-sdk/src/error.rs`) currently wrap `anyhow::Error` in their `StorageError` and `NetworkError` variants (e.g., `StorageError(#[source] anyhow::Error)`). These must be migrated to domain-specific `thiserror` variants during Epic 1/2 execution — the `anyhow` wrapping is a transitional pattern, not a permanent design. The `map_storage_err()` and `map_device_storage_err()` helper functions should be replaced with direct `From` impls on the domain storage errors. 6. **No reverse dependencies**: Core and SDK must never reference presentation layer crates. +7. **`unwrap()` / `expect()` Policy**: The workspace denies `clippy::unwrap_used` and `clippy::expect_used` globally. `clippy.toml` sets `allow-unwrap-in-tests = true`, so test code is exempt. For production code: + - **Default**: Use `?` (in functions returning `Result`), `.ok_or_else(|| ...)`, `.unwrap_or_default()`, or `match` instead of `.unwrap()` / `.expect()`. 
+ - **Provably safe unwraps**: When an unwrap is provably infallible (e.g., `try_into()` after a length check, `ProgressStyle::with_template()` on a compile-time constant, `Regex::new()` on a literal), use an inline `#[allow]` with an `INVARIANT:` comment explaining why it cannot fail: + ```rust + #[allow(clippy::expect_used)] // INVARIANT: length validated to be 32 bytes on line N + let arr: [u8; 32] = vec.try_into().expect("validated above"); + ``` + - **FFI boundaries**: `expect()` is acceptable in FFI/WASM `extern "C"` functions where panicking is the only option (no `Result` return). Annotate with `#[allow]`. + - **Mutex/RwLock poisoning**: `lock().expect()` / `write().expect()` on stdlib mutexes is acceptable — a poisoned mutex means another thread panicked, which is unrecoverable. Annotate with `#[allow]` and an INVARIANT comment. + - **Never** add blanket `#![allow(clippy::unwrap_used, clippy::expect_used)]` to crate roots. Fix each site individually. diff --git a/crates/auths-cli/src/bin/sign.rs b/crates/auths-cli/src/bin/sign.rs index ea0cf464..c59ca218 100644 --- a/crates/auths-cli/src/bin/sign.rs +++ b/crates/auths-cli/src/bin/sign.rs @@ -2,9 +2,7 @@ clippy::print_stdout, clippy::print_stderr, clippy::disallowed_methods, - clippy::exit, - clippy::unwrap_used, - clippy::expect_used + clippy::exit )] //! auths-sign: Git SSH signing program compatible with `gpg.ssh.program` //! @@ -190,8 +188,8 @@ fn run_verify(args: &Args) -> Result<()> { "-n", namespace, "-s", - sig_file.to_str().unwrap(), ]); + cmd.arg(sig_file); for opt in &args.verify_options { cmd.arg("-O").arg(opt); } diff --git a/crates/auths-cli/src/bin/verify.rs b/crates/auths-cli/src/bin/verify.rs index 9e3339fc..0723eef8 100644 --- a/crates/auths-cli/src/bin/verify.rs +++ b/crates/auths-cli/src/bin/verify.rs @@ -2,9 +2,7 @@ clippy::print_stdout, clippy::print_stderr, clippy::disallowed_methods, - clippy::exit, - clippy::unwrap_used, - clippy::expect_used + clippy::exit )] //! 
auths-verify: SSH signature verification for Auths identities //! @@ -189,18 +187,10 @@ fn verify_with_ssh_keygen( // Run ssh-keygen -Y verify let output = Command::new("ssh-keygen") - .args([ - "-Y", - "verify", - "-f", - allowed_signers.to_str().unwrap(), - "-I", - identity, - "-n", - namespace, - "-s", - signature_file.to_str().unwrap(), - ]) + .args(["-Y", "verify", "-f"]) + .arg(allowed_signers) + .args(["-I", identity, "-n", namespace, "-s"]) + .arg(signature_file) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) @@ -240,14 +230,10 @@ fn find_signer( allowed_signers: &std::path::Path, ) -> Result> { let output = Command::new("ssh-keygen") - .args([ - "-Y", - "find-principals", - "-f", - allowed_signers.to_str().unwrap(), - "-s", - signature_file.to_str().unwrap(), - ]) + .args(["-Y", "find-principals", "-f"]) + .arg(allowed_signers) + .arg("-s") + .arg(signature_file) .output(); if let Ok(out) = output diff --git a/crates/auths-cli/src/commands/agent/mod.rs b/crates/auths-cli/src/commands/agent/mod.rs index 1ba2646e..d6515ab2 100644 --- a/crates/auths-cli/src/commands/agent/mod.rs +++ b/crates/auths-cli/src/commands/agent/mod.rs @@ -246,7 +246,10 @@ fn start_agent( create_restricted_dir(&auths_dir) .with_context(|| format!("Failed to create auths directory: {:?}", auths_dir))?; - let socket = socket_path.unwrap_or_else(|| get_default_socket_path().unwrap()); + let socket = match socket_path { + Some(s) => s, + None => get_default_socket_path()?, + }; let pid_path = get_pid_file_path()?; let env_path = get_env_file_path()?; let timeout = parse_timeout(timeout_str)?; diff --git a/crates/auths-cli/src/commands/artifact/verify.rs b/crates/auths-cli/src/commands/artifact/verify.rs index bf71f35d..eae8e53f 100644 --- a/crates/auths-cli/src/commands/artifact/verify.rs +++ b/crates/auths-cli/src/commands/artifact/verify.rs @@ -300,7 +300,7 @@ fn output_error(file: &str, exit_code: i32, message: &str) -> Result<()> { issuer: None, error: 
Some(message.to_string()), }; - println!("{}", serde_json::to_string(&result).unwrap()); + println!("{}", serde_json::to_string(&result)?); } else { eprintln!("Error: {}", message); } @@ -310,7 +310,7 @@ fn output_error(file: &str, exit_code: i32, message: &str) -> Result<()> { /// Output the verification result. fn output_result(exit_code: i32, result: VerifyArtifactResult) -> Result<()> { if is_json_mode() { - println!("{}", serde_json::to_string(&result).unwrap()); + println!("{}", serde_json::to_string(&result)?); } else if result.valid { print!("Artifact verified"); if let Some(ref issuer) = result.issuer { diff --git a/crates/auths-cli/src/commands/device/authorization.rs b/crates/auths-cli/src/commands/device/authorization.rs index 09ee7915..14f3f483 100644 --- a/crates/auths-cli/src/commands/device/authorization.rs +++ b/crates/auths-cli/src/commands/device/authorization.rs @@ -493,6 +493,7 @@ fn list_devices( let mut entries: Vec = Vec::new(); for (device_did_str, att_entries) in grouped.by_device.iter() { + #[allow(clippy::expect_used)] // INVARIANT: BTreeMap groups are never empty by construction let latest = att_entries .last() .expect("Grouped attestations should not be empty"); diff --git a/crates/auths-cli/src/commands/device/pair/common.rs b/crates/auths-cli/src/commands/device/pair/common.rs index d55208cb..29810a75 100644 --- a/crates/auths-cli/src/commands/device/pair/common.rs +++ b/crates/auths-cli/src/commands/device/pair/common.rs @@ -28,6 +28,7 @@ pub(crate) static WARN: Emoji<'_, '_> = Emoji("⚠️ ", "[!] "); /// Create a braille-style wait spinner. 
pub(crate) fn create_wait_spinner(message: &str) -> ProgressBar { let pb = ProgressBar::new_spinner(); + #[allow(clippy::unwrap_used)] // INVARIANT: template is a compile-time constant pb.set_style( ProgressStyle::with_template("{spinner:.cyan} {msg}") .unwrap() diff --git a/crates/auths-cli/src/commands/device/pair/online.rs b/crates/auths-cli/src/commands/device/pair/online.rs index d9d0719b..e86d5896 100644 --- a/crates/auths-cli/src/commands/device/pair/online.rs +++ b/crates/auths-cli/src/commands/device/pair/online.rs @@ -47,11 +47,11 @@ pub(crate) async fn handle_initiate_online( let wait_spinner = ProgressBar::new_spinner(); { use indicatif::ProgressStyle; - wait_spinner.set_style( - ProgressStyle::with_template("{spinner:.cyan} {msg}") - .unwrap() - .tick_strings(&["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]), - ); + #[allow(clippy::unwrap_used)] // INVARIANT: template is a compile-time constant + let style = ProgressStyle::with_template("{spinner:.cyan} {msg}") + .unwrap() + .tick_strings(&["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]); + wait_spinner.set_style(style); } let wait_sp = wait_spinner.clone(); diff --git a/crates/auths-cli/src/commands/device/verify_attestation.rs b/crates/auths-cli/src/commands/device/verify_attestation.rs index d18b0055..be8ddccf 100644 --- a/crates/auths-cli/src/commands/device/verify_attestation.rs +++ b/crates/auths-cli/src/commands/device/verify_attestation.rs @@ -106,7 +106,7 @@ pub async fn handle_verify(cmd: VerifyCommand) -> Result<()> { match result { Ok(verify_result) => { if is_json_mode() { - println!("{}", serde_json::to_string(&verify_result).unwrap()); + println!("{}", serde_json::to_string(&verify_result)?); } if verify_result.valid { @@ -135,7 +135,7 @@ pub async fn handle_verify(cmd: VerifyCommand) -> Result<()> { available_capabilities: None, witness_quorum: None, }; - println!("{}", serde_json::to_string(&error_result).unwrap()); + println!("{}", serde_json::to_string(&error_result)?); } else 
{ eprintln!("Error: {}", e); } @@ -361,11 +361,8 @@ async fn run_verify(cmd: &VerifyCommand) -> Result { if !is_json_mode() { println!("Attestation verified successfully."); - if required_capability.is_some() { - println!( - "Required capability '{}' is present.", - cmd.require_capability.as_ref().unwrap() - ); + if let Some(ref cap_str) = cmd.require_capability { + println!("Required capability '{}' is present.", cap_str); } } Ok(VerifyResult { diff --git a/crates/auths-cli/src/commands/init_helpers.rs b/crates/auths-cli/src/commands/init_helpers.rs index 14b7280d..cb6ceb07 100644 --- a/crates/auths-cli/src/commands/init_helpers.rs +++ b/crates/auths-cli/src/commands/init_helpers.rs @@ -110,11 +110,10 @@ pub(crate) fn write_allowed_signers(key_alias: &str, out: &Output) -> Result<()> let signers_path = ssh_dir.join("allowed_signers"); std::fs::write(&signers_path, content)?; - set_git_config( - "gpg.ssh.allowedSignersFile", - signers_path.to_str().unwrap(), - "--global", - )?; + let signers_str = signers_path + .to_str() + .ok_or_else(|| anyhow!("allowed signers path is not valid UTF-8"))?; + set_git_config("gpg.ssh.allowedSignersFile", signers_str, "--global")?; out.println(&format!( " Wrote {} allowed signer(s) to {}", diff --git a/crates/auths-cli/src/commands/key.rs b/crates/auths-cli/src/commands/key.rs index a352c3a7..3ca9d147 100644 --- a/crates/auths-cli/src/commands/key.rs +++ b/crates/auths-cli/src/commands/key.rs @@ -312,6 +312,7 @@ fn key_import(alias: &str, seed_file_path: &PathBuf, controller_did: &IdentityDI seed_bytes.len() )); } + #[allow(clippy::expect_used)] // INVARIANT: length validated to be 32 bytes on line 309 let seed: [u8; 32] = seed_bytes.try_into().expect("validated 32 bytes above"); let seed = Zeroizing::new(seed); diff --git a/crates/auths-cli/src/commands/org.rs b/crates/auths-cli/src/commands/org.rs index da834f3f..d37d6b55 100644 --- a/crates/auths-cli/src/commands/org.rs +++ b/crates/auths-cli/src/commands/org.rs @@ -609,7 +609,9 
@@ pub fn handle_org(cmd: OrgCommand, ctx: &crate::config::CliConfig) -> Result<()> let group = AttestationGroup::from_list(attestation_storage.load_all_attestations()?); for (subject, list) in group.by_device.iter() { - let latest = list.last().unwrap(); + let Some(latest) = list.last() else { + continue; + }; if !include_revoked && (latest.is_revoked() || latest.expires_at.is_some_and(|e| Utc::now() > e)) { diff --git a/crates/auths-cli/src/commands/scim.rs b/crates/auths-cli/src/commands/scim.rs index 0047cf80..d5585fd0 100644 --- a/crates/auths-cli/src/commands/scim.rs +++ b/crates/auths-cli/src/commands/scim.rs @@ -227,6 +227,7 @@ fn handle_test_connection(cmd: ScimTestConnectionCommand) -> Result<()> { println!(" Testing SCIM connection to {}...", cmd.url); println!(); + #[allow(clippy::expect_used)] // INVARIANT: tokio runtime creation is fatal if it fails let rt = tokio::runtime::Handle::try_current() .ok() .map(|_| None) @@ -389,6 +390,7 @@ async fn run_test_connection(base_url: &str, token: &str) -> Result<()> { fn generate_token_b64() -> String { use base64::Engine; let mut bytes = [0u8; 32]; + #[allow(clippy::expect_used)] // INVARIANT: system RNG failure is unrecoverable ring::rand::SystemRandom::new() .fill(&mut bytes) .expect("random bytes"); diff --git a/crates/auths-cli/src/commands/utils.rs b/crates/auths-cli/src/commands/utils.rs index 8254e080..0ddfc4a4 100644 --- a/crates/auths-cli/src/commands/utils.rs +++ b/crates/auths-cli/src/commands/utils.rs @@ -64,9 +64,8 @@ pub fn handle_util(cmd: UtilCommand) -> Result<()> { } // Convert Vec to [u8; 32] - let seed: [u8; 32] = bytes - .try_into() - .expect("Length already checked, conversion should succeed"); // Safe due to check above + #[allow(clippy::expect_used)] // INVARIANT: length validated to be 32 bytes on line 59 + let seed: [u8; 32] = bytes.try_into().expect("Length already checked"); // Create keypair from seed by encoding as PKCS#8 first let pkcs8_der = diff --git 
a/crates/auths-cli/src/commands/verify_commit.rs b/crates/auths-cli/src/commands/verify_commit.rs index 98c40a55..3c714554 100644 --- a/crates/auths-cli/src/commands/verify_commit.rs +++ b/crates/auths-cli/src/commands/verify_commit.rs @@ -440,9 +440,9 @@ fn output_results(results: &[VerifyCommitResult]) -> Result<()> { if is_json_mode() { if results.len() == 1 { - println!("{}", serde_json::to_string(&results[0]).unwrap()); + println!("{}", serde_json::to_string(&results[0])?); } else { - println!("{}", serde_json::to_string(&results).unwrap()); + println!("{}", serde_json::to_string(&results)?); } } else if results.len() == 1 { let r = &results[0]; @@ -722,14 +722,10 @@ fn verify_ssh_signature(signers_path: &Path, signature: &str, payload: &str) -> // This must come before verify because `-I "*"` is not a valid wildcard for ssh-keygen // on all OpenSSH versions; using the actual identity is required for verify to succeed. let find_output = Command::new("ssh-keygen") - .args([ - "-Y", - "find-principals", - "-f", - signers_path.to_str().unwrap(), - "-s", - sig_file.path().to_str().unwrap(), - ]) + .args(["-Y", "find-principals", "-f"]) + .arg(signers_path) + .arg("-s") + .arg(sig_file.path()) .output() .context("Failed to run ssh-keygen find-principals")?; @@ -755,18 +751,10 @@ fn verify_ssh_signature(signers_path: &Path, signature: &str, payload: &str) -> std::fs::File::open(payload_file.path()).context("Failed to open payload file as stdin")?; let output = Command::new("ssh-keygen") - .args([ - "-Y", - "verify", - "-f", - signers_path.to_str().unwrap(), - "-I", - &identity, - "-n", - "git", - "-s", - sig_file.path().to_str().unwrap(), - ]) + .args(["-Y", "verify", "-f"]) + .arg(signers_path) + .args(["-I", &identity, "-n", "git", "-s"]) + .arg(sig_file.path()) .stdin(stdin_file) .stdout(Stdio::piped()) .stderr(Stdio::piped()) @@ -810,7 +798,7 @@ fn check_ssh_keygen() -> Result<()> { fn handle_error(cmd: &VerifyCommitCommand, exit_code: i32, message: &str) -> 
Result<()> { if is_json_mode() { let result = VerifyCommitResult::failure(cmd.commit.clone(), message.to_string()); - println!("{}", serde_json::to_string(&result).unwrap()); + println!("{}", serde_json::to_string(&result)?); } else { eprintln!("Error: {}", message); } diff --git a/crates/auths-cli/src/lib.rs b/crates/auths-cli/src/lib.rs index 65616c34..79526e9e 100644 --- a/crates/auths-cli/src/lib.rs +++ b/crates/auths-cli/src/lib.rs @@ -3,9 +3,7 @@ clippy::print_stdout, clippy::print_stderr, clippy::disallowed_methods, - clippy::exit, - clippy::unwrap_used, - clippy::expect_used + clippy::exit )] pub mod adapters; pub mod cli; diff --git a/crates/auths-cli/src/main.rs b/crates/auths-cli/src/main.rs index 34e93b11..f8915e89 100644 --- a/crates/auths-cli/src/main.rs +++ b/crates/auths-cli/src/main.rs @@ -3,9 +3,7 @@ clippy::print_stdout, clippy::print_stderr, clippy::disallowed_methods, - clippy::exit, - clippy::unwrap_used, - clippy::expect_used + clippy::exit )] use anyhow::Result; use clap::Parser; diff --git a/packages/auths-node/.gitignore b/packages/auths-node/.gitignore new file mode 100644 index 00000000..64e137b5 --- /dev/null +++ b/packages/auths-node/.gitignore @@ -0,0 +1,4 @@ +target/ +node_modules/ +*.node +Cargo.lock diff --git a/packages/auths-node/.npmrc b/packages/auths-node/.npmrc new file mode 100644 index 00000000..b6f27f13 --- /dev/null +++ b/packages/auths-node/.npmrc @@ -0,0 +1 @@ +engine-strict=true diff --git a/packages/auths-node/Cargo.toml b/packages/auths-node/Cargo.toml new file mode 100644 index 00000000..e0ae0680 --- /dev/null +++ b/packages/auths-node/Cargo.toml @@ -0,0 +1,45 @@ +[package] +name = "auths-node" +version = "0.1.0" +edition = "2024" +description = "Node.js bindings for the Auths decentralized identity SDK" +license = "Apache-2.0" +publish = false + +[workspace] + +[lib] +crate-type = ["cdylib"] + +[dependencies] +napi = { version = "3", default-features = false, features = ["napi4", "async", "tokio_rt", "serde-json"] 
} +napi-derive = "3" + +# Core auths crates (same as auths-python) +auths-sdk = { path = "../../crates/auths-sdk" } +auths-core = { path = "../../crates/auths-core", features = ["keychain-file-fallback"] } +auths-id = { path = "../../crates/auths-id" } +auths-crypto = { path = "../../crates/auths-crypto" } +auths-verifier = { path = "../../crates/auths-verifier" } +auths-storage = { path = "../../crates/auths-storage", features = ["backend-git"] } +auths-policy = { path = "../../crates/auths-policy" } +auths-pairing-daemon = { path = "../../crates/auths-pairing-daemon" } +auths-infra-git = { path = "../../crates/auths-infra-git" } + +# Utilities +serde = { version = "1", features = ["derive"] } +serde_json = "1" +tokio = { version = "1", features = ["sync", "net"] } +reqwest = { version = "0.12", default-features = false, features = ["rustls-tls", "json"] } +hex = "0.4" +chrono = "0.4" +ring = "0.17" +json-canon = "=0.1.3" +sha2 = "0.10" +uuid = { version = "1", features = ["v4"] } +shellexpand = "3" +url = "2" +axum = "0.8" + +[build-dependencies] +napi-build = "2" diff --git a/packages/auths-node/README.md b/packages/auths-node/README.md new file mode 100644 index 00000000..0a495950 --- /dev/null +++ b/packages/auths-node/README.md @@ -0,0 +1,166 @@ +# Auths Node SDK + +Decentralized identity for developers and AI agents. Sign, verify, and manage cryptographic identities with Git-native storage. 
+ +## Install + +```bash +npm install @auths-dev/node +``` + +## Quick start + +```typescript +import { Auths, verifyAttestation } from '@auths-dev/node' + +const auths = new Auths() + +// Verify an attestation +const result = verifyAttestation(attestationJson, publicKeyHex) +console.log(result.valid) // true + +// Create an identity and sign +const identity = auths.identities.create({ label: 'laptop' }) +const sig = auths.signAs({ message: Buffer.from('hello world'), identityDid: identity.did }) +console.log(sig.signature) // hex-encoded Ed25519 signature +``` + +## Identity management + +```typescript +import { Auths } from '@auths-dev/node' + +const auths = new Auths({ repoPath: '~/.auths' }) + +// Create a cryptographic identity +const identity = auths.identities.create({ label: 'laptop' }) +console.log(identity.did) // did:keri:EBfd... + +// Provision an agent (for CI, MCP servers, etc.) +const agent = auths.identities.delegateAgent({ + identityDid: identity.did, + name: 'deploy-bot', + capabilities: ['sign'], +}) + +// Sign using the keychain-stored identity key +const result = auths.signAs({ + message: Buffer.from('hello world'), + identityDid: identity.did, +}) + +// Link and manage devices +const device = auths.devices.link({ + identityDid: identity.did, + capabilities: ['sign'], +}) +auths.devices.revoke({ + deviceDid: device.did, + identityDid: identity.did, + note: 'replaced', +}) +``` + +## Policy engine + +```typescript +import { PolicyBuilder, evaluatePolicy } from '@auths-dev/node' + +// Build a standard policy +const policy = PolicyBuilder.standard('sign_commit') + +// Evaluate against a context +const decision = policy.evaluate({ + issuer: 'did:keri:EOrg', + subject: 'did:key:zDevice', + capabilities: ['sign_commit'], +}) +console.log(decision.allowed) // true + +// Compose complex policies +const ciPolicy = new PolicyBuilder() + .notRevoked() + .notExpired() + .requireCapability('sign') + .requireAgent() + .requireRepo('org/repo') + .toJson() 
+``` + +## Organization management + +```typescript +const org = auths.orgs.create({ label: 'my-team' }) + +const member = auths.orgs.addMember({ + orgDid: org.orgDid, + memberDid: devIdentity.did, + role: 'member', + memberPublicKeyHex: devIdentity.publicKey, +}) + +const members = auths.orgs.listMembers({ orgDid: org.orgDid }) +``` + +## Verification + +```typescript +import { + verifyAttestation, + verifyChain, + verifyAttestationWithCapability, +} from '@auths-dev/node' + +// Single attestation +const result = verifyAttestation(attestationJson, issuerPublicKeyHex) + +// Attestation chain +const report = verifyChain(attestationChain, rootPublicKeyHex) +console.log(report.status.statusType) // 'Valid' | 'Invalid' | ... + +// Capability-scoped verification +const capResult = verifyAttestationWithCapability( + attestationJson, issuerPublicKeyHex, 'sign_commit' +) +``` + +## Error handling + +```typescript +import { Auths, VerificationError, CryptoError, NetworkError } from '@auths-dev/node' + +const auths = new Auths() +try { + const result = auths.signAs({ message: data, identityDid: did }) +} catch (e) { + if (e instanceof CryptoError) { + console.log(e.code) // 'key_not_found' + console.log(e.message) // 'No key found for identity...' + } + if (e instanceof NetworkError && e.shouldRetry) { + // safe to retry + } +} +``` + +All errors inherit from `AuthsError` and carry `.code` and `.message`. 
+ +## Configuration + +```typescript +// Auto-discover (uses ~/.auths) +const auths = new Auths() + +// Explicit repo path +const auths = new Auths({ repoPath: '/path/to/identity-repo' }) + +// With passphrase (or set AUTHS_PASSPHRASE env var) +const auths = new Auths({ passphrase: 'my-secret' }) + +// Headless / CI mode +// Set AUTHS_KEYCHAIN_BACKEND=file for environments without a system keychain +``` + +## License + +Apache-2.0 diff --git a/packages/auths-node/__test__/client.spec.ts b/packages/auths-node/__test__/client.spec.ts new file mode 100644 index 00000000..9b7cc1cf --- /dev/null +++ b/packages/auths-node/__test__/client.spec.ts @@ -0,0 +1,78 @@ +import { describe, it, expect } from 'vitest' +import { Auths } from '../lib/client' +import { + AuthsError, + VerificationError, + CryptoError, + KeychainError, + StorageError, + NetworkError, + IdentityError, + OrgError, + PairingError, +} from '../lib/errors' + +describe('Auths client', () => { + it('instantiates with defaults', () => { + const auths = new Auths() + expect(auths.repoPath).toBe('~/.auths') + expect(auths.passphrase).toBeUndefined() + }) + + it('instantiates with custom config', () => { + const auths = new Auths({ repoPath: '/tmp/test-repo', passphrase: 'secret' }) + expect(auths.repoPath).toBe('/tmp/test-repo') + expect(auths.passphrase).toBe('secret') + }) + + it('exposes all service properties', () => { + const auths = new Auths() + expect(auths.identities).toBeDefined() + expect(auths.devices).toBeDefined() + expect(auths.signing).toBeDefined() + expect(auths.orgs).toBeDefined() + expect(auths.trust).toBeDefined() + expect(auths.witnesses).toBeDefined() + expect(auths.attestations).toBeDefined() + expect(auths.artifacts).toBeDefined() + expect(auths.commits).toBeDefined() + expect(auths.audit).toBeDefined() + expect(auths.pairing).toBeDefined() + }) +}) + +describe('error hierarchy', () => { + it('all error subclasses extend AuthsError', () => { + for (const Cls of [ + VerificationError, + 
CryptoError, + KeychainError, + StorageError, + NetworkError, + IdentityError, + OrgError, + PairingError, + ]) { + const err = new Cls('test') + expect(err).toBeInstanceOf(AuthsError) + expect(err).toBeInstanceOf(Error) + } + }) + + it('AuthsError has code and message', () => { + const err = new AuthsError('something broke') + err.code = 'AUTHS_TEST' + expect(err.message).toBe('something broke') + expect(err.code).toBe('AUTHS_TEST') + }) + + it('NetworkError has shouldRetry', () => { + const err = new NetworkError('timeout') + expect(err.shouldRetry).toBe(true) + }) + + it('PairingError has shouldRetry', () => { + const err = new PairingError('session expired') + expect(err.shouldRetry).toBe(true) + }) +}) diff --git a/packages/auths-node/__test__/exports.spec.ts b/packages/auths-node/__test__/exports.spec.ts new file mode 100644 index 00000000..63e54142 --- /dev/null +++ b/packages/auths-node/__test__/exports.spec.ts @@ -0,0 +1,57 @@ +import { describe, it, expect } from 'vitest' +import * as auths from '../lib/index' + +describe('top-level exports', () => { + it('exports Auths client', () => { + expect(auths.Auths).toBeDefined() + }) + + it('exports service classes', () => { + expect(auths.IdentityService).toBeDefined() + expect(auths.DeviceService).toBeDefined() + expect(auths.SigningService).toBeDefined() + expect(auths.OrgService).toBeDefined() + expect(auths.TrustService).toBeDefined() + expect(auths.WitnessService).toBeDefined() + expect(auths.AttestationService).toBeDefined() + expect(auths.ArtifactService).toBeDefined() + expect(auths.CommitService).toBeDefined() + expect(auths.AuditService).toBeDefined() + expect(auths.PairingService).toBeDefined() + }) + + it('exports PolicyBuilder and policy functions', () => { + expect(auths.PolicyBuilder).toBeDefined() + expect(auths.compilePolicy).toBeDefined() + expect(auths.evaluatePolicy).toBeDefined() + }) + + it('exports verification functions', () => { + expect(auths.verifyAttestation).toBeDefined() + 
expect(auths.verifyChain).toBeDefined() + expect(auths.verifyDeviceAuthorization).toBeDefined() + expect(auths.verifyAttestationWithCapability).toBeDefined() + expect(auths.verifyChainWithCapability).toBeDefined() + expect(auths.verifyAtTime).toBeDefined() + expect(auths.verifyAtTimeWithCapability).toBeDefined() + expect(auths.verifyChainWithWitnesses).toBeDefined() + }) + + it('exports error classes', () => { + expect(auths.AuthsError).toBeDefined() + expect(auths.VerificationError).toBeDefined() + expect(auths.CryptoError).toBeDefined() + expect(auths.KeychainError).toBeDefined() + expect(auths.StorageError).toBeDefined() + expect(auths.NetworkError).toBeDefined() + expect(auths.IdentityError).toBeDefined() + expect(auths.OrgError).toBeDefined() + expect(auths.PairingError).toBeDefined() + expect(auths.mapNativeError).toBeDefined() + }) + + it('exports version function', () => { + expect(auths.version).toBeDefined() + expect(typeof auths.version).toBe('function') + }) +}) diff --git a/packages/auths-node/__test__/integration.spec.ts b/packages/auths-node/__test__/integration.spec.ts new file mode 100644 index 00000000..bb8f239a --- /dev/null +++ b/packages/auths-node/__test__/integration.spec.ts @@ -0,0 +1,407 @@ +import { describe, it, expect, beforeAll, afterAll } from 'vitest' +import { execSync } from 'child_process' +import { mkdtempSync, writeFileSync, mkdirSync, rmSync } from 'fs' +import { join } from 'path' +import { tmpdir } from 'os' +import { Auths } from '../lib/client' +import type { Identity } from '../lib/identity' + +const tmpDirs: string[] = [] + +function makeTmpDir(): string { + const dir = mkdtempSync(join(tmpdir(), 'auths-test-')) + tmpDirs.push(dir) + return dir +} + +afterAll(() => { + for (const dir of tmpDirs) { + rmSync(dir, { recursive: true, force: true }) + } +}) + +function makeClient(dir?: string): Auths { + const repoPath = dir ?? 
makeTmpDir() + return new Auths({ repoPath, passphrase: 'Test-pass-123' }) +} + +function initGitRepo(dir: string): void { + mkdirSync(dir, { recursive: true }) + execSync('git init', { cwd: dir, stdio: 'pipe' }) + execSync('git config user.name "Test User"', { cwd: dir, stdio: 'pipe' }) + execSync('git config user.email "test@example.com"', { cwd: dir, stdio: 'pipe' }) + execSync('git config commit.gpgsign false', { cwd: dir, stdio: 'pipe' }) + writeFileSync(join(dir, 'README.md'), '# Test Repo\n') + execSync('git add .', { cwd: dir, stdio: 'pipe' }) + execSync('git commit -m "initial commit"', { cwd: dir, stdio: 'pipe' }) +} + +describe('identity lifecycle', () => { + let auths: Auths + let identity: Identity + + beforeAll(() => { + auths = makeClient() + identity = auths.identities.create({ label: 'test-key' }) + }) + + it('creates identity with did:keri prefix', () => { + expect(identity.did).toMatch(/^did:keri:/) + expect(identity.keyAlias).toBeDefined() + expect(identity.publicKey).toBeDefined() + expect(identity.publicKey.length).toBe(64) + }) + + it('getPublicKey returns hex string', () => { + const pk = auths.getPublicKey({ identityDid: identity.did }) + expect(pk).toBe(identity.publicKey) + }) + + it('delegates an agent', () => { + const agent = auths.identities.delegateAgent({ + identityDid: identity.did, + name: 'ci-bot', + capabilities: ['sign'], + }) + expect(agent.did).toMatch(/^did:key:/) + expect(agent.keyAlias).toBeDefined() + expect(agent.attestation).toBeDefined() + }) + + it('creates standalone agent', () => { + const agent = auths.identities.createAgent({ + name: 'standalone', + capabilities: ['sign'], + }) + expect(agent.did).toMatch(/^did:keri:/) + expect(agent.keyAlias).toBeDefined() + }) +}) + +describe('device lifecycle', () => { + it('link and revoke device', () => { + const auths = makeClient() + const identity = auths.identities.create({ label: 'dev-test' }) + + const device = auths.devices.link({ + identityDid: identity.did, + 
capabilities: ['sign'], + expiresInDays: 90, + }) + expect(device.did).toMatch(/^did:key:/) + expect(device.attestationId).toBeDefined() + + auths.devices.revoke({ + deviceDid: device.did, + identityDid: identity.did, + note: 'test revocation', + }) + }) + + it('extend device authorization', () => { + const auths = makeClient() + const identity = auths.identities.create({ label: 'ext-test' }) + const device = auths.devices.link({ + identityDid: identity.did, + capabilities: ['sign'], + expiresInDays: 30, + }) + + const ext = auths.devices.extend({ + deviceDid: device.did, + identityDid: identity.did, + days: 60, + }) + expect(ext.deviceDid).toBe(device.did) + expect(ext.newExpiresAt).toBeDefined() + }) +}) + +describe('signing', () => { + let auths: Auths + let identity: Identity + + beforeAll(() => { + auths = makeClient() + identity = auths.identities.create({ label: 'sign-test' }) + }) + + it('sign as identity returns signature', () => { + const result = auths.signAs({ + message: Buffer.from('hello world'), + identityDid: identity.did, + }) + expect(result.signature).toBeDefined() + expect(result.signerDid).toBeDefined() + }) + + it('sign action as identity returns envelope', () => { + const result = auths.signActionAs({ + actionType: 'tool_call', + payloadJson: '{"tool":"read_file"}', + identityDid: identity.did, + }) + expect(result.envelopeJson).toBeDefined() + expect(result.signatureHex).toBeDefined() + expect(result.signerDid).toBeDefined() + }) +}) + +describe('trust', () => { + it('pin and list', () => { + const auths = makeClient() + const identity = auths.identities.create({ label: 'trust-test' }) + + const entry = auths.trust.pin({ did: identity.did, label: 'my-peer' }) + expect(entry.did).toBe(identity.did) + expect(entry.label).toBe('my-peer') + expect(entry.trustLevel).toBeDefined() + + const entries = auths.trust.list() + expect(entries.length).toBeGreaterThanOrEqual(1) + expect(entries.some(e => e.did === identity.did)).toBe(true) + }) + + 
it('remove pinned identity', () => { + const auths = makeClient() + const identity = auths.identities.create({ label: 'trust-rm' }) + auths.trust.pin({ did: identity.did }) + auths.trust.remove(identity.did) + const result = auths.trust.get(identity.did) + expect(result).toBeNull() + }) + + it('get returns null for unknown', () => { + const auths = makeClient() + const result = auths.trust.get('did:keri:ENOTREAL') + expect(result).toBeNull() + }) +}) + +describe('witness', () => { + it('add and list witnesses', () => { + const auths = makeClient() + auths.identities.create({ label: 'witness-test' }) + + const w = auths.witnesses.add({ url: 'http://witness.example.com:3333' }) + expect(w.url).toBe('http://witness.example.com:3333') + + const witnesses = auths.witnesses.list() + expect(witnesses.length).toBe(1) + }) + + it('remove witness', () => { + const auths = makeClient() + auths.identities.create({ label: 'witness-rm' }) + + auths.witnesses.add({ url: 'http://witness.example.com:3333' }) + auths.witnesses.remove('http://witness.example.com:3333') + + expect(auths.witnesses.list().length).toBe(0) + }) + + it('duplicate add is idempotent', () => { + const auths = makeClient() + auths.identities.create({ label: 'witness-dup' }) + + auths.witnesses.add({ url: 'http://witness.example.com:3333' }) + auths.witnesses.add({ url: 'http://witness.example.com:3333' }) + + expect(auths.witnesses.list().length).toBe(1) + }) +}) + +describe('attestations', () => { + it('list returns array', () => { + const auths = makeClient() + auths.identities.create({ label: 'att-test' }) + const atts = auths.attestations.list() + expect(Array.isArray(atts)).toBe(true) + }) +}) + +describe('audit', () => { + it('generates report for unsigned repo', () => { + const auths = makeClient() + const gitDir = join(makeTmpDir(), 'git-repo') + initGitRepo(gitDir) + + const report = auths.audit.report({ targetRepoPath: gitDir }) + expect(report.summary.total_commits).toBe(1) + 
expect(report.summary.unsigned_commits).toBe(1) + expect(report.summary.signed_commits).toBe(0) + expect(Array.isArray(report.commits)).toBe(true) + }) + + it('isCompliant returns false for unsigned', () => { + const auths = makeClient() + const gitDir = join(makeTmpDir(), 'git-repo') + initGitRepo(gitDir) + expect(auths.audit.isCompliant({ targetRepoPath: gitDir })).toBe(false) + }) +}) + +describe('org', () => { + it('creates organization', () => { + const auths = makeClient() + auths.identities.create({ label: 'org-admin' }) + + const org = auths.orgs.create({ label: 'my-team' }) + expect(org.orgDid).toMatch(/^did:keri:/) + expect(org.label).toBe('my-team') + }) + + it('add and list members', () => { + const adminDir = makeTmpDir() + const admin = makeClient(adminDir) + admin.identities.create({ label: 'admin' }) + const org = admin.orgs.create({ label: 'team' }) + + const devDir = makeTmpDir() + const devClient = makeClient(devDir) + const devId = devClient.identities.create({ label: 'dev' }) + + const member = admin.orgs.addMember({ + orgDid: org.orgDid, + memberDid: devId.did, + role: 'member', + memberPublicKeyHex: devId.publicKey, + }) + expect(member.memberDid).toBe(devId.did) + expect(member.role).toBe('member') + expect(member.revoked).toBe(false) + + const members = admin.orgs.listMembers({ orgDid: org.orgDid }) + expect(members.length).toBeGreaterThanOrEqual(1) + }) +}) + +describe('doctor', () => { + it('returns diagnostics string', () => { + const auths = makeClient() + const result = auths.doctor() + expect(typeof result).toBe('string') + expect(result.length).toBeGreaterThan(0) + }) +}) + +describe('version', () => { + it('returns version string', () => { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const native = require('../index.js') + expect(typeof native.version()).toBe('string') + expect(native.version()).toMatch(/^\d+\.\d+\.\d+/) + }) +}) + +describe('pairing', () => { + it('creates session and stops cleanly', async () 
=> { + const auths = makeClient() + auths.identities.create({ label: 'pair-test' }) + + const session = await auths.pairing.createSession({ + bindAddress: '127.0.0.1', + enableMdns: false, + capabilities: ['sign:commit'], + }) + expect(session.shortCode.length).toBe(6) + expect(session.endpoint).toMatch(/^http:\/\/127\.0\.0\.1:/) + expect(session.controllerDid).toMatch(/^did:keri:/) + + await auths.pairing.stop() + }) + + it('stop is idempotent', async () => { + const auths = makeClient() + auths.identities.create({ label: 'pair-stop' }) + + await auths.pairing.createSession({ + bindAddress: '127.0.0.1', + enableMdns: false, + }) + await auths.pairing.stop() + await auths.pairing.stop() + }) + + it('multiple concurrent sessions on separate clients', async () => { + const auths1 = makeClient() + auths1.identities.create({ label: 'pair-multi-1' }) + + const auths2 = makeClient() + auths2.identities.create({ label: 'pair-multi-2' }) + + const session1 = await auths1.pairing.createSession({ + bindAddress: '127.0.0.1', + enableMdns: false, + }) + const session2 = await auths2.pairing.createSession({ + bindAddress: '127.0.0.1', + enableMdns: false, + }) + + expect(session1.endpoint).not.toBe(session2.endpoint) + expect(session1.shortCode).not.toBe(session2.shortCode) + + await auths1.pairing.stop() + await auths2.pairing.stop() + }) + + it('waitForResponse without session throws', async () => { + const auths = makeClient() + auths.identities.create({ label: 'pair-no-session' }) + + await expect(auths.pairing.waitForResponse()).rejects.toThrow( + /No active pairing session/, + ) + }) + + it('complete without session throws', async () => { + const auths = makeClient() + auths.identities.create({ label: 'pair-no-session-complete' }) + + await expect( + auths.pairing.complete({ + deviceDid: 'did:key:fake', + devicePublicKeyHex: 'a'.repeat(64), + }), + ).rejects.toThrow(/No active pairing session/) + }) +}) + +describe('verify async', () => { + it('verifyAttestation returns a 
Promise', async () => { + const { verifyAttestation } = await import('../lib/verify') + const result = verifyAttestation('{}', 'a'.repeat(64)) + expect(result).toBeInstanceOf(Promise) + const resolved = await result + expect(resolved.valid).toBe(false) + }) + + it('verifyChain returns a Promise', async () => { + const { verifyChain } = await import('../lib/verify') + const result = verifyChain([], 'a'.repeat(64)) + expect(result).toBeInstanceOf(Promise) + const resolved = await result + expect(resolved.status).toBeDefined() + }) +}) + +describe('agent attestation', () => { + it('createAgent produces a signed attestation with required fields', () => { + const auths = makeClient() + auths.identities.create({ label: 'agent-att-test' }) + const agent = auths.identities.createAgent({ + name: 'test-bot', + capabilities: ['sign'], + }) + expect(agent.attestation).toBeDefined() + const att = JSON.parse(agent.attestation) + expect(att.issuer).toBeDefined() + expect(att.subject).toBeDefined() + expect(att.device_signature).toBeDefined() + expect(att.identity_signature).toBeDefined() + expect(att.rid).toBeDefined() + expect(att.version).toBeDefined() + expect(att.device_public_key).toBeDefined() + }) +}) diff --git a/packages/auths-node/__test__/policy.spec.ts b/packages/auths-node/__test__/policy.spec.ts new file mode 100644 index 00000000..37c4d8b1 --- /dev/null +++ b/packages/auths-node/__test__/policy.spec.ts @@ -0,0 +1,202 @@ +import { describe, it, expect } from 'vitest' +import { PolicyBuilder, compilePolicy, evaluatePolicy } from '../lib/policy' + +describe('PolicyBuilder', () => { + it('standard factory creates not_revoked + not_expired + capability', () => { + const json = PolicyBuilder.standard('sign_commit').toJson() + const parsed = JSON.parse(json) + expect(parsed.op).toBe('And') + expect(parsed.args).toHaveLength(3) + expect(parsed.args[0].op).toBe('NotRevoked') + expect(parsed.args[1].op).toBe('NotExpired') + expect(parsed.args[2].op).toBe('HasCapability') + 
expect(parsed.args[2].args).toBe('sign_commit') + }) + + it('fluent chaining builds correct expression', () => { + const json = new PolicyBuilder() + .notRevoked() + .requireCapability('sign') + .requireIssuer('did:keri:EOrg') + .requireHuman() + .maxChainDepth(3) + .toJson() + const parsed = JSON.parse(json) + expect(parsed.op).toBe('And') + expect(parsed.args).toHaveLength(5) + }) + + it('anyOf creates OR combinator', () => { + const a = PolicyBuilder.standard('admin') + const b = PolicyBuilder.standard('superadmin') + const json = PolicyBuilder.anyOf(a, b).toJson() + const parsed = JSON.parse(json) + expect(parsed.op).toBe('And') + expect(parsed.args[0].op).toBe('Or') + expect(parsed.args[0].args).toHaveLength(2) + }) + + it('negate wraps in Not', () => { + const json = new PolicyBuilder().notRevoked().negate().toJson() + const parsed = JSON.parse(json) + expect(parsed.args[0].op).toBe('Not') + }) + + it('orPolicy combines two builders', () => { + const a = new PolicyBuilder().requireCapability('admin') + const b = new PolicyBuilder().requireCapability('superadmin') + const json = a.orPolicy(b).toJson() + const parsed = JSON.parse(json) + expect(parsed.args[0].op).toBe('Or') + }) + + it('empty builder throws on build', () => { + expect(() => new PolicyBuilder().build()).toThrow('empty policy') + }) + + it('empty builder throws on toJson', () => { + expect(() => new PolicyBuilder().toJson()).toThrow('empty policy') + }) + + it('expiresAfter adds correct predicate', () => { + const json = new PolicyBuilder().expiresAfter(3600).toJson() + const parsed = JSON.parse(json) + expect(parsed.args[0].op).toBe('ExpiresAfter') + expect(parsed.args[0].args).toBe(3600) + }) + + it('issuedWithin adds correct predicate', () => { + const json = new PolicyBuilder().issuedWithin(86400).toJson() + const parsed = JSON.parse(json) + expect(parsed.args[0].op).toBe('IssuedWithin') + expect(parsed.args[0].args).toBe(86400) + }) + + it('requireAllCapabilities adds multiple 
HasCapability', () => { + const json = new PolicyBuilder().requireAllCapabilities(['sign', 'deploy']).toJson() + const parsed = JSON.parse(json) + expect(parsed.args).toHaveLength(2) + expect(parsed.args[0].op).toBe('HasCapability') + expect(parsed.args[1].op).toBe('HasCapability') + }) + + it('requireAnyCapability creates OR', () => { + const json = new PolicyBuilder().requireAnyCapability(['sign', 'deploy']).toJson() + const parsed = JSON.parse(json) + expect(parsed.args[0].op).toBe('Or') + expect(parsed.args[0].args).toHaveLength(2) + }) + + it('requireIssuerIn creates OR of IssuerIs', () => { + const json = new PolicyBuilder().requireIssuerIn(['did:keri:A', 'did:keri:B']).toJson() + const parsed = JSON.parse(json) + expect(parsed.args[0].op).toBe('Or') + }) + + it('signer type predicates', () => { + expect(JSON.parse(new PolicyBuilder().requireAgent().toJson()).args[0].op).toBe('IsAgent') + expect(JSON.parse(new PolicyBuilder().requireHuman().toJson()).args[0].op).toBe('IsHuman') + expect(JSON.parse(new PolicyBuilder().requireWorkload().toJson()).args[0].op).toBe('IsWorkload') + }) + + it('scope predicates', () => { + expect(JSON.parse(new PolicyBuilder().requireRepo('org/repo').toJson()).args[0].op).toBe('RepoIs') + expect(JSON.parse(new PolicyBuilder().requireEnv('production').toJson()).args[0].op).toBe('EnvIs') + expect(JSON.parse(new PolicyBuilder().refMatches('refs/heads/*').toJson()).args[0].op).toBe('RefMatches') + expect(JSON.parse(new PolicyBuilder().pathAllowed(['src/**']).toJson()).args[0].op).toBe('PathAllowed') + }) + + it('attribute predicates', () => { + expect(JSON.parse(new PolicyBuilder().attrEquals('team', 'infra').toJson()).args[0].op).toBe('AttrEquals') + expect(JSON.parse(new PolicyBuilder().attrIn('team', ['infra', 'platform']).toJson()).args[0].op).toBe('AttrIn') + }) +}) + +describe('compilePolicy', () => { + it('compiles a valid policy expression', () => { + const result = compilePolicy('{"op":"NotRevoked"}') + 
expect(result).toBeDefined() + expect(typeof result).toBe('string') + }) + + it('rejects invalid JSON', () => { + expect(() => compilePolicy('not json')).toThrow() + }) + + it('rejects unknown op', () => { + expect(() => compilePolicy('{"op":"BogusOp"}')).toThrow() + }) +}) + +describe('evaluatePolicy', () => { + it('allows when policy is True', () => { + const compiled = compilePolicy('{"op":"True"}') + const decision = evaluatePolicy(compiled, { + issuer: 'did:keri:ETest', + subject: 'did:key:zTest', + }) + expect(decision.outcome).toBe('allow') + expect(decision.allowed).toBe(true) + expect(decision.denied).toBe(false) + }) + + it('denies when policy is False', () => { + const compiled = compilePolicy('{"op":"False"}') + const decision = evaluatePolicy(compiled, { + issuer: 'did:keri:ETest', + subject: 'did:key:zTest', + }) + expect(decision.outcome).toBe('deny') + expect(decision.allowed).toBe(false) + expect(decision.denied).toBe(true) + }) + + it('checks capability present', () => { + const compiled = compilePolicy('{"op":"HasCapability","args":"sign_commit"}') + const decision = evaluatePolicy(compiled, { + issuer: 'did:keri:ETest', + subject: 'did:key:zTest', + capabilities: ['sign_commit'], + }) + expect(decision.allowed).toBe(true) + }) + + it('checks capability missing', () => { + const compiled = compilePolicy('{"op":"HasCapability","args":"sign_commit"}') + const decision = evaluatePolicy(compiled, { + issuer: 'did:keri:ETest', + subject: 'did:key:zTest', + capabilities: ['read'], + }) + expect(decision.denied).toBe(true) + }) + + it('checks NotRevoked passes', () => { + const compiled = compilePolicy('{"op":"NotRevoked"}') + const decision = evaluatePolicy(compiled, { + issuer: 'did:keri:ETest', + subject: 'did:key:zTest', + revoked: false, + }) + expect(decision.allowed).toBe(true) + }) + + it('checks NotRevoked denied when revoked', () => { + const compiled = compilePolicy('{"op":"NotRevoked"}') + const decision = evaluatePolicy(compiled, { + 
issuer: 'did:keri:ETest', + subject: 'did:key:zTest', + revoked: true, + }) + expect(decision.denied).toBe(true) + }) + + it('PolicyBuilder.evaluate convenience method', () => { + const decision = PolicyBuilder.standard('sign_commit').evaluate({ + issuer: 'did:keri:ETest', + subject: 'did:key:zTest', + capabilities: ['sign_commit'], + }) + expect(decision.allowed).toBe(true) + }) +}) diff --git a/packages/auths-node/__test__/verify.spec.ts b/packages/auths-node/__test__/verify.spec.ts new file mode 100644 index 00000000..15ed1fc7 --- /dev/null +++ b/packages/auths-node/__test__/verify.spec.ts @@ -0,0 +1,88 @@ +import { describe, it, expect } from 'vitest' +import { + verifyAttestation, + verifyChain, + verifyDeviceAuthorization, + verifyAttestationWithCapability, + verifyChainWithCapability, + verifyAtTime, + verifyAtTimeWithCapability, +} from '../lib/verify' +import type { VerificationResult, VerificationReport } from '../lib/verify' + +describe('verifyAttestation', () => { + it('invalid JSON returns error result', async () => { + const result: VerificationResult = await verifyAttestation('not valid json', 'a'.repeat(64)) + expect(result.valid).toBe(false) + expect(result.error).toBeDefined() + }) + + it('invalid hex key throws VerificationError', async () => { + await expect(verifyAttestation('{}', 'not-hex')).rejects.toThrow() + }) + + it('wrong key length throws VerificationError', async () => { + await expect(verifyAttestation('{}', 'abcd')).rejects.toThrow() + }) + + it('empty attestation returns invalid', async () => { + const result = await verifyAttestation('{}', 'a'.repeat(64)) + expect(result.valid).toBe(false) + }) +}) + +describe('verifyChain', () => { + it('empty chain returns report', async () => { + const report: VerificationReport = await verifyChain([], 'a'.repeat(64)) + expect(report.status).toBeDefined() + expect(report.status.statusType).toBeDefined() + expect(Array.isArray(report.chain)).toBe(true) + 
expect(Array.isArray(report.warnings)).toBe(true) + }) + + it('invalid JSON in chain throws', async () => { + await expect(verifyChain(['not valid json'], 'a'.repeat(64))).rejects.toThrow() + }) + + it('invalid root key throws', async () => { + await expect(verifyChain([], 'not-hex')).rejects.toThrow() + }) +}) + +describe('verifyDeviceAuthorization', () => { + it('empty attestations returns report', async () => { + const report = await verifyDeviceAuthorization( + 'did:key:identity', 'did:key:device', [], 'a'.repeat(64), + ) + expect(report.status).toBeDefined() + expect(report.status.statusType).not.toBe('Valid') + }) +}) + +describe('verifyAttestationWithCapability', () => { + it('invalid attestation returns error', async () => { + const result = await verifyAttestationWithCapability('{}', 'a'.repeat(64), 'sign') + expect(result.valid).toBe(false) + }) +}) + +describe('verifyChainWithCapability', () => { + it('empty chain returns report', async () => { + const report = await verifyChainWithCapability([], 'a'.repeat(64), 'sign') + expect(report.status).toBeDefined() + }) +}) + +describe('verifyAtTime', () => { + it('invalid attestation returns error', async () => { + const result = await verifyAtTime('{}', 'a'.repeat(64), '2025-01-01T00:00:00Z') + expect(result.valid).toBe(false) + }) +}) + +describe('verifyAtTimeWithCapability', () => { + it('invalid attestation returns error', async () => { + const result = await verifyAtTimeWithCapability('{}', 'a'.repeat(64), '2025-01-01T00:00:00Z', 'sign') + expect(result.valid).toBe(false) + }) +}) diff --git a/packages/auths-node/build.rs b/packages/auths-node/build.rs new file mode 100644 index 00000000..9fc23678 --- /dev/null +++ b/packages/auths-node/build.rs @@ -0,0 +1,5 @@ +extern crate napi_build; + +fn main() { + napi_build::setup(); +} diff --git a/packages/auths-node/index.d.ts b/packages/auths-node/index.d.ts new file mode 100644 index 00000000..75b853b4 --- /dev/null +++ b/packages/auths-node/index.d.ts @@ -0,0 
+1,259 @@
+/* auto-generated by NAPI-RS */
+/* eslint-disable */
+export declare class NapiPairingHandle {
+  static createSession(repoPath: string, capabilitiesJson?: string | undefined | null, timeoutSecs?: number | undefined | null, bindAddress?: string | undefined | null, enableMdns?: boolean | undefined | null, passphrase?: string | undefined | null): Promise<NapiPairingHandle>
+  get session(): NapiPairingSession
+  waitForResponse(timeoutSecs?: number | undefined | null): Promise<NapiPairingResponse>
+  complete(deviceDid: string, devicePublicKeyHex: string, repoPath: string, capabilitiesJson?: string | undefined | null, passphrase?: string | undefined | null): Promise<NapiPairingResult>
+  stop(): Promise<void>
+}
+
+export declare function addOrgMember(orgDid: string, memberDid: string, role: string, repoPath: string, capabilitiesJson?: string | undefined | null, passphrase?: string | undefined | null, note?: string | undefined | null, memberPublicKeyHex?: string | undefined | null): NapiOrgMember
+
+export declare function addWitness(urlStr: string, repoPath: string, label?: string | undefined | null): NapiWitnessResult
+
+export declare function compilePolicy(policyJson: string): string
+
+export declare function createAgentIdentity(agentName: string, capabilities: Array<string>, repoPath: string, passphrase?: string | undefined | null): NapiAgentIdentityBundle
+
+export declare function createIdentity(keyAlias: string, repoPath: string, passphrase?: string | undefined | null): NapiIdentityResult
+
+export declare function createOrg(label: string, repoPath: string, passphrase?: string | undefined | null): NapiOrgResult
+
+export declare function delegateAgent(agentName: string, capabilities: Array<string>, parentRepoPath: string, passphrase?: string | undefined | null, expiresInDays?: number | undefined | null, identityDid?: string | undefined | null): NapiDelegatedAgentBundle
+
+export declare function evaluatePolicy(policyJson: string, issuer: string, subject: string, capabilities?: Array<string> | undefined | null, role?: string | undefined |
null, revoked?: boolean | undefined | null, expiresAt?: string | undefined | null, repo?: string | undefined | null, environment?: string | undefined | null, signerType?: string | undefined | null, delegatedBy?: string | undefined | null, chainDepth?: number | undefined | null): NapiPolicyDecision
+
+export declare function extendDeviceAuthorization(deviceDid: string, identityKeyAlias: string, days: number, repoPath: string, passphrase?: string | undefined | null): NapiExtensionResult
+
+export declare function generateAuditReport(targetRepoPath: string, authsRepoPath: string, since?: string | undefined | null, until?: string | undefined | null, author?: string | undefined | null, limit?: number | undefined | null): string
+
+export declare function getIdentityPublicKey(identityDid: string, repoPath: string, passphrase?: string | undefined | null): string
+
+export declare function getLatestAttestation(repoPath: string, deviceDid: string): NapiAttestation | null
+
+export declare function getPinnedIdentity(did: string, repoPath: string): NapiPinnedIdentity | null
+
+export declare function joinPairingSession(shortCode: string, endpoint: string, token: string, repoPath: string, deviceName?: string | undefined | null, passphrase?: string | undefined | null): Promise<NapiPairingResult>
+
+export declare function linkDeviceToIdentity(identityKeyAlias: string, capabilities: Array<string>, repoPath: string, passphrase?: string | undefined | null, expiresInDays?: number | undefined | null): NapiLinkResult
+
+export declare function listAttestations(repoPath: string): Array<NapiAttestation>
+
+export declare function listAttestationsByDevice(repoPath: string, deviceDid: string): Array<NapiAttestation>
+
+export declare function listOrgMembers(orgDid: string, includeRevoked: boolean, repoPath: string): string
+
+export declare function listPinnedIdentities(repoPath: string): string
+
+export declare function listWitnesses(repoPath: string): string
+
+export interface NapiActionEnvelope {
+  envelopeJson: string
+  signatureHex: string
+  signerDid: string
+}
+
+export interface NapiAgentIdentityBundle {
+  agentDid: string
+  keyAlias: string
+  attestationJson: string
+  publicKeyHex: string
+  repoPath?: string
+}
+
+export interface NapiArtifactResult {
+  attestationJson: string
+  rid: string
+  digest: string
+  fileSize: number
+}
+
+export interface NapiAttestation {
+  rid: string
+  issuer: string
+  subject: string
+  deviceDid: string
+  capabilities: Array<string>
+  signerType?: string
+  expiresAt?: string
+  revokedAt?: string
+  createdAt?: string
+  delegatedBy?: string
+  json: string
+}
+
+export interface NapiChainLink {
+  issuer: string
+  subject: string
+  valid: boolean
+  error?: string
+}
+
+export interface NapiCommitSignPemResult {
+  signaturePem: string
+  method: string
+  namespace: string
+}
+
+export interface NapiCommitSignResult {
+  signature: string
+  signerDid: string
+}
+
+export interface NapiDelegatedAgentBundle {
+  agentDid: string
+  keyAlias: string
+  attestationJson: string
+  publicKeyHex: string
+  repoPath?: string
+}
+
+export interface NapiExtensionResult {
+  deviceDid: string
+  newExpiresAt: string
+  previousExpiresAt?: string
+}
+
+export interface NapiIdentityResult {
+  did: string
+  keyAlias: string
+  publicKeyHex: string
+}
+
+export interface NapiLinkResult {
+  deviceDid: string
+  attestationId: string
+}
+
+export interface NapiOrgMember {
+  memberDid: string
+  role: string
+  capabilitiesJson: string
+  issuerDid: string
+  attestationRid: string
+  revoked: boolean
+  expiresAt?: string
+}
+
+export interface NapiOrgResult {
+  orgPrefix: string
+  orgDid: string
+  label: string
+  repoPath: string
+}
+
+export interface NapiPairingResponse {
+  deviceDid: string
+  deviceName?: string
+  devicePublicKeyHex: string
+}
+
+export interface NapiPairingResult {
+  deviceDid: string
+  deviceName?: string
+  attestationRid: string
+}
+
+export interface NapiPairingSession {
+  sessionId: string
+  shortCode: string
+  endpoint: string
+  token: string
+  controllerDid: string
+}
+
+export
interface NapiPinnedIdentity { + did: string + label?: string + trustLevel: string + firstSeen: string + kelSequence?: number + pinnedAt: string +} + +export interface NapiPolicyDecision { + outcome: string + reason: string + message: string +} + +export interface NapiRotationResult { + controllerDid: string + newKeyFingerprint: string + previousKeyFingerprint: string + sequence: number +} + +export interface NapiVerificationReport { + status: NapiVerificationStatus + chain: Array + warnings: Array +} + +export interface NapiVerificationResult { + valid: boolean + error?: string + errorCode?: string +} + +export interface NapiVerificationStatus { + statusType: string + at?: string + step?: number + missingLink?: string + required?: number + verified?: number +} + +export interface NapiWitnessResult { + url: string + did?: string + label?: string +} + +export declare function pinIdentity(did: string, repoPath: string, label?: string | undefined | null, trustLevel?: string | undefined | null): NapiPinnedIdentity + +export declare function removePinnedIdentity(did: string, repoPath: string): void + +export declare function removeWitness(urlStr: string, repoPath: string): void + +export declare function revokeDeviceFromIdentity(deviceDid: string, identityKeyAlias: string, repoPath: string, passphrase?: string | undefined | null, note?: string | undefined | null): void + +export declare function revokeOrgMember(orgDid: string, memberDid: string, repoPath: string, passphrase?: string | undefined | null, note?: string | undefined | null, memberPublicKeyHex?: string | undefined | null): NapiOrgMember + +export declare function rotateIdentityKeys(repoPath: string, identityKeyAlias?: string | undefined | null, nextKeyAlias?: string | undefined | null, passphrase?: string | undefined | null): NapiRotationResult + +export declare function runDiagnostics(repoPath: string, passphrase?: string | undefined | null): string + +export declare function signActionAsAgent(actionType: 
string, payloadJson: string, keyAlias: string, agentDid: string, repoPath: string, passphrase?: string | undefined | null): NapiActionEnvelope + +export declare function signActionAsIdentity(actionType: string, payloadJson: string, identityDid: string, repoPath: string, passphrase?: string | undefined | null): NapiActionEnvelope + +export declare function signArtifact(filePath: string, identityKeyAlias: string, repoPath: string, passphrase?: string | undefined | null, expiresInDays?: number | undefined | null, note?: string | undefined | null): NapiArtifactResult + +export declare function signArtifactBytes(data: Buffer, identityKeyAlias: string, repoPath: string, passphrase?: string | undefined | null, expiresInDays?: number | undefined | null, note?: string | undefined | null): NapiArtifactResult + +export declare function signAsAgent(message: Buffer, keyAlias: string, repoPath: string, passphrase?: string | undefined | null): NapiCommitSignResult + +export declare function signAsIdentity(message: Buffer, identityDid: string, repoPath: string, passphrase?: string | undefined | null): NapiCommitSignResult + +export declare function signCommit(data: Buffer, identityKeyAlias: string, repoPath: string, passphrase?: string | undefined | null): NapiCommitSignPemResult + +export declare function verifyAttestation(attestationJson: string, issuerPkHex: string): Promise + +export declare function verifyAttestationWithCapability(attestationJson: string, issuerPkHex: string, requiredCapability: string): Promise + +export declare function verifyAtTime(attestationJson: string, issuerPkHex: string, atRfc3339: string): Promise + +export declare function verifyAtTimeWithCapability(attestationJson: string, issuerPkHex: string, atRfc3339: string, requiredCapability: string): Promise + +export declare function verifyChain(attestationsJson: Array, rootPkHex: string): Promise + +export declare function verifyChainWithCapability(attestationsJson: Array, rootPkHex: string, 
requiredCapability: string): Promise + +export declare function verifyChainWithWitnesses(attestationsJson: Array, rootPkHex: string, receiptsJson: Array, witnessKeysJson: Array, threshold: number): Promise + +export declare function verifyDeviceAuthorization(identityDid: string, deviceDid: string, attestationsJson: Array, identityPkHex: string): Promise + +export declare function version(): string diff --git a/packages/auths-node/index.js b/packages/auths-node/index.js new file mode 100644 index 00000000..19a192ba --- /dev/null +++ b/packages/auths-node/index.js @@ -0,0 +1,622 @@ +// prettier-ignore +/* eslint-disable */ +// @ts-nocheck +/* auto-generated by NAPI-RS */ + +const { readFileSync } = require('node:fs') +let nativeBinding = null +const loadErrors = [] + +const isMusl = () => { + let musl = false + if (process.platform === 'linux') { + musl = isMuslFromFilesystem() + if (musl === null) { + musl = isMuslFromReport() + } + if (musl === null) { + musl = isMuslFromChildProcess() + } + } + return musl +} + +const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-') + +const isMuslFromFilesystem = () => { + try { + return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl') + } catch { + return null + } +} + +const isMuslFromReport = () => { + let report = null + if (typeof process.report?.getReport === 'function') { + process.report.excludeNetwork = true + report = process.report.getReport() + } + if (!report) { + return null + } + if (report.header && report.header.glibcVersionRuntime) { + return false + } + if (Array.isArray(report.sharedObjects)) { + if (report.sharedObjects.some(isFileMusl)) { + return true + } + } + return false +} + +const isMuslFromChildProcess = () => { + try { + return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl') + } catch (e) { + // If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false + return false + } +} + 
+function requireNative() { + if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) { + try { + return require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH); + } catch (err) { + loadErrors.push(err) + } + } else if (process.platform === 'android') { + if (process.arch === 'arm64') { + try { + return require('./auths.android-arm64.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-android-arm64') + const bindingPackageVersion = require('@auths-dev/node-android-arm64/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else if (process.arch === 'arm') { + try { + return require('./auths.android-arm-eabi.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-android-arm-eabi') + const bindingPackageVersion = require('@auths-dev/node-android-arm-eabi/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`)) + } + } else if (process.platform === 'win32') { + if (process.arch === 'x64') { + if (process.config?.variables?.shlib_suffix === 'dll.a' || process.config?.variables?.node_target_type === 'shared_library') { + try { + return require('./auths.win32-x64-gnu.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-win32-x64-gnu') + const bindingPackageVersion = require('@auths-dev/node-win32-x64-gnu/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + try { + return require('./auths.win32-x64-msvc.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-win32-x64-msvc') + const bindingPackageVersion = require('@auths-dev/node-win32-x64-msvc/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } + } else if (process.arch === 'ia32') { + try { + return require('./auths.win32-ia32-msvc.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-win32-ia32-msvc') + const bindingPackageVersion = require('@auths-dev/node-win32-ia32-msvc/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else if (process.arch === 'arm64') { + try { + return require('./auths.win32-arm64-msvc.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-win32-arm64-msvc') + const bindingPackageVersion = require('@auths-dev/node-win32-arm64-msvc/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`)) + } + } else if (process.platform === 'darwin') { + try { + return require('./auths.darwin-universal.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-darwin-universal') + const bindingPackageVersion = require('@auths-dev/node-darwin-universal/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + if (process.arch === 'x64') { + try { + return require('./auths.darwin-x64.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-darwin-x64') + const bindingPackageVersion = require('@auths-dev/node-darwin-x64/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else if (process.arch === 'arm64') { + try { + return require('./auths.darwin-arm64.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-darwin-arm64') + const bindingPackageVersion = require('@auths-dev/node-darwin-arm64/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`)) + } + } else if (process.platform === 'freebsd') { + if (process.arch === 'x64') { + try { + return require('./auths.freebsd-x64.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-freebsd-x64') + const bindingPackageVersion = require('@auths-dev/node-freebsd-x64/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else if (process.arch === 'arm64') { + try { + return require('./auths.freebsd-arm64.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-freebsd-arm64') + const bindingPackageVersion = require('@auths-dev/node-freebsd-arm64/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`)) + } + } else if (process.platform === 'linux') { + if (process.arch === 'x64') { + if (isMusl()) { + try { + return require('./auths.linux-x64-musl.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-x64-musl') + const bindingPackageVersion = require('@auths-dev/node-linux-x64-musl/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + try { + return require('./auths.linux-x64-gnu.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-x64-gnu') + const bindingPackageVersion = require('@auths-dev/node-linux-x64-gnu/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } + } else if (process.arch === 'arm64') { + if (isMusl()) { + try { + return require('./auths.linux-arm64-musl.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-arm64-musl') + const bindingPackageVersion = require('@auths-dev/node-linux-arm64-musl/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + try { + return require('./auths.linux-arm64-gnu.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-arm64-gnu') + const bindingPackageVersion = require('@auths-dev/node-linux-arm64-gnu/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } + } else if (process.arch === 'arm') { + if (isMusl()) { + try { + return require('./auths.linux-arm-musleabihf.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-arm-musleabihf') + const bindingPackageVersion = require('@auths-dev/node-linux-arm-musleabihf/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + try { + return require('./auths.linux-arm-gnueabihf.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-arm-gnueabihf') + const bindingPackageVersion = require('@auths-dev/node-linux-arm-gnueabihf/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } + } else if (process.arch === 'loong64') { + if (isMusl()) { + try { + return require('./auths.linux-loong64-musl.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-loong64-musl') + const bindingPackageVersion = require('@auths-dev/node-linux-loong64-musl/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + try { + return require('./auths.linux-loong64-gnu.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-loong64-gnu') + const bindingPackageVersion = require('@auths-dev/node-linux-loong64-gnu/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } + } else if (process.arch === 'riscv64') { + if (isMusl()) { + try { + return require('./auths.linux-riscv64-musl.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-riscv64-musl') + const bindingPackageVersion = require('@auths-dev/node-linux-riscv64-musl/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + try { + return require('./auths.linux-riscv64-gnu.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-riscv64-gnu') + const bindingPackageVersion = require('@auths-dev/node-linux-riscv64-gnu/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } + } else if (process.arch === 'ppc64') { + try { + return require('./auths.linux-ppc64-gnu.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-ppc64-gnu') + const bindingPackageVersion = require('@auths-dev/node-linux-ppc64-gnu/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else if (process.arch === 's390x') { + try { + return require('./auths.linux-s390x-gnu.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-linux-s390x-gnu') + const bindingPackageVersion = require('@auths-dev/node-linux-s390x-gnu/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`)) + } + } else if (process.platform === 'openharmony') { + if (process.arch === 'arm64') { + try { + return require('./auths.openharmony-arm64.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-openharmony-arm64') + const bindingPackageVersion = require('@auths-dev/node-openharmony-arm64/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else if (process.arch === 'x64') { + try { + return require('./auths.openharmony-x64.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-openharmony-x64') + const bindingPackageVersion = require('@auths-dev/node-openharmony-x64/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. 
You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else if (process.arch === 'arm') { + try { + return require('./auths.openharmony-arm.node') + } catch (e) { + loadErrors.push(e) + } + try { + const binding = require('@auths-dev/node-openharmony-arm') + const bindingPackageVersion = require('@auths-dev/node-openharmony-arm/package.json').version + if (bindingPackageVersion !== '0.1.0' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') { + throw new Error(`Native binding package version mismatch, expected 0.1.0 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`) + } + return binding + } catch (e) { + loadErrors.push(e) + } + } else { + loadErrors.push(new Error(`Unsupported architecture on OpenHarmony: ${process.arch}`)) + } + } else { + loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`)) + } +} + +nativeBinding = requireNative() + +if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) { + let wasiBinding = null + let wasiBindingError = null + try { + wasiBinding = require('./auths.wasi.cjs') + nativeBinding = wasiBinding + } catch (err) { + if (process.env.NAPI_RS_FORCE_WASI) { + wasiBindingError = err + } + } + if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) { + try { + wasiBinding = require('@auths-dev/node-wasm32-wasi') + nativeBinding = wasiBinding + } catch (err) { + if (process.env.NAPI_RS_FORCE_WASI) { + if (!wasiBindingError) { + wasiBindingError = err + } else { + wasiBindingError.cause = err + } + loadErrors.push(err) + } + } + } + if (process.env.NAPI_RS_FORCE_WASI === 'error' && !wasiBinding) { + const error = new Error('WASI binding not found and NAPI_RS_FORCE_WASI is set to error') + error.cause = wasiBindingError + throw error + } +} + +if (!nativeBinding) { + if (loadErrors.length > 0) { + throw new Error( + `Cannot find native binding. 
` + + `npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` + + 'Please try `npm i` again after removing both package-lock.json and node_modules directory.', + { + cause: loadErrors.reduce((err, cur) => { + cur.cause = err + return cur + }), + }, + ) + } + throw new Error(`Failed to load native binding`) +} + +module.exports = nativeBinding +module.exports.NapiPairingHandle = nativeBinding.NapiPairingHandle +module.exports.addOrgMember = nativeBinding.addOrgMember +module.exports.addWitness = nativeBinding.addWitness +module.exports.compilePolicy = nativeBinding.compilePolicy +module.exports.createAgentIdentity = nativeBinding.createAgentIdentity +module.exports.createIdentity = nativeBinding.createIdentity +module.exports.createOrg = nativeBinding.createOrg +module.exports.delegateAgent = nativeBinding.delegateAgent +module.exports.evaluatePolicy = nativeBinding.evaluatePolicy +module.exports.extendDeviceAuthorization = nativeBinding.extendDeviceAuthorization +module.exports.generateAuditReport = nativeBinding.generateAuditReport +module.exports.getIdentityPublicKey = nativeBinding.getIdentityPublicKey +module.exports.getLatestAttestation = nativeBinding.getLatestAttestation +module.exports.getPinnedIdentity = nativeBinding.getPinnedIdentity +module.exports.joinPairingSession = nativeBinding.joinPairingSession +module.exports.linkDeviceToIdentity = nativeBinding.linkDeviceToIdentity +module.exports.listAttestations = nativeBinding.listAttestations +module.exports.listAttestationsByDevice = nativeBinding.listAttestationsByDevice +module.exports.listOrgMembers = nativeBinding.listOrgMembers +module.exports.listPinnedIdentities = nativeBinding.listPinnedIdentities +module.exports.listWitnesses = nativeBinding.listWitnesses +module.exports.pinIdentity = nativeBinding.pinIdentity +module.exports.removePinnedIdentity = nativeBinding.removePinnedIdentity +module.exports.removeWitness = nativeBinding.removeWitness 
+module.exports.revokeDeviceFromIdentity = nativeBinding.revokeDeviceFromIdentity +module.exports.revokeOrgMember = nativeBinding.revokeOrgMember +module.exports.rotateIdentityKeys = nativeBinding.rotateIdentityKeys +module.exports.runDiagnostics = nativeBinding.runDiagnostics +module.exports.signActionAsAgent = nativeBinding.signActionAsAgent +module.exports.signActionAsIdentity = nativeBinding.signActionAsIdentity +module.exports.signArtifact = nativeBinding.signArtifact +module.exports.signArtifactBytes = nativeBinding.signArtifactBytes +module.exports.signAsAgent = nativeBinding.signAsAgent +module.exports.signAsIdentity = nativeBinding.signAsIdentity +module.exports.signCommit = nativeBinding.signCommit +module.exports.verifyAttestation = nativeBinding.verifyAttestation +module.exports.verifyAttestationWithCapability = nativeBinding.verifyAttestationWithCapability +module.exports.verifyAtTime = nativeBinding.verifyAtTime +module.exports.verifyAtTimeWithCapability = nativeBinding.verifyAtTimeWithCapability +module.exports.verifyChain = nativeBinding.verifyChain +module.exports.verifyChainWithCapability = nativeBinding.verifyChainWithCapability +module.exports.verifyChainWithWitnesses = nativeBinding.verifyChainWithWitnesses +module.exports.verifyDeviceAuthorization = nativeBinding.verifyDeviceAuthorization +module.exports.version = nativeBinding.version diff --git a/packages/auths-node/lib/artifacts.ts b/packages/auths-node/lib/artifacts.ts new file mode 100644 index 00000000..4e9c2157 --- /dev/null +++ b/packages/auths-node/lib/artifacts.ts @@ -0,0 +1,58 @@ +import native from './native' +import { mapNativeError, CryptoError } from './errors' +import type { Auths } from './client' + +export interface ArtifactResult { + attestationJson: string + rid: string + digest: string + fileSize: number +} + +export class ArtifactService { + constructor(private client: Auths) {} + + sign(opts: { + filePath: string + identityDid: string + expiresInDays?: number + note?: 
string + passphrase?: string + }): ArtifactResult { + const pp = opts.passphrase ?? this.client.passphrase + try { + return native.signArtifact( + opts.filePath, + opts.identityDid, + this.client.repoPath, + pp, + opts.expiresInDays ?? null, + opts.note ?? null, + ) + } catch (err) { + throw mapNativeError(err, CryptoError) + } + } + + signBytes(opts: { + data: Buffer + identityDid: string + expiresInDays?: number + note?: string + passphrase?: string + }): ArtifactResult { + const pp = opts.passphrase ?? this.client.passphrase + try { + return native.signArtifactBytes( + opts.data, + opts.identityDid, + this.client.repoPath, + pp, + opts.expiresInDays ?? null, + opts.note ?? null, + ) + } catch (err) { + throw mapNativeError(err, CryptoError) + } + } +} diff --git a/packages/auths-node/lib/attestations.ts b/packages/auths-node/lib/attestations.ts new file mode 100644 index 00000000..b46673f6 --- /dev/null +++ b/packages/auths-node/lib/attestations.ts @@ -0,0 +1,83 @@ +import native from './native' +import { mapNativeError, StorageError } from './errors' +import type { Auths } from './client' + +export interface AttestationInfo { + rid: string + issuer: string + subject: string + deviceDid: string + capabilities: string[] + signerType: string | null + expiresAt: string | null + revokedAt: string | null + createdAt: string | null + delegatedBy: string | null + json: string +} + +export class AttestationService { + constructor(private client: Auths) {} + + list(): AttestationInfo[] { + try { + return native.listAttestations(this.client.repoPath).map(a => ({ + rid: a.rid, + issuer: a.issuer, + subject: a.subject, + deviceDid: a.deviceDid, + capabilities: a.capabilities, + signerType: a.signerType ?? null, + expiresAt: a.expiresAt ?? null, + revokedAt: a.revokedAt ?? null, + createdAt: a.createdAt ?? null, + delegatedBy: a.delegatedBy ?? 
null, + json: a.json, + })) + } catch (err) { + throw mapNativeError(err, StorageError) + } + } + + listByDevice(deviceDid: string): AttestationInfo[] { + try { + return native.listAttestationsByDevice(this.client.repoPath, deviceDid).map(a => ({ + rid: a.rid, + issuer: a.issuer, + subject: a.subject, + deviceDid: a.deviceDid, + capabilities: a.capabilities, + signerType: a.signerType ?? null, + expiresAt: a.expiresAt ?? null, + revokedAt: a.revokedAt ?? null, + createdAt: a.createdAt ?? null, + delegatedBy: a.delegatedBy ?? null, + json: a.json, + })) + } catch (err) { + throw mapNativeError(err, StorageError) + } + } + + getLatest(deviceDid: string): AttestationInfo | null { + try { + const a = native.getLatestAttestation(this.client.repoPath, deviceDid) + if (!a) return null + return { + rid: a.rid, + issuer: a.issuer, + subject: a.subject, + deviceDid: a.deviceDid, + capabilities: a.capabilities, + signerType: a.signerType ?? null, + expiresAt: a.expiresAt ?? null, + revokedAt: a.revokedAt ?? null, + createdAt: a.createdAt ?? null, + delegatedBy: a.delegatedBy ?? 
null, + json: a.json, + } + } catch (err) { + throw mapNativeError(err, StorageError) + } + } +} diff --git a/packages/auths-node/lib/audit.ts b/packages/auths-node/lib/audit.ts new file mode 100644 index 00000000..4db2f036 --- /dev/null +++ b/packages/auths-node/lib/audit.ts @@ -0,0 +1,66 @@ +import native from './native' +import { mapNativeError, VerificationError } from './errors' +import type { Auths } from './client' + +export interface AuditReport { + commits: AuditCommit[] + summary: AuditSummary +} + +export interface AuditCommit { + oid: string + author_name: string + author_email: string + date: string + message: string + signature_type: string | null + signer_did: string | null + verified: boolean | null +} + +export interface AuditSummary { + total_commits: number + signed_commits: number + unsigned_commits: number + auths_signed: number + gpg_signed: number + ssh_signed: number + verification_passed: number + verification_failed: number +} + +export class AuditService { + constructor(private client: Auths) {} + + report(opts: { + targetRepoPath: string + since?: string + until?: string + author?: string + limit?: number + }): AuditReport { + try { + const json = native.generateAuditReport( + opts.targetRepoPath, + this.client.repoPath, + opts.since ?? null, + opts.until ?? null, + opts.author ?? null, + opts.limit ?? 
null, + ) + return JSON.parse(json) + } catch (err) { + throw mapNativeError(err, VerificationError) + } + } + + isCompliant(opts: { + targetRepoPath: string + since?: string + until?: string + author?: string + }): boolean { + const report = this.report(opts) + return report.summary.unsigned_commits === 0 + } +} diff --git a/packages/auths-node/lib/client.ts b/packages/auths-node/lib/client.ts new file mode 100644 index 00000000..dc98ca09 --- /dev/null +++ b/packages/auths-node/lib/client.ts @@ -0,0 +1,156 @@ +import { IdentityService } from './identity' +import { DeviceService } from './devices' +import { SigningService, type SignResult, type ActionEnvelope } from './signing' +import { OrgService } from './org' +import { TrustService } from './trust' +import { WitnessService } from './witness' +import { AttestationService } from './attestations' +import { ArtifactService } from './artifacts' +import { CommitService } from './commits' +import { AuditService } from './audit' +import { PairingService } from './pairing' +import { mapNativeError, CryptoError, VerificationError } from './errors' +import { + verifyAttestation, + verifyAttestationWithCapability, + verifyAtTime, + verifyAtTimeWithCapability, + verifyChain as verifyChainFn, + verifyChainWithCapability, + verifyChainWithWitnesses, + type VerificationResult, + type VerificationReport, + type WitnessConfig, +} from './verify' +import native from './native' + +export interface ClientConfig { + repoPath?: string + passphrase?: string +} + +export class Auths { + readonly repoPath: string + readonly passphrase: string | undefined + + readonly identities: IdentityService + readonly devices: DeviceService + readonly signing: SigningService + readonly orgs: OrgService + readonly trust: TrustService + readonly witnesses: WitnessService + readonly attestations: AttestationService + readonly artifacts: ArtifactService + readonly commits: CommitService + readonly audit: AuditService + readonly pairing: PairingService + 
+  constructor(config: ClientConfig = {}) {
+    // NOTE(review): '~/.auths' is passed through verbatim; tilde expansion is
+    // presumably done by the native layer — confirm.
+    this.repoPath = config.repoPath ?? '~/.auths'
+    this.passphrase = config.passphrase
+
+    this.identities = new IdentityService(this)
+    this.devices = new DeviceService(this)
+    this.signing = new SigningService(this)
+    this.orgs = new OrgService(this)
+    this.trust = new TrustService(this)
+    this.witnesses = new WitnessService(this)
+    this.attestations = new AttestationService(this)
+    this.artifacts = new ArtifactService(this)
+    this.commits = new CommitService(this)
+    this.audit = new AuditService(this)
+    this.pairing = new PairingService(this)
+  }
+
+  /**
+   * Verify a single attestation against an issuer key, optionally at a
+   * specific point in time (`at`, RFC 3339) and/or for a required capability.
+   * Dispatches to the matching helper from './verify'.
+   */
+  async verify(opts: {
+    attestationJson: string
+    issuerKey: string
+    requiredCapability?: string
+    at?: string
+  }): Promise<VerificationResult> {
+    if (opts.at && opts.requiredCapability) {
+      return verifyAtTimeWithCapability(opts.attestationJson, opts.issuerKey, opts.at, opts.requiredCapability)
+    }
+    if (opts.at) {
+      return verifyAtTime(opts.attestationJson, opts.issuerKey, opts.at)
+    }
+    if (opts.requiredCapability) {
+      return verifyAttestationWithCapability(opts.attestationJson, opts.issuerKey, opts.requiredCapability)
+    }
+    return verifyAttestation(opts.attestationJson, opts.issuerKey)
+  }
+
+  /**
+   * Verify a delegation chain against a root key, optionally requiring a
+   * capability or checking witness receipts (witnesses take precedence).
+   */
+  async verifyChain(opts: {
+    attestations: string[]
+    rootKey: string
+    requiredCapability?: string
+    witnesses?: WitnessConfig
+  }): Promise<VerificationReport> {
+    if (opts.witnesses) {
+      return verifyChainWithWitnesses(opts.attestations, opts.rootKey, opts.witnesses)
+    }
+    if (opts.requiredCapability) {
+      return verifyChainWithCapability(opts.attestations, opts.rootKey, opts.requiredCapability)
+    }
+    return verifyChainFn(opts.attestations, opts.rootKey)
+  }
+
+  /** Convenience wrapper around SigningService.signAsIdentity. */
+  signAs(opts: {
+    message: Buffer
+    identityDid: string
+    passphrase?: string
+  }): SignResult {
+    return this.signing.signAsIdentity({
+      message: opts.message,
+      identityDid: opts.identityDid,
+      passphrase: opts.passphrase,
+    })
+  }
+
+  /** Convenience wrapper around SigningService.signActionAsIdentity. */
+  signActionAs(opts: {
+    actionType: string
+    payloadJson: string
+    identityDid: string
+    passphrase?: string
+  }):
ActionEnvelope { + return this.signing.signActionAsIdentity({ + actionType: opts.actionType, + payloadJson: opts.payloadJson, + identityDid: opts.identityDid, + passphrase: opts.passphrase, + }) + } + + signAsAgent(opts: { + message: Buffer + keyAlias: string + passphrase?: string + }): SignResult { + return this.signing.signAsAgent({ + message: opts.message, + keyAlias: opts.keyAlias, + passphrase: opts.passphrase, + }) + } + + signActionAsAgent(opts: { + actionType: string + payloadJson: string + keyAlias: string + agentDid: string + passphrase?: string + }): ActionEnvelope { + return this.signing.signActionAsAgent(opts) + } + + getPublicKey(opts: { + identityDid: string + passphrase?: string + }): string { + return this.identities.getPublicKey(opts) + } + + doctor(): string { + return native.runDiagnostics(this.repoPath, this.passphrase) + } +} diff --git a/packages/auths-node/lib/commits.ts b/packages/auths-node/lib/commits.ts new file mode 100644 index 00000000..e0dbb7bf --- /dev/null +++ b/packages/auths-node/lib/commits.ts @@ -0,0 +1,31 @@ +import native from './native' +import { mapNativeError, CryptoError } from './errors' +import type { Auths } from './client' + +export interface CommitSignResult { + signaturePem: string + method: string + namespace: string +} + +export class CommitService { + constructor(private client: Auths) {} + + sign(opts: { + data: Buffer + identityDid: string + passphrase?: string + }): CommitSignResult { + const pp = opts.passphrase ?? 
this.client.passphrase + try { + return native.signCommit( + opts.data, + opts.identityDid, + this.client.repoPath, + pp, + ) + } catch (err) { + throw mapNativeError(err, CryptoError) + } + } +} diff --git a/packages/auths-node/lib/devices.ts b/packages/auths-node/lib/devices.ts new file mode 100644 index 00000000..ff7c0f0d --- /dev/null +++ b/packages/auths-node/lib/devices.ts @@ -0,0 +1,87 @@ +import native from './native' +import { mapNativeError, IdentityError } from './errors' +import type { Auths } from './client' + +export interface Device { + did: string + attestationId: string +} + +export interface DeviceExtension { + deviceDid: string + newExpiresAt: string + previousExpiresAt: string | null +} + +export class DeviceService { + constructor(private client: Auths) {} + + link(opts: { + identityDid: string + capabilities?: string[] + expiresInDays?: number + passphrase?: string + }): Device { + const pp = opts.passphrase ?? this.client.passphrase + try { + const result = native.linkDeviceToIdentity( + opts.identityDid, + opts.capabilities ?? [], + this.client.repoPath, + pp, + opts.expiresInDays ?? null, + ) + return { + did: result.deviceDid, + attestationId: result.attestationId, + } + } catch (err) { + throw mapNativeError(err, IdentityError) + } + } + + revoke(opts: { + deviceDid: string + identityDid: string + note?: string + passphrase?: string + }): void { + const pp = opts.passphrase ?? this.client.passphrase + try { + native.revokeDeviceFromIdentity( + opts.deviceDid, + opts.identityDid, + this.client.repoPath, + pp, + opts.note ?? null, + ) + } catch (err) { + throw mapNativeError(err, IdentityError) + } + } + + extend(opts: { + deviceDid: string + identityDid: string + days?: number + passphrase?: string + }): DeviceExtension { + const pp = opts.passphrase ?? this.client.passphrase + try { + const result = native.extendDeviceAuthorization( + opts.deviceDid, + opts.identityDid, + opts.days ?? 
90, + this.client.repoPath, + pp, + ) + return { + deviceDid: result.deviceDid, + newExpiresAt: result.newExpiresAt, + previousExpiresAt: result.previousExpiresAt ?? null, + } + } catch (err) { + throw mapNativeError(err, IdentityError) + } + } +} diff --git a/packages/auths-node/lib/errors.ts b/packages/auths-node/lib/errors.ts new file mode 100644 index 00000000..25e57b0c --- /dev/null +++ b/packages/auths-node/lib/errors.ts @@ -0,0 +1,128 @@ +export class AuthsError extends Error { + code: string + constructor(message: string, code: string) { + super(message) + this.name = 'AuthsError' + this.code = code + } +} + +export class VerificationError extends AuthsError { + constructor(message: string, code: string) { + super(message, code) + this.name = 'VerificationError' + } +} + +export class CryptoError extends AuthsError { + constructor(message: string, code: string) { + super(message, code) + this.name = 'CryptoError' + } +} + +export class KeychainError extends AuthsError { + constructor(message: string, code: string) { + super(message, code) + this.name = 'KeychainError' + } +} + +export class StorageError extends AuthsError { + constructor(message: string, code: string) { + super(message, code) + this.name = 'StorageError' + } +} + +export class NetworkError extends AuthsError { + shouldRetry: boolean + constructor(message: string, code: string, shouldRetry = true) { + super(message, code) + this.name = 'NetworkError' + this.shouldRetry = shouldRetry + } +} + +export class IdentityError extends AuthsError { + constructor(message: string, code: string) { + super(message, code) + this.name = 'IdentityError' + } +} + +export class OrgError extends AuthsError { + constructor(message: string, code: string) { + super(message, code) + this.name = 'OrgError' + } +} + +export class PairingError extends AuthsError { + shouldRetry: boolean + constructor(message: string, code: string, shouldRetry = true) { + super(message, code) + this.name = 'PairingError' + 
this.shouldRetry = shouldRetry + } +} + +const ERROR_CODE_MAP: Record AuthsError]> = { + AUTHS_ISSUER_SIG_FAILED: ['invalid_signature', VerificationError], + AUTHS_DEVICE_SIG_FAILED: ['invalid_signature', VerificationError], + AUTHS_ATTESTATION_EXPIRED: ['expired_attestation', VerificationError], + AUTHS_ATTESTATION_REVOKED: ['revoked_device', VerificationError], + AUTHS_TIMESTAMP_IN_FUTURE: ['future_timestamp', VerificationError], + AUTHS_MISSING_CAPABILITY: ['missing_capability', VerificationError], + AUTHS_CRYPTO_ERROR: ['invalid_key', CryptoError], + AUTHS_DID_RESOLUTION_ERROR: ['invalid_key', CryptoError], + AUTHS_INVALID_INPUT: ['invalid_signature', VerificationError], + AUTHS_SERIALIZATION_ERROR: ['invalid_signature', VerificationError], + AUTHS_BUNDLE_EXPIRED: ['expired_attestation', VerificationError], + AUTHS_KEY_NOT_FOUND: ['key_not_found', CryptoError], + AUTHS_INCORRECT_PASSPHRASE: ['signing_failed', CryptoError], + AUTHS_SIGNING_FAILED: ['signing_failed', CryptoError], + AUTHS_SIGNING_ERROR: ['signing_failed', CryptoError], + AUTHS_INPUT_TOO_LARGE: ['invalid_signature', VerificationError], + AUTHS_INTERNAL_ERROR: ['unknown', VerificationError], + AUTHS_ORG_VERIFICATION_FAILED: ['invalid_signature', VerificationError], + AUTHS_ORG_ATTESTATION_EXPIRED: ['expired_attestation', VerificationError], + AUTHS_ORG_DID_RESOLUTION_FAILED: ['invalid_key', CryptoError], + AUTHS_REGISTRY_ERROR: ['repo_not_found', StorageError], + AUTHS_KEYCHAIN_ERROR: ['keychain_locked', KeychainError], + AUTHS_IDENTITY_ERROR: ['identity_not_found', IdentityError], + AUTHS_DEVICE_ERROR: ['unknown', IdentityError], + AUTHS_ROTATION_ERROR: ['unknown', IdentityError], + AUTHS_NETWORK_ERROR: ['server_error', NetworkError], + AUTHS_VERIFICATION_FAILED: ['invalid_signature', VerificationError], + AUTHS_ORG_ERROR: ['org_error', OrgError], + AUTHS_PAIRING_ERROR: ['pairing_error', PairingError], + AUTHS_PAIRING_TIMEOUT: ['timeout', PairingError], + AUTHS_TRUST_ERROR: ['trust_error', 
StorageError], + AUTHS_WITNESS_ERROR: ['witness_error', StorageError], + AUTHS_AUDIT_ERROR: ['audit_error', VerificationError], + AUTHS_DIAGNOSTIC_ERROR: ['diagnostic_error', VerificationError], +} + +export function mapNativeError(err: unknown, defaultCls: new (message: string, code: string) => AuthsError = VerificationError): AuthsError { + const msg = err instanceof Error ? err.message : String(err) + + // Parse [AUTHS_CODE] prefix from native errors + if (msg.startsWith('[AUTHS_') && msg.includes('] ')) { + const code = msg.substring(1, msg.indexOf(']')) + const message = msg.substring(msg.indexOf('] ') + 2) + const mapping = ERROR_CODE_MAP[code] + if (mapping) { + const [pyCode, Cls] = mapping + return new Cls(message, pyCode) + } + } + + // Fallback heuristics + const low = msg.toLowerCase() + if (low.includes('public key') || low.includes('private key') || low.includes('invalid key') || low.includes('hex')) { + return new CryptoError(msg, 'invalid_key') + } + + return new defaultCls(msg, 'unknown') +} diff --git a/packages/auths-node/lib/identity.ts b/packages/auths-node/lib/identity.ts new file mode 100644 index 00000000..f65071d5 --- /dev/null +++ b/packages/auths-node/lib/identity.ts @@ -0,0 +1,144 @@ +import native from './native' +import { mapNativeError, CryptoError, IdentityError } from './errors' +import type { Auths } from './client' + +export interface Identity { + did: string + keyAlias: string + label: string + repoPath: string + publicKey: string +} + +export interface AgentIdentity { + did: string + keyAlias: string + attestation: string + publicKey: string +} + +export interface DelegatedAgent { + did: string + keyAlias: string + attestation: string + publicKey: string +} + +export interface RotationResult { + controllerDid: string + newKeyFingerprint: string + previousKeyFingerprint: string + sequence: number +} + +export class IdentityService { + constructor(private client: Auths) {} + + create(opts: { + label?: string + repoPath?: string + 
passphrase?: string + } = {}): Identity { + const rp = opts.repoPath ?? this.client.repoPath + const pp = opts.passphrase ?? this.client.passphrase + try { + const result = native.createIdentity(opts.label ?? 'main', rp, pp) + return { + did: result.did, + keyAlias: result.keyAlias, + label: opts.label ?? 'main', + repoPath: rp, + publicKey: result.publicKeyHex, + } + } catch (err) { + throw mapNativeError(err, IdentityError) + } + } + + createAgent(opts: { + name: string + capabilities: string[] + passphrase?: string + }): AgentIdentity { + const pp = opts.passphrase ?? this.client.passphrase + try { + const bundle = native.createAgentIdentity( + opts.name, + opts.capabilities, + this.client.repoPath, + pp, + ) + return { + did: bundle.agentDid, + keyAlias: bundle.keyAlias, + attestation: bundle.attestationJson, + publicKey: bundle.publicKeyHex, + } + } catch (err) { + throw mapNativeError(err, IdentityError) + } + } + + delegateAgent(opts: { + identityDid: string + name: string + capabilities: string[] + expiresInDays?: number + passphrase?: string + }): DelegatedAgent { + const pp = opts.passphrase ?? this.client.passphrase + try { + const bundle = native.delegateAgent( + opts.name, + opts.capabilities, + this.client.repoPath, + pp, + opts.expiresInDays ?? null, + opts.identityDid, + ) + return { + did: bundle.agentDid, + keyAlias: bundle.keyAlias, + attestation: bundle.attestationJson, + publicKey: bundle.publicKeyHex, + } + } catch (err) { + throw mapNativeError(err, IdentityError) + } + } + + rotate(opts: { + identityDid?: string + passphrase?: string + } = {}): RotationResult { + const pp = opts.passphrase ?? this.client.passphrase + try { + const result = native.rotateIdentityKeys( + this.client.repoPath, + opts.identityDid ?? 
null, + null, + pp, + ) + return { + controllerDid: result.controllerDid, + newKeyFingerprint: result.newKeyFingerprint, + previousKeyFingerprint: result.previousKeyFingerprint, + sequence: result.sequence, + } + } catch (err) { + throw mapNativeError(err, IdentityError) + } + } + + getPublicKey(opts: { + identityDid: string + passphrase?: string + }): string { + const pp = opts.passphrase ?? this.client.passphrase + try { + return native.getIdentityPublicKey(opts.identityDid, this.client.repoPath, pp) + } catch (err) { + throw mapNativeError(err, CryptoError) + } + } +} diff --git a/packages/auths-node/lib/index.ts b/packages/auths-node/lib/index.ts new file mode 100644 index 00000000..c556a322 --- /dev/null +++ b/packages/auths-node/lib/index.ts @@ -0,0 +1,44 @@ +export { Auths, type ClientConfig } from './client' +export { IdentityService, type Identity, type AgentIdentity, type DelegatedAgent, type RotationResult } from './identity' +export { DeviceService, type Device, type DeviceExtension } from './devices' +export { SigningService, type SignResult, type ActionEnvelope } from './signing' +export { OrgService, type OrgResult, type OrgMember } from './org' +export { TrustService, type PinnedIdentity } from './trust' +export { WitnessService, type WitnessEntry } from './witness' +export { AttestationService, type AttestationInfo } from './attestations' +export { ArtifactService, type ArtifactResult } from './artifacts' +export { CommitService, type CommitSignResult } from './commits' +export { AuditService, type AuditReport, type AuditCommit, type AuditSummary } from './audit' +export { PolicyBuilder, compilePolicy, evaluatePolicy, type PolicyDecision, type EvalContextOpts } from './policy' +export { PairingService, type PairingSession, type PairingResponse, type PairingResult } from './pairing' +export { + verifyAttestation, + verifyAttestationWithCapability, + verifyChain, + verifyChainWithCapability, + verifyDeviceAuthorization, + verifyAtTime, + 
verifyAtTimeWithCapability, + verifyChainWithWitnesses, + type VerificationResult, + type VerificationReport, + type VerificationStatus, + type ChainLink, + type WitnessConfig, + type WitnessKey, +} from './verify' +export { + AuthsError, + VerificationError, + CryptoError, + KeychainError, + StorageError, + NetworkError, + IdentityError, + OrgError, + PairingError, + mapNativeError, +} from './errors' + +import native from './native' +export const version: () => string = native.version diff --git a/packages/auths-node/lib/native.ts b/packages/auths-node/lib/native.ts new file mode 100644 index 00000000..eb02dcd5 --- /dev/null +++ b/packages/auths-node/lib/native.ts @@ -0,0 +1,255 @@ +// Type declarations for native napi-rs bindings (auto-generated at build time) +// This file provides typed access to the Rust #[napi] functions + +export interface NapiVerificationResult { + valid: boolean + error?: string | null + errorCode?: string | null +} + +export interface NapiVerificationStatus { + statusType: string + at?: string | null + step?: number | null + missingLink?: string | null + required?: number | null + verified?: number | null +} + +export interface NapiChainLink { + issuer: string + subject: string + valid: boolean + error?: string | null +} + +export interface NapiVerificationReport { + status: NapiVerificationStatus + chain: NapiChainLink[] + warnings: string[] +} + +export interface NapiIdentityResult { + did: string + keyAlias: string + publicKeyHex: string +} + +export interface NapiAgentIdentityBundle { + agentDid: string + keyAlias: string + attestationJson: string + publicKeyHex: string + repoPath?: string | null +} + +export interface NapiDelegatedAgentBundle { + agentDid: string + keyAlias: string + attestationJson: string + publicKeyHex: string + repoPath?: string | null +} + +export interface NapiRotationResult { + controllerDid: string + newKeyFingerprint: string + previousKeyFingerprint: string + sequence: number +} + +export interface 
NapiLinkResult { + deviceDid: string + attestationId: string +} + +export interface NapiExtensionResult { + deviceDid: string + newExpiresAt: string + previousExpiresAt?: string | null +} + +export interface NapiCommitSignResult { + signature: string + signerDid: string +} + +export interface NapiActionEnvelope { + envelopeJson: string + signatureHex: string + signerDid: string +} + +export interface NapiCommitSignPemResult { + signaturePem: string + method: string + namespace: string +} + +export interface NapiOrgResult { + orgPrefix: string + orgDid: string + label: string + repoPath: string +} + +export interface NapiOrgMember { + memberDid: string + role: string + capabilitiesJson: string + issuerDid: string + attestationRid: string + revoked: boolean + expiresAt?: string | null +} + +export interface NapiAttestation { + rid: string + issuer: string + subject: string + deviceDid: string + capabilities: string[] + signerType?: string | null + expiresAt?: string | null + revokedAt?: string | null + createdAt?: string | null + delegatedBy?: string | null + json: string +} + +export interface NapiPinnedIdentity { + did: string + label?: string | null + trustLevel: string + firstSeen: string + kelSequence?: number | null + pinnedAt: string +} + +export interface NapiWitnessResult { + url: string + did?: string | null + label?: string | null +} + +export interface NapiArtifactResult { + attestationJson: string + rid: string + digest: string + fileSize: number +} + +export interface NapiPolicyDecision { + outcome: string + reason: string + message: string +} + +export interface NapiPairingSession { + sessionId: string + shortCode: string + endpoint: string + token: string + controllerDid: string +} + +export interface NapiPairingResponse { + deviceDid: string + deviceName?: string | null + devicePublicKeyHex: string +} + +export interface NapiPairingResult { + deviceDid: string + deviceName?: string | null + attestationRid: string +} + +export interface 
NapiPairingHandleInstance { + session: NapiPairingSession + waitForResponse(timeoutSecs?: number | null): Promise + complete(deviceDid: string, devicePublicKeyHex: string, repoPath: string, capabilitiesJson?: string | null, passphrase?: string | null): Promise + stop(): Promise +} + +export interface NativeBindings { + version(): string + + // Identity + createIdentity(keyAlias: string, repoPath: string, passphrase?: string | null): NapiIdentityResult + createAgentIdentity(agentName: string, capabilities: string[], repoPath: string, passphrase?: string | null): NapiAgentIdentityBundle + delegateAgent(agentName: string, capabilities: string[], parentRepoPath: string, passphrase?: string | null, expiresInDays?: number | null, identityDid?: string | null): NapiDelegatedAgentBundle + rotateIdentityKeys(repoPath: string, identityKeyAlias?: string | null, nextKeyAlias?: string | null, passphrase?: string | null): NapiRotationResult + getIdentityPublicKey(identityDid: string, repoPath: string, passphrase?: string | null): string + + // Device + linkDeviceToIdentity(identityKeyAlias: string, capabilities: string[], repoPath: string, passphrase?: string | null, expiresInDays?: number | null): NapiLinkResult + revokeDeviceFromIdentity(deviceDid: string, identityKeyAlias: string, repoPath: string, passphrase?: string | null, note?: string | null): void + extendDeviceAuthorization(deviceDid: string, identityKeyAlias: string, days: number, repoPath: string, passphrase?: string | null): NapiExtensionResult + + // Signing + signAsIdentity(message: Buffer, identityDid: string, repoPath: string, passphrase?: string | null): NapiCommitSignResult + signActionAsIdentity(actionType: string, payloadJson: string, identityDid: string, repoPath: string, passphrase?: string | null): NapiActionEnvelope + signAsAgent(message: Buffer, keyAlias: string, repoPath: string, passphrase?: string | null): NapiCommitSignResult + signActionAsAgent(actionType: string, payloadJson: string, keyAlias: 
string, agentDid: string, repoPath: string, passphrase?: string | null): NapiActionEnvelope + + // Commit signing + signCommit(data: Buffer, identityKeyAlias: string, repoPath: string, passphrase?: string | null): NapiCommitSignPemResult + + // Org + createOrg(label: string, repoPath: string, passphrase?: string | null): NapiOrgResult + addOrgMember(orgDid: string, memberDid: string, role: string, repoPath: string, capabilitiesJson?: string | null, passphrase?: string | null, note?: string | null, memberPublicKeyHex?: string | null): NapiOrgMember + revokeOrgMember(orgDid: string, memberDid: string, repoPath: string, passphrase?: string | null, note?: string | null, memberPublicKeyHex?: string | null): NapiOrgMember + listOrgMembers(orgDid: string, includeRevoked: boolean, repoPath: string): string + + // Attestation query + listAttestations(repoPath: string): NapiAttestation[] + listAttestationsByDevice(repoPath: string, deviceDid: string): NapiAttestation[] + getLatestAttestation(repoPath: string, deviceDid: string): NapiAttestation | null + + // Trust + pinIdentity(did: string, repoPath: string, label?: string | null, trustLevel?: string | null): NapiPinnedIdentity + removePinnedIdentity(did: string, repoPath: string): void + listPinnedIdentities(repoPath: string): string + getPinnedIdentity(did: string, repoPath: string): NapiPinnedIdentity | null + + // Witness + addWitness(urlStr: string, repoPath: string, label?: string | null): NapiWitnessResult + removeWitness(urlStr: string, repoPath: string): void + listWitnesses(repoPath: string): string + + // Artifact + signArtifact(filePath: string, identityKeyAlias: string, repoPath: string, passphrase?: string | null, expiresInDays?: number | null, note?: string | null): NapiArtifactResult + signArtifactBytes(data: Buffer, identityKeyAlias: string, repoPath: string, passphrase?: string | null, expiresInDays?: number | null, note?: string | null): NapiArtifactResult + + // Audit + generateAuditReport(targetRepoPath: 
string, authsRepoPath: string, since?: string | null, until?: string | null, author?: string | null, limit?: number | null): string + + // Diagnostics + runDiagnostics(repoPath: string, passphrase?: string | null): string + + // Policy + compilePolicy(policyJson: string): string + evaluatePolicy(compiledPolicyJson: string, issuer: string, subject: string, capabilities?: string[] | null, role?: string | null, revoked?: boolean | null, expiresAt?: string | null, repo?: string | null, environment?: string | null, signerType?: string | null, delegatedBy?: string | null, chainDepth?: number | null): NapiPolicyDecision + + // Pairing + NapiPairingHandle: { + createSession(repoPath: string, capabilitiesJson?: string | null, timeoutSecs?: number | null, bindAddress?: string | null, enableMdns?: boolean | null, passphrase?: string | null): Promise + } + joinPairingSession(shortCode: string, endpoint: string, token: string, repoPath: string, deviceName?: string | null, passphrase?: string | null): Promise + + // Verification + verifyAttestation(attestationJson: string, issuerPkHex: string): Promise + verifyChain(attestationsJson: string[], rootPkHex: string): Promise + verifyDeviceAuthorization(identityDid: string, deviceDid: string, attestationsJson: string[], identityPkHex: string): Promise + verifyAttestationWithCapability(attestationJson: string, issuerPkHex: string, requiredCapability: string): Promise + verifyChainWithCapability(attestationsJson: string[], rootPkHex: string, requiredCapability: string): Promise + verifyAtTime(attestationJson: string, issuerPkHex: string, atRfc3339: string): Promise + verifyAtTimeWithCapability(attestationJson: string, issuerPkHex: string, atRfc3339: string, requiredCapability: string): Promise + verifyChainWithWitnesses(attestationsJson: string[], rootPkHex: string, receiptsJson: string[], witnessKeysJson: string[], threshold: number): Promise +} + +// eslint-disable-next-line @typescript-eslint/no-var-requires +const native: 
NativeBindings = require('../index.js') + +export default native diff --git a/packages/auths-node/lib/org.ts b/packages/auths-node/lib/org.ts new file mode 100644 index 00000000..d488677b --- /dev/null +++ b/packages/auths-node/lib/org.ts @@ -0,0 +1,117 @@ +import native from './native' +import { mapNativeError, OrgError } from './errors' +import type { Auths } from './client' + +export interface OrgResult { + orgPrefix: string + orgDid: string + label: string + repoPath: string +} + +export interface OrgMember { + memberDid: string + role: string + capabilities: string[] + issuerDid: string + attestationRid: string + revoked: boolean + expiresAt: string | null +} + +export class OrgService { + constructor(private client: Auths) {} + + create(opts: { + label: string + repoPath?: string + passphrase?: string + }): OrgResult { + const rp = opts.repoPath ?? this.client.repoPath + const pp = opts.passphrase ?? this.client.passphrase + try { + return native.createOrg(opts.label, rp, pp) + } catch (err) { + throw mapNativeError(err, OrgError) + } + } + + addMember(opts: { + orgDid: string + memberDid: string + role: string + capabilities?: string[] + passphrase?: string + note?: string + memberPublicKeyHex?: string + }): OrgMember { + const pp = opts.passphrase ?? this.client.passphrase + const capsJson = opts.capabilities ? JSON.stringify(opts.capabilities) : null + try { + const result = native.addOrgMember( + opts.orgDid, + opts.memberDid, + opts.role, + this.client.repoPath, + capsJson, + pp, + opts.note ?? null, + opts.memberPublicKeyHex ?? null, + ) + return { + memberDid: result.memberDid, + role: result.role, + capabilities: JSON.parse(result.capabilitiesJson || '[]'), + issuerDid: result.issuerDid, + attestationRid: result.attestationRid, + revoked: result.revoked, + expiresAt: result.expiresAt ?? 
null, + } + } catch (err) { + throw mapNativeError(err, OrgError) + } + } + + revokeMember(opts: { + orgDid: string + memberDid: string + passphrase?: string + note?: string + memberPublicKeyHex?: string + }): OrgMember { + const pp = opts.passphrase ?? this.client.passphrase + try { + const result = native.revokeOrgMember( + opts.orgDid, + opts.memberDid, + this.client.repoPath, + pp, + opts.note ?? null, + opts.memberPublicKeyHex ?? null, + ) + return { + memberDid: result.memberDid, + role: result.role, + capabilities: JSON.parse(result.capabilitiesJson || '[]'), + issuerDid: result.issuerDid, + attestationRid: result.attestationRid, + revoked: result.revoked, + expiresAt: result.expiresAt ?? null, + } + } catch (err) { + throw mapNativeError(err, OrgError) + } + } + + listMembers(opts: { + orgDid: string + includeRevoked?: boolean + }): OrgMember[] { + try { + const json = native.listOrgMembers(opts.orgDid, opts.includeRevoked ?? false, this.client.repoPath) + return JSON.parse(json) + } catch (err) { + throw mapNativeError(err, OrgError) + } + } +} diff --git a/packages/auths-node/lib/pairing.ts b/packages/auths-node/lib/pairing.ts new file mode 100644 index 00000000..f2ddceeb --- /dev/null +++ b/packages/auths-node/lib/pairing.ts @@ -0,0 +1,153 @@ +import native from './native' +import { mapNativeError, PairingError } from './errors' +import type { Auths } from './client' + +export interface PairingSession { + sessionId: string + shortCode: string + endpoint: string + token: string + controllerDid: string +} + +export interface PairingResponse { + deviceDid: string + deviceName: string | null + devicePublicKeyHex: string +} + +export interface PairingResult { + deviceDid: string + deviceName: string | null + attestationRid: string +} + +export class PairingService { + private handle: any | null = null + + constructor(private client: Auths) {} + + async createSession(opts?: { + capabilities?: string[] + timeoutSecs?: number + bindAddress?: string + 
enableMdns?: boolean + passphrase?: string + }): Promise { + const pp = opts?.passphrase ?? this.client.passphrase + const capsJson = opts?.capabilities ? JSON.stringify(opts.capabilities) : null + try { + this.handle = await native.NapiPairingHandle.createSession( + this.client.repoPath, + capsJson, + opts?.timeoutSecs ?? null, + opts?.bindAddress ?? null, + opts?.enableMdns ?? null, + pp, + ) + const session = this.handle.session + return { + sessionId: session.sessionId, + shortCode: session.shortCode, + endpoint: session.endpoint, + token: session.token, + controllerDid: session.controllerDid, + } + } catch (err) { + throw mapNativeError(err, PairingError) + } + } + + async waitForResponse(opts?: { timeoutSecs?: number }): Promise { + if (!this.handle) { + throw new PairingError('No active pairing session. Call createSession first.', 'AUTHS_PAIRING_ERROR') + } + try { + const result = await this.handle.waitForResponse(opts?.timeoutSecs ?? null) + return { + deviceDid: result.deviceDid, + deviceName: result.deviceName ?? null, + devicePublicKeyHex: result.devicePublicKeyHex, + } + } catch (err) { + throw mapNativeError(err, PairingError) + } + } + + async stop(): Promise { + if (this.handle) { + try { + await this.handle.stop() + } catch (err) { + throw mapNativeError(err, PairingError) + } finally { + this.handle = null + } + } + } + + async join(opts: { + shortCode: string + endpoint: string + token: string + deviceName?: string + passphrase?: string + }): Promise { + const pp = opts.passphrase ?? this.client.passphrase + try { + const result = await native.joinPairingSession( + opts.shortCode, + opts.endpoint, + opts.token, + this.client.repoPath, + opts.deviceName ?? null, + pp, + ) + return { + deviceDid: result.deviceDid, + deviceName: result.deviceName ?? 
null, + devicePublicKeyHex: result.devicePublicKeyHex, + } + } catch (err) { + throw mapNativeError(err, PairingError) + } + } + + async complete(opts: { + deviceDid: string + devicePublicKeyHex: string + capabilities?: string[] + passphrase?: string + }): Promise { + if (!this.handle) { + throw new PairingError('No active pairing session. Call createSession first.', 'AUTHS_PAIRING_ERROR') + } + const pp = opts.passphrase ?? this.client.passphrase + const capsJson = opts.capabilities ? JSON.stringify(opts.capabilities) : null + try { + const result = await this.handle.complete( + opts.deviceDid, + opts.devicePublicKeyHex, + this.client.repoPath, + capsJson, + pp, + ) + return { + deviceDid: result.deviceDid, + deviceName: result.deviceName ?? null, + attestationRid: result.attestationRid, + } + } catch (err) { + throw mapNativeError(err, PairingError) + } + } + + [Symbol.dispose](): void { + // Fire-and-forget stop for sync dispose + this.stop().catch(() => {}) + } + + async [Symbol.asyncDispose](): Promise { + await this.stop() + } +} diff --git a/packages/auths-node/lib/policy.ts b/packages/auths-node/lib/policy.ts new file mode 100644 index 00000000..5528f499 --- /dev/null +++ b/packages/auths-node/lib/policy.ts @@ -0,0 +1,249 @@ +import native from './native' +import { mapNativeError, AuthsError } from './errors' + +export interface PolicyDecision { + outcome: string + reason: string + message: string + allowed: boolean + denied: boolean +} + +export interface EvalContextOpts { + issuer: string + subject: string + capabilities?: string[] + role?: string + revoked?: boolean + expiresAt?: string + repo?: string + environment?: string + signerType?: 'human' | 'agent' | 'workload' + delegatedBy?: string + chainDepth?: number +} + +type Predicate = Record + +export class PolicyBuilder { + private predicates: Predicate[] = [] + + static standard(capability: string): PolicyBuilder { + return new PolicyBuilder() + .notRevoked() + .notExpired() + 
.requireCapability(capability) + } + + static anyOf(...builders: PolicyBuilder[]): PolicyBuilder { + const result = new PolicyBuilder() + const orArgs = builders.map(b => ({ op: 'And', args: b.predicates })) + result.predicates = [{ op: 'Or', args: orArgs }] + return result + } + + notRevoked(): PolicyBuilder { + this.predicates.push({ op: 'NotRevoked' }) + return this + } + + notExpired(): PolicyBuilder { + this.predicates.push({ op: 'NotExpired' }) + return this + } + + expiresAfter(seconds: number): PolicyBuilder { + this.predicates.push({ op: 'ExpiresAfter', args: seconds }) + return this + } + + issuedWithin(seconds: number): PolicyBuilder { + this.predicates.push({ op: 'IssuedWithin', args: seconds }) + return this + } + + requireCapability(cap: string): PolicyBuilder { + this.predicates.push({ op: 'HasCapability', args: cap }) + return this + } + + requireAllCapabilities(caps: string[]): PolicyBuilder { + for (const cap of caps) { + this.requireCapability(cap) + } + return this + } + + requireAnyCapability(caps: string[]): PolicyBuilder { + const orArgs = caps.map(c => ({ op: 'HasCapability', args: c })) + this.predicates.push({ op: 'Or', args: orArgs }) + return this + } + + requireIssuer(did: string): PolicyBuilder { + this.predicates.push({ op: 'IssuerIs', args: did }) + return this + } + + requireIssuerIn(dids: string[]): PolicyBuilder { + const orArgs = dids.map(d => ({ op: 'IssuerIs', args: d })) + this.predicates.push({ op: 'Or', args: orArgs }) + return this + } + + requireSubject(did: string): PolicyBuilder { + this.predicates.push({ op: 'SubjectIs', args: did }) + return this + } + + requireDelegatedBy(did: string): PolicyBuilder { + this.predicates.push({ op: 'DelegatedBy', args: did }) + return this + } + + requireAgent(): PolicyBuilder { + this.predicates.push({ op: 'IsAgent' }) + return this + } + + requireHuman(): PolicyBuilder { + this.predicates.push({ op: 'IsHuman' }) + return this + } + + requireWorkload(): PolicyBuilder { + 
this.predicates.push({ op: 'IsWorkload' }) + return this + } + + requireRepo(repo: string): PolicyBuilder { + this.predicates.push({ op: 'RepoIs', args: repo }) + return this + } + + requireRepoIn(repos: string[]): PolicyBuilder { + const orArgs = repos.map(r => ({ op: 'RepoIs', args: r })) + this.predicates.push({ op: 'Or', args: orArgs }) + return this + } + + requireEnv(env: string): PolicyBuilder { + this.predicates.push({ op: 'EnvIs', args: env }) + return this + } + + requireEnvIn(envs: string[]): PolicyBuilder { + const orArgs = envs.map(e => ({ op: 'EnvIs', args: e })) + this.predicates.push({ op: 'Or', args: orArgs }) + return this + } + + refMatches(pattern: string): PolicyBuilder { + this.predicates.push({ op: 'RefMatches', args: pattern }) + return this + } + + pathAllowed(patterns: string[]): PolicyBuilder { + this.predicates.push({ op: 'PathAllowed', args: patterns }) + return this + } + + maxChainDepth(depth: number): PolicyBuilder { + this.predicates.push({ op: 'MaxChainDepth', args: depth }) + return this + } + + attrEquals(key: string, value: string): PolicyBuilder { + this.predicates.push({ op: 'AttrEquals', args: { key, value } }) + return this + } + + attrIn(key: string, values: string[]): PolicyBuilder { + this.predicates.push({ op: 'AttrIn', args: { key, values } }) + return this + } + + workloadIssuerIs(did: string): PolicyBuilder { + this.predicates.push({ op: 'WorkloadIssuerIs', args: did }) + return this + } + + workloadClaimEquals(key: string, value: string): PolicyBuilder { + this.predicates.push({ op: 'WorkloadClaimEquals', args: { key, value } }) + return this + } + + orPolicy(other: PolicyBuilder): PolicyBuilder { + return PolicyBuilder.anyOf(this, other) + } + + negate(): PolicyBuilder { + const result = new PolicyBuilder() + result.predicates = [{ op: 'Not', args: { op: 'And', args: this.predicates } }] + return result + } + + toJson(): string { + if (this.predicates.length === 0) { + throw new Error('Cannot export an empty 
policy.') + } + const expr = { op: 'And', args: this.predicates } + return JSON.stringify(expr) + } + + build(): string { + if (this.predicates.length === 0) { + throw new Error( + 'Cannot build an empty policy. Add at least one predicate, ' + + 'or use PolicyBuilder.standard("capability") for the common case.' + ) + } + const json = this.toJson() + try { + return native.compilePolicy(json) + } catch (err) { + throw mapNativeError(err, AuthsError) + } + } + + evaluate(context: EvalContextOpts): PolicyDecision { + const compiledJson = this.build() + return evaluatePolicy(compiledJson, context) + } +} + +export function compilePolicy(policyJson: string): string { + try { + return native.compilePolicy(policyJson) + } catch (err) { + throw mapNativeError(err, AuthsError) + } +} + +export function evaluatePolicy(compiledPolicyJson: string, context: EvalContextOpts): PolicyDecision { + try { + const result = native.evaluatePolicy( + compiledPolicyJson, + context.issuer, + context.subject, + context.capabilities ?? null, + context.role ?? null, + context.revoked ?? null, + context.expiresAt ?? null, + context.repo ?? null, + context.environment ?? null, + context.signerType ?? null, + context.delegatedBy ?? null, + context.chainDepth ?? 
null, + ) + return { + outcome: result.outcome, + reason: result.reason, + message: result.message, + allowed: result.outcome === 'allow', + denied: result.outcome === 'deny', + } + } catch (err) { + throw mapNativeError(err, AuthsError) + } +} diff --git a/packages/auths-node/lib/signing.ts b/packages/auths-node/lib/signing.ts new file mode 100644 index 00000000..218bca19 --- /dev/null +++ b/packages/auths-node/lib/signing.ts @@ -0,0 +1,93 @@ +import native from './native' +import { mapNativeError, CryptoError } from './errors' +import type { Auths } from './client' + +export interface SignResult { + signature: string + signerDid: string +} + +export interface ActionEnvelope { + envelopeJson: string + signatureHex: string + signerDid: string +} + +export class SigningService { + constructor(private client: Auths) {} + + signAsIdentity(opts: { + message: Buffer + identityDid: string + passphrase?: string + }): SignResult { + const pp = opts.passphrase ?? this.client.passphrase + try { + const result = native.signAsIdentity( + opts.message, + opts.identityDid, + this.client.repoPath, + pp, + ) + return { signature: result.signature, signerDid: result.signerDid } + } catch (err) { + throw mapNativeError(err, CryptoError) + } + } + + signActionAsIdentity(opts: { + actionType: string + payloadJson: string + identityDid: string + passphrase?: string + }): ActionEnvelope { + const pp = opts.passphrase ?? this.client.passphrase + try { + return native.signActionAsIdentity( + opts.actionType, + opts.payloadJson, + opts.identityDid, + this.client.repoPath, + pp, + ) + } catch (err) { + throw mapNativeError(err, CryptoError) + } + } + + signAsAgent(opts: { + message: Buffer + keyAlias: string + passphrase?: string + }): SignResult { + const pp = opts.passphrase ?? 
this.client.passphrase + try { + const result = native.signAsAgent(opts.message, opts.keyAlias, this.client.repoPath, pp) + return { signature: result.signature, signerDid: result.signerDid } + } catch (err) { + throw mapNativeError(err, CryptoError) + } + } + + signActionAsAgent(opts: { + actionType: string + payloadJson: string + keyAlias: string + agentDid: string + passphrase?: string + }): ActionEnvelope { + const pp = opts.passphrase ?? this.client.passphrase + try { + return native.signActionAsAgent( + opts.actionType, + opts.payloadJson, + opts.keyAlias, + opts.agentDid, + this.client.repoPath, + pp, + ) + } catch (err) { + throw mapNativeError(err, CryptoError) + } + } +} diff --git a/packages/auths-node/lib/trust.ts b/packages/auths-node/lib/trust.ts new file mode 100644 index 00000000..fe2f3019 --- /dev/null +++ b/packages/auths-node/lib/trust.ts @@ -0,0 +1,75 @@ +import native from './native' +import { mapNativeError, StorageError } from './errors' +import type { Auths } from './client' + +export interface PinnedIdentity { + did: string + label: string | null + trustLevel: string + firstSeen: string + kelSequence: number | null + pinnedAt: string +} + +export class TrustService { + constructor(private client: Auths) {} + + pin(opts: { + did: string + label?: string + trustLevel?: 'tofu' | 'manual' | 'org_policy' + }): PinnedIdentity { + try { + const result = native.pinIdentity( + opts.did, + this.client.repoPath, + opts.label ?? null, + opts.trustLevel ?? null, + ) + return { + did: result.did, + label: result.label ?? null, + trustLevel: result.trustLevel, + firstSeen: result.firstSeen, + kelSequence: result.kelSequence ?? 
null, + pinnedAt: result.pinnedAt, + } + } catch (err) { + throw mapNativeError(err, StorageError) + } + } + + remove(did: string): void { + try { + native.removePinnedIdentity(did, this.client.repoPath) + } catch (err) { + throw mapNativeError(err, StorageError) + } + } + + list(): PinnedIdentity[] { + try { + const json = native.listPinnedIdentities(this.client.repoPath) + return JSON.parse(json) + } catch (err) { + throw mapNativeError(err, StorageError) + } + } + + get(did: string): PinnedIdentity | null { + try { + const result = native.getPinnedIdentity(did, this.client.repoPath) + if (!result) return null + return { + did: result.did, + label: result.label ?? null, + trustLevel: result.trustLevel, + firstSeen: result.firstSeen, + kelSequence: result.kelSequence ?? null, + pinnedAt: result.pinnedAt, + } + } catch (err) { + throw mapNativeError(err, StorageError) + } + } +} diff --git a/packages/auths-node/lib/verify.ts b/packages/auths-node/lib/verify.ts new file mode 100644 index 00000000..4dfb23e8 --- /dev/null +++ b/packages/auths-node/lib/verify.ts @@ -0,0 +1,115 @@ +import native from './native' +import type { NapiVerificationResult, NapiVerificationReport } from './native' +import { mapNativeError, VerificationError } from './errors' + +export interface VerificationResult { + valid: boolean + error?: string | null + errorCode?: string | null +} + +export interface VerificationStatus { + statusType: string + at?: string | null + step?: number | null + missingLink?: string | null + required?: number | null + verified?: number | null +} + +export interface ChainLink { + issuer: string + subject: string + valid: boolean + error?: string | null +} + +export interface VerificationReport { + status: VerificationStatus + chain: ChainLink[] + warnings: string[] +} + +export interface WitnessKey { + did: string + publicKeyHex: string +} + +export interface WitnessConfig { + receipts: string[] + keys: WitnessKey[] + threshold: number +} + +export async function 
verifyAttestation(attestationJson: string, issuerPkHex: string): Promise { + try { + return await native.verifyAttestation(attestationJson, issuerPkHex) + } catch (err) { + throw mapNativeError(err, VerificationError) + } +} + +export async function verifyAttestationWithCapability(attestationJson: string, issuerPkHex: string, requiredCapability: string): Promise { + try { + return await native.verifyAttestationWithCapability(attestationJson, issuerPkHex, requiredCapability) + } catch (err) { + throw mapNativeError(err, VerificationError) + } +} + +export async function verifyChain(attestationsJson: string[], rootPkHex: string): Promise { + try { + return await native.verifyChain(attestationsJson, rootPkHex) + } catch (err) { + throw mapNativeError(err, VerificationError) + } +} + +export async function verifyChainWithCapability(attestationsJson: string[], rootPkHex: string, requiredCapability: string): Promise { + try { + return await native.verifyChainWithCapability(attestationsJson, rootPkHex, requiredCapability) + } catch (err) { + throw mapNativeError(err, VerificationError) + } +} + +export async function verifyDeviceAuthorization(identityDid: string, deviceDid: string, attestationsJson: string[], identityPkHex: string): Promise { + try { + return await native.verifyDeviceAuthorization(identityDid, deviceDid, attestationsJson, identityPkHex) + } catch (err) { + throw mapNativeError(err, VerificationError) + } +} + +export async function verifyAtTime(attestationJson: string, issuerPkHex: string, atRfc3339: string): Promise { + try { + return await native.verifyAtTime(attestationJson, issuerPkHex, atRfc3339) + } catch (err) { + throw mapNativeError(err, VerificationError) + } +} + +export async function verifyAtTimeWithCapability(attestationJson: string, issuerPkHex: string, atRfc3339: string, requiredCapability: string): Promise { + try { + return await native.verifyAtTimeWithCapability(attestationJson, issuerPkHex, atRfc3339, requiredCapability) + } catch 
(err) { + throw mapNativeError(err, VerificationError) + } +} + +export async function verifyChainWithWitnesses(attestationsJson: string[], rootPkHex: string, witnesses: WitnessConfig): Promise { + const keysJson = witnesses.keys.map(k => + JSON.stringify({ did: k.did, public_key_hex: k.publicKeyHex }), + ) + try { + return await native.verifyChainWithWitnesses( + attestationsJson, + rootPkHex, + witnesses.receipts, + keysJson, + witnesses.threshold, + ) + } catch (err) { + throw mapNativeError(err, VerificationError) + } +} diff --git a/packages/auths-node/lib/witness.ts b/packages/auths-node/lib/witness.ts new file mode 100644 index 00000000..da67e9ff --- /dev/null +++ b/packages/auths-node/lib/witness.ts @@ -0,0 +1,43 @@ +import native from './native' +import { mapNativeError, StorageError } from './errors' +import type { Auths } from './client' + +export interface WitnessEntry { + url: string + did: string | null + label: string | null +} + +export class WitnessService { + constructor(private client: Auths) {} + + add(opts: { url: string; label?: string }): WitnessEntry { + try { + const result = native.addWitness(opts.url, this.client.repoPath, opts.label ?? null) + return { + url: result.url, + did: result.did ?? null, + label: result.label ?? 
null, + } + } catch (err) { + throw mapNativeError(err, StorageError) + } + } + + remove(url: string): void { + try { + native.removeWitness(url, this.client.repoPath) + } catch (err) { + throw mapNativeError(err, StorageError) + } + } + + list(): WitnessEntry[] { + try { + const json = native.listWitnesses(this.client.repoPath) + return JSON.parse(json) + } catch (err) { + throw mapNativeError(err, StorageError) + } + } +} diff --git a/packages/auths-node/package.json b/packages/auths-node/package.json new file mode 100644 index 00000000..c21c022b --- /dev/null +++ b/packages/auths-node/package.json @@ -0,0 +1,44 @@ +{ + "name": "@auths-dev/node", + "version": "0.1.0", + "description": "Node.js bindings for the Auths decentralized identity SDK", + "license": "Apache-2.0", + "main": "index.js", + "types": "index.d.ts", + "exports": { + ".": { + "types": "./index.d.ts", + "default": "./index.js" + }, + "./package.json": "./package.json" + }, + "engines": { + "node": ">=20.0.0" + }, + "napi": { + "binaryName": "auths", + "targets": [ + "x86_64-apple-darwin", + "aarch64-apple-darwin", + "x86_64-unknown-linux-gnu", + "x86_64-unknown-linux-musl", + "aarch64-unknown-linux-gnu", + "aarch64-unknown-linux-musl", + "x86_64-pc-windows-msvc", + "aarch64-pc-windows-msvc" + ] + }, + "scripts": { + "artifacts": "napi artifacts", + "build": "napi build --platform --release", + "build:debug": "napi build --platform", + "prepublishOnly": "napi prepublish -t npm", + "test": "vitest run", + "universal": "napi universal -t darwin" + }, + "devDependencies": { + "@napi-rs/cli": "^3.0.0", + "typescript": "^5.7.0", + "vitest": "^3.0.0" + } +} diff --git a/packages/auths-node/src/artifact.rs b/packages/auths-node/src/artifact.rs new file mode 100644 index 00000000..45de3db1 --- /dev/null +++ b/packages/auths-node/src/artifact.rs @@ -0,0 +1,217 @@ +use std::io::Read; +use std::path::PathBuf; +use std::sync::Arc; + +use auths_core::signing::PrefilledPassphraseProvider; +use 
auths_core::storage::keychain::{KeyAlias, get_platform_keychain_with_config}; +use auths_sdk::context::AuthsContext; +use auths_sdk::ports::artifact::{ArtifactDigest, ArtifactError, ArtifactMetadata, ArtifactSource}; +use auths_sdk::signing::{ + ArtifactSigningParams, SigningKeyMaterial, sign_artifact as sdk_sign_artifact, +}; +use auths_storage::git::{ + GitRegistryBackend, RegistryAttestationStorage, RegistryConfig, RegistryIdentityStorage, +}; +use auths_verifier::clock::SystemClock; +use napi_derive::napi; +use sha2::{Digest, Sha256}; + +use crate::error::format_error; +use crate::helpers::{make_env_config, resolve_passphrase}; + +struct FileArtifact { + path: PathBuf, +} + +impl ArtifactSource for FileArtifact { + fn digest(&self) -> Result { + let mut file = std::fs::File::open(&self.path) + .map_err(|e| ArtifactError::Io(format!("{}: {e}", self.path.display())))?; + let mut hasher = Sha256::new(); + let mut buf = [0u8; 8192]; + loop { + let n = file + .read(&mut buf) + .map_err(|e| ArtifactError::Io(e.to_string()))?; + if n == 0 { + break; + } + hasher.update(&buf[..n]); + } + Ok(ArtifactDigest { + algorithm: "sha256".to_string(), + hex: hex::encode(hasher.finalize()), + }) + } + + fn metadata(&self) -> Result { + let digest = self.digest()?; + let meta = std::fs::metadata(&self.path) + .map_err(|e| ArtifactError::Metadata(format!("{}: {e}", self.path.display())))?; + Ok(ArtifactMetadata { + artifact_type: "file".to_string(), + digest, + name: self + .path + .file_name() + .map(|n| n.to_string_lossy().to_string()), + size: Some(meta.len()), + }) + } +} + +struct BytesArtifact { + data: Vec, +} + +impl ArtifactSource for BytesArtifact { + fn digest(&self) -> Result { + let mut hasher = Sha256::new(); + hasher.update(&self.data); + Ok(ArtifactDigest { + algorithm: "sha256".to_string(), + hex: hex::encode(hasher.finalize()), + }) + } + + fn metadata(&self) -> Result { + let digest = self.digest()?; + Ok(ArtifactMetadata { + artifact_type: "bytes".to_string(), + 
digest, + name: None, + size: Some(self.data.len() as u64), + }) + } +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiArtifactResult { + pub attestation_json: String, + pub rid: String, + pub digest: String, + pub file_size: i64, +} + +fn build_context_and_sign( + artifact: Arc, + identity_key_alias: &str, + repo_path: &str, + passphrase: Option, + expires_in_days: Option, + note: Option, +) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let env_config = make_env_config(&passphrase_str, repo_path); + let provider = Arc::new(PrefilledPassphraseProvider::new(&passphrase_str)); + let clock = Arc::new(SystemClock); + + let repo = PathBuf::from(shellexpand::tilde(repo_path).as_ref()); + let config = RegistryConfig::single_tenant(&repo); + let backend = Arc::new(GitRegistryBackend::open_existing(config).map_err(|e| { + format_error( + "AUTHS_REGISTRY_ERROR", + format!("Failed to open registry: {e}"), + ) + })?); + + let keychain = get_platform_keychain_with_config(&env_config) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?; + let keychain = Arc::from(keychain); + + let identity_storage = Arc::new(RegistryIdentityStorage::new(&repo)); + let attestation_storage = Arc::new(RegistryAttestationStorage::new(&repo)); + + let alias = KeyAlias::new(identity_key_alias) + .map_err(|e| format_error("AUTHS_KEY_NOT_FOUND", format!("Invalid key alias: {e}")))?; + + let ctx = AuthsContext::builder() + .registry(backend) + .key_storage(keychain) + .clock(clock) + .identity_storage(identity_storage) + .attestation_sink(attestation_storage.clone()) + .attestation_source(attestation_storage) + .passphrase_provider(provider) + .build(); + + let file_size = artifact + .metadata() + .map(|m| m.size.unwrap_or(0)) + .unwrap_or(0) as i64; + + let params = ArtifactSigningParams { + artifact, + identity_key: Some(SigningKeyMaterial::Alias(alias.clone())), + device_key: SigningKeyMaterial::Alias(alias), + expires_in_days, + 
note, + }; + + let result = sdk_sign_artifact(params, &ctx).map_err(|e| { + format_error( + "AUTHS_SIGNING_FAILED", + format!("Artifact signing failed: {e}"), + ) + })?; + + Ok(NapiArtifactResult { + attestation_json: result.attestation_json, + rid: result.rid, + digest: result.digest, + file_size, + }) +} + +#[napi] +pub fn sign_artifact( + file_path: String, + identity_key_alias: String, + repo_path: String, + passphrase: Option, + expires_in_days: Option, + note: Option, +) -> napi::Result { + let path = PathBuf::from(shellexpand::tilde(&file_path).as_ref()); + if !path.exists() { + return Err(format_error( + "AUTHS_INVALID_INPUT", + format!( + "Artifact not found: '{file_path}'. Check the path and ensure the file exists." + ), + )); + } + + let artifact = Arc::new(FileArtifact { path }); + build_context_and_sign( + artifact, + &identity_key_alias, + &repo_path, + passphrase, + expires_in_days, + note, + ) +} + +#[napi] +pub fn sign_artifact_bytes( + data: napi::bindgen_prelude::Buffer, + identity_key_alias: String, + repo_path: String, + passphrase: Option, + expires_in_days: Option, + note: Option, +) -> napi::Result { + let artifact = Arc::new(BytesArtifact { + data: data.to_vec(), + }); + build_context_and_sign( + artifact, + &identity_key_alias, + &repo_path, + passphrase, + expires_in_days, + note, + ) +} diff --git a/packages/auths-node/src/attestation_query.rs b/packages/auths-node/src/attestation_query.rs new file mode 100644 index 00000000..30f6df3d --- /dev/null +++ b/packages/auths-node/src/attestation_query.rs @@ -0,0 +1,104 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use auths_id::attestation::group::AttestationGroup; +use auths_id::storage::attestation::AttestationSource; +use auths_storage::git::{GitRegistryBackend, RegistryAttestationStorage, RegistryConfig}; +use auths_verifier::core::Attestation; +use auths_verifier::types::DeviceDID; +use napi_derive::napi; + +use crate::error::format_error; + +#[napi(object)] +#[derive(Clone)] +pub 
struct NapiAttestation { + pub rid: String, + pub issuer: String, + pub subject: String, + pub device_did: String, + pub capabilities: Vec, + pub signer_type: Option, + pub expires_at: Option, + pub revoked_at: Option, + pub created_at: Option, + pub delegated_by: Option, + pub json: String, +} + +fn attestation_to_napi(att: &Attestation) -> NapiAttestation { + let json = serde_json::to_string(att).unwrap_or_default(); + NapiAttestation { + rid: att.rid.to_string(), + issuer: att.issuer.to_string(), + subject: att.subject.to_string(), + device_did: att.subject.to_string(), + capabilities: att.capabilities.iter().map(|c| c.to_string()).collect(), + signer_type: att.signer_type.as_ref().map(|s| format!("{s:?}")), + expires_at: att.expires_at.map(|t| t.to_rfc3339()), + revoked_at: att.revoked_at.map(|t| t.to_rfc3339()), + created_at: att.timestamp.map(|t| t.to_rfc3339()), + delegated_by: att.delegated_by.as_ref().map(|d| d.to_string()), + json, + } +} + +fn open_attestation_storage(repo_path: &str) -> napi::Result> { + let repo = PathBuf::from(shellexpand::tilde(repo_path).as_ref()); + let config = RegistryConfig::single_tenant(&repo); + let _backend = GitRegistryBackend::open_existing(config).map_err(|e| { + format_error( + "AUTHS_REGISTRY_ERROR", + format!("Failed to open registry: {e}"), + ) + })?; + Ok(Arc::new(RegistryAttestationStorage::new(&repo))) +} + +#[napi] +pub fn list_attestations(repo_path: String) -> napi::Result> { + let storage = open_attestation_storage(&repo_path)?; + let all = storage.load_all_attestations().map_err(|e| { + format_error( + "AUTHS_REGISTRY_ERROR", + format!("Failed to load attestations: {e}"), + ) + })?; + Ok(all.iter().map(attestation_to_napi).collect()) +} + +#[napi] +pub fn list_attestations_by_device( + repo_path: String, + device_did: String, +) -> napi::Result> { + let storage = open_attestation_storage(&repo_path)?; + let all = storage.load_all_attestations().map_err(|e| { + format_error( + "AUTHS_REGISTRY_ERROR", + 
format!("Failed to load attestations: {e}"), + ) + })?; + let group = AttestationGroup::from_list(all); + Ok(group + .get(&device_did) + .map(|atts| atts.iter().map(attestation_to_napi).collect()) + .unwrap_or_default()) +} + +#[napi] +pub fn get_latest_attestation( + repo_path: String, + device_did: String, +) -> napi::Result> { + let storage = open_attestation_storage(&repo_path)?; + let all = storage.load_all_attestations().map_err(|e| { + format_error( + "AUTHS_REGISTRY_ERROR", + format!("Failed to load attestations: {e}"), + ) + })?; + let group = AttestationGroup::from_list(all); + let did = DeviceDID(device_did); + Ok(group.latest(&did).map(attestation_to_napi)) +} diff --git a/packages/auths-node/src/audit.rs b/packages/auths-node/src/audit.rs new file mode 100644 index 00000000..ddba573a --- /dev/null +++ b/packages/auths-node/src/audit.rs @@ -0,0 +1,128 @@ +use std::path::PathBuf; + +use auths_infra_git::audit::Git2LogProvider; +use auths_sdk::ports::git::SignatureStatus; +use auths_sdk::workflows::audit::AuditWorkflow; +use napi_derive::napi; + +use crate::error::format_error; + +fn resolve_repo(repo_path: &str) -> PathBuf { + PathBuf::from(shellexpand::tilde(repo_path).as_ref()) +} + +fn parse_timestamp(ts: &str) -> Option { + chrono::NaiveDateTime::parse_from_str(&ts[..19], "%Y-%m-%dT%H:%M:%S").ok() +} + +#[napi] +pub fn generate_audit_report( + target_repo_path: String, + auths_repo_path: String, + since: Option, + until: Option, + author: Option, + limit: Option, +) -> napi::Result { + let target = resolve_repo(&target_repo_path); + let _auths = resolve_repo(&auths_repo_path); + let limit = limit.unwrap_or(500) as usize; + + let provider = + Git2LogProvider::open(&target).map_err(|e| format_error("AUTHS_AUDIT_ERROR", e))?; + + let workflow = AuditWorkflow::new(&provider); + let report = workflow + .generate_report(None, Some(limit)) + .map_err(|e| format_error("AUTHS_AUDIT_ERROR", e))?; + + let since_filter = since.and_then(|s| { + 
chrono::NaiveDate::parse_from_str(&s, "%Y-%m-%d") + .ok() + .and_then(|d| d.and_hms_opt(0, 0, 0)) + }); + let until_filter = until.and_then(|u| { + chrono::NaiveDate::parse_from_str(&u, "%Y-%m-%d") + .ok() + .and_then(|d| d.and_hms_opt(23, 59, 59)) + }); + + let commits: Vec = report + .commits + .iter() + .filter(|c| { + if author.as_ref().is_some_and(|a| c.author_email != *a) { + return false; + } + if since_filter.is_some_and(|since_dt| { + parse_timestamp(&c.timestamp).is_some_and(|ct| ct < since_dt) + }) { + return false; + } + if until_filter.is_some_and(|until_dt| { + parse_timestamp(&c.timestamp).is_some_and(|ct| ct > until_dt) + }) { + return false; + } + true + }) + .map(|c| { + let (sig_type, signer_did, verified) = match &c.signature_status { + SignatureStatus::AuthsSigned { signer_did } => { + (Some("auths"), Some(signer_did.as_str()), Some(true)) + } + SignatureStatus::SshSigned => (Some("ssh"), None, None), + SignatureStatus::GpgSigned { verified } => (Some("gpg"), None, Some(*verified)), + SignatureStatus::InvalidSignature { .. 
} => (Some("invalid"), None, Some(false)), + SignatureStatus::Unsigned => (None, None, None), + }; + serde_json::json!({ + "oid": c.hash, + "author_name": c.author_name, + "author_email": c.author_email, + "date": c.timestamp, + "message": c.message, + "signature_type": sig_type, + "signer_did": signer_did, + "verified": verified, + }) + }) + .collect(); + + let total = commits.len(); + let signed = commits + .iter() + .filter(|c| c["signature_type"] != serde_json::Value::Null) + .count(); + let unsigned = total - signed; + let auths_signed = commits + .iter() + .filter(|c| c["signature_type"] == "auths") + .count(); + let gpg_signed = commits + .iter() + .filter(|c| c["signature_type"] == "gpg") + .count(); + let ssh_signed = commits + .iter() + .filter(|c| c["signature_type"] == "ssh") + .count(); + let verification_passed = commits.iter().filter(|c| c["verified"] == true).count(); + let verification_failed = signed - verification_passed; + + let result = serde_json::json!({ + "commits": commits, + "summary": { + "total_commits": total, + "signed_commits": signed, + "unsigned_commits": unsigned, + "auths_signed": auths_signed, + "gpg_signed": gpg_signed, + "ssh_signed": ssh_signed, + "verification_passed": verification_passed, + "verification_failed": verification_failed, + }, + }); + + serde_json::to_string(&result).map_err(|e| format_error("AUTHS_AUDIT_ERROR", e)) +} diff --git a/packages/auths-node/src/commit_sign.rs b/packages/auths-node/src/commit_sign.rs new file mode 100644 index 00000000..0f09e9db --- /dev/null +++ b/packages/auths-node/src/commit_sign.rs @@ -0,0 +1,63 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use auths_core::signing::PrefilledPassphraseProvider; +use auths_core::storage::keychain::get_platform_keychain_with_config; +use auths_sdk::workflows::signing::{ + CommitSigningContext, CommitSigningParams, CommitSigningWorkflow, +}; +use napi_derive::napi; + +use crate::error::format_error; +use crate::helpers::{make_env_config, 
resolve_passphrase}; + +#[napi(object)] +#[derive(Clone)] +pub struct NapiCommitSignPemResult { + pub signature_pem: String, + pub method: String, + pub namespace: String, +} + +#[napi] +pub fn sign_commit( + data: napi::bindgen_prelude::Buffer, + identity_key_alias: String, + repo_path: String, + passphrase: Option, +) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let env_config = make_env_config(&passphrase_str, &repo_path); + let provider = Arc::new(PrefilledPassphraseProvider::new(&passphrase_str)); + + let keychain = get_platform_keychain_with_config(&env_config) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?; + let keychain = Arc::from(keychain); + + let repo = PathBuf::from(shellexpand::tilde(&repo_path).as_ref()); + + let params = + CommitSigningParams::new(&identity_key_alias, "git", data.to_vec()).with_repo_path(repo); + + let signing_ctx = CommitSigningContext { + key_storage: keychain, + passphrase_provider: provider, + agent_signing: Arc::new(auths_sdk::ports::agent::NoopAgentProvider), + }; + + #[allow(clippy::disallowed_methods)] // Presentation boundary + let now = chrono::Utc::now(); + + let pem = CommitSigningWorkflow::execute(&signing_ctx, params, now).map_err(|e| { + format_error( + "AUTHS_SIGNING_FAILED", + format!("Commit signing failed: {e}"), + ) + })?; + + Ok(NapiCommitSignPemResult { + signature_pem: pem, + method: "direct".to_string(), + namespace: "git".to_string(), + }) +} diff --git a/packages/auths-node/src/device.rs b/packages/auths-node/src/device.rs new file mode 100644 index 00000000..46ee3820 --- /dev/null +++ b/packages/auths-node/src/device.rs @@ -0,0 +1,210 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use auths_core::signing::PrefilledPassphraseProvider; +use auths_core::storage::keychain::get_platform_keychain_with_config; +use auths_sdk::context::AuthsContext; +use auths_sdk::device::extend_device; +use auths_sdk::device::{link_device, revoke_device}; 
+use auths_sdk::types::{DeviceExtensionConfig, DeviceLinkConfig}; +use auths_storage::git::{ + GitRegistryBackend, RegistryAttestationStorage, RegistryConfig, RegistryIdentityStorage, +}; +use auths_verifier::clock::SystemClock; +use auths_verifier::core::Capability; +use napi_derive::napi; + +use crate::error::format_error; +use crate::helpers::{make_env_config, resolve_key_alias, resolve_passphrase}; +use crate::types::{NapiExtensionResult, NapiLinkResult}; + +fn open_backend(repo: &PathBuf) -> napi::Result> { + let config = RegistryConfig::single_tenant(repo); + let backend = GitRegistryBackend::open_existing(config).map_err(|e| { + format_error( + "AUTHS_REGISTRY_ERROR", + format!("Failed to open registry: {e}"), + ) + })?; + Ok(Arc::new(backend)) +} + +#[napi] +pub fn link_device_to_identity( + identity_key_alias: String, + capabilities: Vec, + repo_path: String, + passphrase: Option, + expires_in_days: Option, +) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let env_config = make_env_config(&passphrase_str, &repo_path); + let provider = Arc::new(PrefilledPassphraseProvider::new(&passphrase_str)); + let clock = Arc::new(SystemClock); + + let repo = PathBuf::from(shellexpand::tilde(&repo_path).as_ref()); + let backend = open_backend(&repo)?; + + let keychain = get_platform_keychain_with_config(&env_config) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?; + + let alias = resolve_key_alias(&identity_key_alias, keychain.as_ref())?; + + let parsed_caps: Vec = capabilities + .iter() + .map(|c| { + Capability::parse(c).map_err(|e| { + format_error( + "AUTHS_INVALID_INPUT", + format!("Invalid capability '{c}': {e}"), + ) + }) + }) + .collect::>>()?; + + let link_config = DeviceLinkConfig { + identity_key_alias: alias, + device_key_alias: None, + device_did: None, + capabilities: parsed_caps, + expires_in_days, + note: None, + payload: None, + }; + + let keychain: Arc = + Arc::from(keychain); + let 
identity_storage = Arc::new(RegistryIdentityStorage::new(&repo)); + let attestation_storage = Arc::new(RegistryAttestationStorage::new(&repo)); + + let ctx = AuthsContext::builder() + .registry(backend) + .key_storage(keychain) + .clock(clock.clone()) + .identity_storage(identity_storage) + .attestation_sink(attestation_storage.clone()) + .attestation_source(attestation_storage) + .passphrase_provider(provider) + .build(); + + let result = link_device(link_config, &ctx, clock.as_ref()) + .map_err(|e| format_error("AUTHS_DEVICE_ERROR", format!("Device linking failed: {e}")))?; + + Ok(NapiLinkResult { + device_did: result.device_did.to_string(), + attestation_id: result.attestation_id.to_string(), + }) +} + +#[napi] +pub fn revoke_device_from_identity( + device_did: String, + identity_key_alias: String, + repo_path: String, + passphrase: Option, + note: Option, +) -> napi::Result<()> { + let passphrase_str = resolve_passphrase(passphrase); + let env_config = make_env_config(&passphrase_str, &repo_path); + let provider = Arc::new(PrefilledPassphraseProvider::new(&passphrase_str)); + let clock = Arc::new(SystemClock); + + let repo = PathBuf::from(shellexpand::tilde(&repo_path).as_ref()); + let backend = open_backend(&repo)?; + + let keychain = get_platform_keychain_with_config(&env_config) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?; + + let alias = resolve_key_alias(&identity_key_alias, keychain.as_ref())?; + + let keychain: Arc = + Arc::from(keychain); + let identity_storage = Arc::new(RegistryIdentityStorage::new(&repo)); + let attestation_storage = Arc::new(RegistryAttestationStorage::new(&repo)); + + let ctx = AuthsContext::builder() + .registry(backend) + .key_storage(keychain) + .clock(clock.clone()) + .identity_storage(identity_storage) + .attestation_sink(attestation_storage.clone()) + .attestation_source(attestation_storage) + .passphrase_provider(provider) + .build(); + + revoke_device(&device_did, &alias, &ctx, 
note, clock.as_ref()).map_err(|e| { + format_error( + "AUTHS_DEVICE_ERROR", + format!("Device revocation failed: {e}"), + ) + })?; + + Ok(()) +} + +#[napi] +pub fn extend_device_authorization( + device_did: String, + identity_key_alias: String, + days: u32, + repo_path: String, + passphrase: Option, +) -> napi::Result { + if days == 0 { + return Err(format_error( + "AUTHS_INVALID_INPUT", + "days must be positive (> 0)", + )); + } + + let passphrase_str = resolve_passphrase(passphrase); + let env_config = make_env_config(&passphrase_str, &repo_path); + let provider = Arc::new(PrefilledPassphraseProvider::new(&passphrase_str)); + let clock = Arc::new(SystemClock); + + let repo = PathBuf::from(shellexpand::tilde(&repo_path).as_ref()); + let backend = open_backend(&repo)?; + + let keychain = get_platform_keychain_with_config(&env_config) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?; + let keychain: Arc = + Arc::from(keychain); + + let identity_storage = Arc::new(RegistryIdentityStorage::new(&repo)); + let attestation_storage = Arc::new(RegistryAttestationStorage::new(&repo)); + + let alias = resolve_key_alias(&identity_key_alias, keychain.as_ref())?; + + let ext_config = DeviceExtensionConfig { + repo_path: repo, + device_did: device_did.clone(), + days, + identity_key_alias: alias, + device_key_alias: None, + }; + + let ctx = AuthsContext::builder() + .registry(backend) + .key_storage(keychain) + .clock(clock.clone()) + .identity_storage(identity_storage) + .attestation_sink(attestation_storage.clone()) + .attestation_source(attestation_storage) + .passphrase_provider(provider) + .build(); + + let result = extend_device(ext_config, &ctx, clock.as_ref()).map_err(|e| { + format_error( + "AUTHS_DEVICE_ERROR", + format!("Device extension failed: {e}"), + ) + })?; + + Ok(NapiExtensionResult { + device_did: result.device_did.to_string(), + new_expires_at: result.new_expires_at.to_rfc3339(), + previous_expires_at: result + 
.previous_expires_at + .map(|t: chrono::DateTime| t.to_rfc3339()), + }) +} diff --git a/packages/auths-node/src/diagnostics.rs b/packages/auths-node/src/diagnostics.rs new file mode 100644 index 00000000..44c804bd --- /dev/null +++ b/packages/auths-node/src/diagnostics.rs @@ -0,0 +1,104 @@ +use std::process::Command; + +use auths_sdk::ports::diagnostics::{ + CheckResult, CryptoDiagnosticProvider, DiagnosticError, GitDiagnosticProvider, +}; +use auths_sdk::workflows::diagnostics::DiagnosticsWorkflow; +use napi_derive::napi; + +use crate::error::format_error; + +struct FfiDiagnosticAdapter; + +impl GitDiagnosticProvider for FfiDiagnosticAdapter { + fn check_git_version(&self) -> Result { + let output = Command::new("git").arg("--version").output(); + let (passed, message) = match output { + Ok(out) if out.status.success() => { + let version = String::from_utf8_lossy(&out.stdout).trim().to_string(); + (true, Some(version)) + } + _ => (false, Some("git command not found on PATH".to_string())), + }; + Ok(CheckResult { + name: "Git installed".to_string(), + passed, + message, + config_issues: vec![], + }) + } + + fn get_git_config(&self, key: &str) -> Result, DiagnosticError> { + let output = Command::new("git") + .args(["config", "--global", "--get", key]) + .output() + .map_err(|e| DiagnosticError::ExecutionFailed(e.to_string()))?; + + if output.status.success() { + Ok(String::from_utf8(output.stdout) + .ok() + .map(|s| s.trim().to_string())) + } else { + Ok(None) + } + } +} + +impl CryptoDiagnosticProvider for FfiDiagnosticAdapter { + fn check_ssh_keygen_available(&self) -> Result { + let output = Command::new("ssh-keygen").arg("-V").output(); + let (passed, message) = match output { + Ok(out) if out.status.success() => (true, Some("ssh-keygen found on PATH".to_string())), + _ => ( + false, + Some("ssh-keygen command not found on PATH".to_string()), + ), + }; + Ok(CheckResult { + name: "ssh-keygen installed".to_string(), + passed, + message, + config_issues: vec![], + 
}) + } +} + +#[napi] +pub fn run_diagnostics(repo_path: String, passphrase: Option) -> napi::Result { + let _repo = repo_path; + let _passphrase = passphrase; + + let adapter = FfiDiagnosticAdapter; + let workflow = DiagnosticsWorkflow::new(&adapter, &adapter); + let report = workflow + .run() + .map_err(|e| format_error("AUTHS_DIAGNOSTIC_ERROR", e))?; + + let all_passed = report.checks.iter().all(|c| c.passed); + + let checks: Vec = report + .checks + .iter() + .map(|c| { + let fix_hint = if !c.passed { + Some("Run: auths init --profile developer") + } else { + None + }; + serde_json::json!({ + "name": c.name, + "passed": c.passed, + "message": c.message, + "fix_hint": fix_hint, + }) + }) + .collect(); + + let result = serde_json::json!({ + "checks": checks, + "all_passed": all_passed, + "version": env!("CARGO_PKG_VERSION"), + }); + + serde_json::to_string(&result).map_err(|e| format_error("AUTHS_DIAGNOSTIC_ERROR", e)) +} diff --git a/packages/auths-node/src/error.rs b/packages/auths-node/src/error.rs new file mode 100644 index 00000000..cfb0674e --- /dev/null +++ b/packages/auths-node/src/error.rs @@ -0,0 +1,5 @@ +use napi::Status; + +pub fn format_error(code: &str, message: impl std::fmt::Display) -> napi::Error { + napi::Error::new(Status::GenericFailure, format!("[{code}] {message}")) +} diff --git a/packages/auths-node/src/helpers.rs b/packages/auths-node/src/helpers.rs new file mode 100644 index 00000000..daae67d0 --- /dev/null +++ b/packages/auths-node/src/helpers.rs @@ -0,0 +1,59 @@ +use std::path::PathBuf; + +use auths_core::config::{EnvironmentConfig, KeychainConfig}; +use auths_core::storage::keychain::{ + IdentityDID, KeyAlias, KeyRole, KeyStorage, get_platform_keychain_with_config, +}; + +use crate::error::format_error; + +#[allow(clippy::disallowed_methods)] // Presentation boundary: env var read is intentional +pub fn resolve_passphrase(passphrase: Option) -> String { + passphrase.unwrap_or_else(|| 
std::env::var("AUTHS_PASSPHRASE").unwrap_or_default()) +} + +#[allow(clippy::disallowed_methods)] // Presentation boundary: env var read is intentional +pub fn resolve_repo_path(path: Option) -> PathBuf { + let raw = path + .unwrap_or_else(|| std::env::var("AUTHS_HOME").unwrap_or_else(|_| "~/.auths".to_string())); + let expanded = shellexpand::tilde(&raw); + PathBuf::from(expanded.as_ref()) +} + +pub fn make_env_config(passphrase: &str, repo_path: &str) -> EnvironmentConfig { + let mut keychain = KeychainConfig::from_env(); + if keychain.backend.is_none() { + keychain.backend = Some("file".to_string()); + } + keychain.passphrase = Some(passphrase.to_string()); + EnvironmentConfig { + auths_home: Some(repo_path.into()), + keychain, + ssh_agent_socket: None, + } +} + +pub fn get_keychain(config: &EnvironmentConfig) -> napi::Result> { + get_platform_keychain_with_config(config).map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", e)) +} + +pub fn resolve_key_alias( + identity_ref: &str, + keychain: &(dyn KeyStorage + Send + Sync), +) -> napi::Result { + if identity_ref.starts_with("did:") { + let did = IdentityDID::new_unchecked(identity_ref.to_string()); + let aliases = keychain + .list_aliases_for_identity_with_role(&did, KeyRole::Primary) + .map_err(|e| format_error("AUTHS_KEY_NOT_FOUND", format!("Key lookup failed: {e}")))?; + aliases.into_iter().next().ok_or_else(|| { + format_error( + "AUTHS_KEY_NOT_FOUND", + format!("No primary key found for identity '{identity_ref}'"), + ) + }) + } else { + KeyAlias::new(identity_ref) + .map_err(|e| format_error("AUTHS_KEY_NOT_FOUND", format!("Invalid key alias: {e}"))) + } +} diff --git a/packages/auths-node/src/identity.rs b/packages/auths-node/src/identity.rs new file mode 100644 index 00000000..3afc62d7 --- /dev/null +++ b/packages/auths-node/src/identity.rs @@ -0,0 +1,460 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use auths_core::crypto::signer::encrypt_keypair; +use auths_core::signing::PrefilledPassphraseProvider; 
+use auths_core::storage::keychain::{KeyAlias, KeyRole, get_platform_keychain_with_config}; +use auths_id::identity::helpers::{encode_seed_as_pkcs8, extract_seed_bytes}; +use auths_id::identity::initialize::initialize_registry_identity; +use auths_id::storage::attestation::AttestationSource; +use auths_sdk::context::AuthsContext; +use auths_sdk::device::link_device; +use auths_sdk::types::{DeviceLinkConfig, IdentityRotationConfig}; +use auths_sdk::workflows::rotation::rotate_identity; +use auths_storage::git::{ + GitRegistryBackend, RegistryAttestationStorage, RegistryConfig, RegistryIdentityStorage, +}; +use auths_verifier::clock::SystemClock; +use auths_verifier::core::Capability; +use auths_verifier::types::DeviceDID; +use napi_derive::napi; +use ring::rand::SystemRandom; +use ring::signature::{Ed25519KeyPair, KeyPair}; + +use crate::error::format_error; +use crate::helpers::{make_env_config, resolve_key_alias, resolve_passphrase}; +use crate::types::{ + NapiAgentIdentityBundle, NapiDelegatedAgentBundle, NapiIdentityResult, NapiRotationResult, +}; + +fn init_backend(repo: &PathBuf) -> napi::Result> { + let config = RegistryConfig::single_tenant(repo); + let backend = GitRegistryBackend::from_config_unchecked(config); + backend.init_if_needed().map_err(|e| { + format_error( + "AUTHS_REGISTRY_ERROR", + format!("Failed to initialize registry: {e}"), + ) + })?; + Ok(Arc::new(backend)) +} + +fn open_backend(repo: &PathBuf) -> napi::Result> { + let config = RegistryConfig::single_tenant(repo); + let backend = GitRegistryBackend::open_existing(config).map_err(|e| { + format_error( + "AUTHS_REGISTRY_ERROR", + format!("Failed to open registry: {e}"), + ) + })?; + Ok(Arc::new(backend)) +} + +#[napi] +pub fn create_identity( + key_alias: String, + repo_path: String, + passphrase: Option, +) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let env_config = make_env_config(&passphrase_str, &repo_path); + let alias = KeyAlias::new(&key_alias) + 
.map_err(|e| format_error("AUTHS_KEY_NOT_FOUND", format!("Invalid key alias: {e}")))?; + let provider = PrefilledPassphraseProvider::new(&passphrase_str); + + let repo = PathBuf::from(shellexpand::tilde(&repo_path).as_ref()); + let backend = init_backend(&repo)?; + + let keychain = get_platform_keychain_with_config(&env_config) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?; + + let (identity_did, result_alias) = + initialize_registry_identity(backend, &alias, &provider, keychain.as_ref(), None).map_err( + |e| { + format_error( + "AUTHS_IDENTITY_ERROR", + format!("Identity creation failed: {e}"), + ) + }, + )?; + + let pub_bytes = auths_core::storage::keychain::extract_public_key_bytes( + keychain.as_ref(), + &result_alias, + &provider, + ) + .map_err(|e| { + format_error( + "AUTHS_CRYPTO_ERROR", + format!("Public key extraction failed: {e}"), + ) + })?; + + Ok(NapiIdentityResult { + did: identity_did.to_string(), + key_alias: result_alias.to_string(), + public_key_hex: hex::encode(pub_bytes), + }) +} + +#[napi] +pub fn create_agent_identity( + agent_name: String, + capabilities: Vec, + repo_path: String, + passphrase: Option, +) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let env_config = make_env_config(&passphrase_str, &repo_path); + let alias = KeyAlias::new_unchecked(format!("{}-agent", agent_name)); + let provider = PrefilledPassphraseProvider::new(&passphrase_str); + let clock = Arc::new(SystemClock); + + let repo = PathBuf::from(shellexpand::tilde(&repo_path).as_ref()); + let backend = init_backend(&repo)?; + + let keychain = get_platform_keychain_with_config(&env_config) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?; + + let parsed_caps: Vec = capabilities + .iter() + .map(|c| { + Capability::parse(c).map_err(|e| { + format_error( + "AUTHS_INVALID_INPUT", + format!("Invalid capability '{c}': {e}"), + ) + }) + }) + .collect::>>()?; + + let 
(identity_did, result_alias) = + initialize_registry_identity(backend.clone(), &alias, &provider, keychain.as_ref(), None) + .map_err(|e| { + format_error( + "AUTHS_IDENTITY_ERROR", + format!("Agent identity creation failed: {e}"), + ) + })?; + + let pub_bytes = auths_core::storage::keychain::extract_public_key_bytes( + keychain.as_ref(), + &result_alias, + &provider, + ) + .map_err(|e| { + format_error( + "AUTHS_CRYPTO_ERROR", + format!("Public key extraction failed: {e}"), + ) + })?; + + // Use link_device to produce a proper signed self-attestation, + // following the same pattern as delegate_agent. + let link_config = DeviceLinkConfig { + identity_key_alias: result_alias.clone(), + device_key_alias: Some(result_alias.clone()), + device_did: None, + capabilities: parsed_caps, + expires_in_days: None, + note: Some(format!("Agent: {}", agent_name)), + payload: None, + }; + + let provider = Arc::new(provider); + let keychain: Arc = + Arc::from(keychain); + let identity_storage = Arc::new(RegistryIdentityStorage::new(&repo)); + let attestation_storage = Arc::new(RegistryAttestationStorage::new(&repo)); + + let ctx = AuthsContext::builder() + .registry(backend) + .key_storage(keychain) + .clock(clock.clone()) + .identity_storage(identity_storage) + .attestation_sink(attestation_storage.clone()) + .attestation_source(attestation_storage.clone()) + .passphrase_provider(provider) + .build(); + + let result = link_device(link_config, &ctx, clock.as_ref()).map_err(|e| { + format_error( + "AUTHS_IDENTITY_ERROR", + format!("Agent self-attestation failed: {e}"), + ) + })?; + + let device_did = DeviceDID(result.device_did.to_string()); + let attestations = attestation_storage + .load_attestations_for_device(&device_did) + .map_err(|e| { + format_error( + "AUTHS_REGISTRY_ERROR", + format!("Failed to load attestation: {e}"), + ) + })?; + + let attestation = attestations.last().ok_or_else(|| { + format_error( + "AUTHS_REGISTRY_ERROR", + "No attestation found after 
self-attestation", + ) + })?; + + let attestation_json = serde_json::to_string(attestation).map_err(|e| { + format_error( + "AUTHS_SERIALIZATION_ERROR", + format!("Serialization failed: {e}"), + ) + })?; + + Ok(NapiAgentIdentityBundle { + agent_did: identity_did.to_string(), + key_alias: result_alias.to_string(), + attestation_json, + public_key_hex: hex::encode(pub_bytes), + repo_path: Some(repo.to_string_lossy().to_string()), + }) +} + +#[napi] +pub fn delegate_agent( + agent_name: String, + capabilities: Vec, + parent_repo_path: String, + passphrase: Option, + expires_in_days: Option, + identity_did: Option, +) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let env_config = make_env_config(&passphrase_str, &parent_repo_path); + let provider = Arc::new(PrefilledPassphraseProvider::new(&passphrase_str)); + let clock = Arc::new(SystemClock); + + let repo = PathBuf::from(shellexpand::tilde(&parent_repo_path).as_ref()); + let backend = open_backend(&repo)?; + + let keychain = get_platform_keychain_with_config(&env_config) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?; + + let parent_alias = if let Some(ref did) = identity_did { + resolve_key_alias(did, keychain.as_ref())? + } else { + let aliases = keychain + .list_aliases() + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?; + aliases + .into_iter() + .find(|a| !a.as_str().contains("--next-")) + .ok_or_else(|| { + format_error("AUTHS_KEY_NOT_FOUND", "No identity key found in keychain") + })? 
+ }; + + let agent_alias = KeyAlias::new_unchecked(format!("{}-agent", agent_name)); + let rng = SystemRandom::new(); + let pkcs8 = Ed25519KeyPair::generate_pkcs8(&rng) + .map_err(|e| format_error("AUTHS_CRYPTO_ERROR", format!("Key generation failed: {e}")))?; + let keypair = Ed25519KeyPair::from_pkcs8(pkcs8.as_ref()) + .map_err(|e| format_error("AUTHS_CRYPTO_ERROR", format!("Key parsing failed: {e}")))?; + let agent_pubkey = keypair.public_key().as_ref().to_vec(); + + let (parent_did, _, _) = keychain + .load_key(&parent_alias) + .map_err(|e| format_error("AUTHS_KEY_NOT_FOUND", format!("Key load failed: {e}")))?; + + let seed = extract_seed_bytes(pkcs8.as_ref()) + .map_err(|e| format_error("AUTHS_CRYPTO_ERROR", format!("Seed extraction failed: {e}")))?; + let seed_pkcs8 = encode_seed_as_pkcs8(seed) + .map_err(|e| format_error("AUTHS_CRYPTO_ERROR", format!("PKCS8 encoding failed: {e}")))?; + let encrypted = encrypt_keypair(&seed_pkcs8, &passphrase_str) + .map_err(|e| format_error("AUTHS_CRYPTO_ERROR", format!("Key encryption failed: {e}")))?; + keychain + .store_key( + &agent_alias, + &parent_did, + KeyRole::DelegatedAgent, + &encrypted, + ) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Key storage failed: {e}")))?; + + let parsed_caps: Vec = capabilities + .iter() + .map(|c| { + Capability::parse(c).map_err(|e| { + format_error( + "AUTHS_INVALID_INPUT", + format!("Invalid capability '{c}': {e}"), + ) + }) + }) + .collect::>>()?; + + let link_config = DeviceLinkConfig { + identity_key_alias: parent_alias, + device_key_alias: Some(agent_alias.clone()), + device_did: None, + capabilities: parsed_caps, + expires_in_days, + note: Some(format!("Agent: {}", agent_name)), + payload: None, + }; + + let keychain: Arc = + Arc::from(keychain); + let identity_storage = Arc::new(RegistryIdentityStorage::new(&repo)); + let attestation_storage = Arc::new(RegistryAttestationStorage::new(&repo)); + + let ctx = AuthsContext::builder() + .registry(backend) + 
.key_storage(keychain) + .clock(clock.clone()) + .identity_storage(identity_storage) + .attestation_sink(attestation_storage.clone()) + .attestation_source(attestation_storage.clone()) + .passphrase_provider(provider) + .build(); + + let result = link_device(link_config, &ctx, clock.as_ref()).map_err(|e| { + format_error( + "AUTHS_IDENTITY_ERROR", + format!("Agent provisioning failed: {e}"), + ) + })?; + + let device_did = DeviceDID(result.device_did.to_string()); + let attestations = attestation_storage + .load_attestations_for_device(&device_did) + .map_err(|e| { + format_error( + "AUTHS_REGISTRY_ERROR", + format!("Failed to load attestation: {e}"), + ) + })?; + + let attestation = attestations.last().ok_or_else(|| { + format_error( + "AUTHS_REGISTRY_ERROR", + "No attestation found after provisioning", + ) + })?; + + let attestation_json = serde_json::to_string(attestation).map_err(|e| { + format_error( + "AUTHS_SERIALIZATION_ERROR", + format!("Serialization failed: {e}"), + ) + })?; + + Ok(NapiDelegatedAgentBundle { + agent_did: result.device_did.to_string(), + key_alias: agent_alias.to_string(), + attestation_json, + public_key_hex: hex::encode(&agent_pubkey), + repo_path: Some(repo.to_string_lossy().to_string()), + }) +} + +#[napi] +pub fn rotate_identity_keys( + repo_path: String, + identity_key_alias: Option, + next_key_alias: Option, + passphrase: Option, +) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let env_config = make_env_config(&passphrase_str, &repo_path); + let provider = Arc::new(PrefilledPassphraseProvider::new(&passphrase_str)); + let clock = Arc::new(SystemClock); + + let repo = PathBuf::from(shellexpand::tilde(&repo_path).as_ref()); + let backend = open_backend(&repo)?; + + let keychain = get_platform_keychain_with_config(&env_config) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?; + let keychain: Arc = + Arc::from(keychain); + + let identity_storage = 
Arc::new(RegistryIdentityStorage::new(&repo)); + let attestation_storage = Arc::new(RegistryAttestationStorage::new(&repo)); + + let alias = identity_key_alias + .as_deref() + .map(|a| resolve_key_alias(a, keychain.as_ref())) + .transpose()?; + + let ctx = AuthsContext::builder() + .registry(backend) + .key_storage(keychain) + .clock(clock.clone()) + .identity_storage(identity_storage) + .attestation_sink(attestation_storage.clone()) + .attestation_source(attestation_storage) + .passphrase_provider(provider) + .build(); + + let next_alias = next_key_alias + .as_deref() + .map(|a| { + KeyAlias::new(a).map_err(|e| { + format_error( + "AUTHS_KEY_NOT_FOUND", + format!("Invalid next key alias: {e}"), + ) + }) + }) + .transpose()?; + + let rotation_config = IdentityRotationConfig { + repo_path: repo, + identity_key_alias: alias, + next_key_alias: next_alias, + }; + + let result = rotate_identity(rotation_config, &ctx, clock.as_ref()) + .map_err(|e| format_error("AUTHS_ROTATION_ERROR", format!("Key rotation failed: {e}")))?; + + Ok(NapiRotationResult { + controller_did: result.controller_did.to_string(), + new_key_fingerprint: result.new_key_fingerprint, + previous_key_fingerprint: result.previous_key_fingerprint, + sequence: result.sequence as i64, + }) +} + +#[napi] +pub fn get_identity_public_key( + identity_did: String, + repo_path: String, + passphrase: Option, +) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let env_config = make_env_config(&passphrase_str, &repo_path); + let provider = PrefilledPassphraseProvider::new(&passphrase_str); + + let keychain = get_platform_keychain_with_config(&env_config) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?; + + let did = auths_verifier::types::IdentityDID::new(&identity_did); + let aliases = keychain + .list_aliases_for_identity_with_role(&did, KeyRole::Primary) + .map_err(|e| format_error("AUTHS_KEY_NOT_FOUND", format!("Key lookup failed: {e}")))?; + let 
alias = aliases.first().ok_or_else(|| { + format_error( + "AUTHS_KEY_NOT_FOUND", + format!("No primary key found for identity '{identity_did}'"), + ) + })?; + let pub_bytes = auths_core::storage::keychain::extract_public_key_bytes( + keychain.as_ref(), + alias, + &provider, + ) + .map_err(|e| { + format_error( + "AUTHS_CRYPTO_ERROR", + format!("Public key extraction failed: {e}"), + ) + })?; + Ok(hex::encode(pub_bytes)) +} diff --git a/packages/auths-node/src/lib.rs b/packages/auths-node/src/lib.rs new file mode 100644 index 00000000..44c5006f --- /dev/null +++ b/packages/auths-node/src/lib.rs @@ -0,0 +1,26 @@ +#![deny(clippy::all)] + +pub mod artifact; +pub mod attestation_query; +pub mod audit; +pub mod commit_sign; +pub mod device; +pub mod diagnostics; +pub mod error; +pub mod helpers; +pub mod identity; +pub mod org; +pub mod pairing; +pub mod policy; +pub mod sign; +pub mod trust; +pub mod types; +pub mod verify; +pub mod witness; + +use napi_derive::napi; + +#[napi] +pub fn version() -> String { + env!("CARGO_PKG_VERSION").to_string() +} diff --git a/packages/auths-node/src/org.rs b/packages/auths-node/src/org.rs new file mode 100644 index 00000000..4ac4fa10 --- /dev/null +++ b/packages/auths-node/src/org.rs @@ -0,0 +1,425 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use auths_core::ports::clock::SystemClock; +use auths_core::ports::id::SystemUuidProvider; +use auths_core::signing::{DidResolver, PrefilledPassphraseProvider, StorageSigner}; +use auths_core::storage::keychain::{IdentityDID, KeyAlias}; +use auths_id::attestation::create::create_signed_attestation; +use auths_id::identity::initialize::initialize_registry_identity; +use auths_id::identity::resolve::RegistryDidResolver; +use auths_id::storage::git_refs::AttestationMetadata; +use auths_id::storage::registry::{MemberFilter, RegistryBackend}; +use auths_sdk::workflows::org::{ + AddMemberCommand, OrgContext, RevokeMemberCommand, add_organization_member, + revoke_organization_member, +}; +use 
auths_storage::git::{GitRegistryBackend, RegistryConfig}; +use auths_verifier::Capability; +use auths_verifier::core::{Ed25519PublicKey, Role}; +use auths_verifier::types::DeviceDID; +use napi_derive::napi; + +use crate::error::format_error; +use crate::helpers::{make_env_config, resolve_passphrase}; + +fn get_keychain( + passphrase: &str, + repo_path: &str, +) -> napi::Result> { + let env_config = make_env_config(passphrase, repo_path); + auths_core::storage::keychain::get_platform_keychain_with_config(&env_config) + .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", e)) +} + +fn resolve_repo(repo_path: &str) -> PathBuf { + PathBuf::from(shellexpand::tilde(repo_path).as_ref()) +} + +fn find_signer_alias( + org_did: &str, + keychain: &(dyn auths_core::storage::keychain::KeyStorage + Send + Sync), +) -> napi::Result { + let identity_did = IdentityDID::new_unchecked(org_did.to_string()); + let aliases = keychain + .list_aliases_for_identity(&identity_did) + .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?; + aliases + .into_iter() + .find(|a| !a.contains("--next-")) + .ok_or_else(|| { + format_error( + "AUTHS_ORG_ERROR", + format!("No signing key found for org {org_did}"), + ) + }) +} + +fn extract_org_prefix(org_did: &str) -> String { + org_did + .strip_prefix("did:keri:") + .unwrap_or(org_did) + .to_string() +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiOrgResult { + pub org_prefix: String, + pub org_did: String, + pub label: String, + pub repo_path: String, +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiOrgMember { + pub member_did: String, + pub role: String, + pub capabilities_json: String, + pub issuer_did: String, + pub attestation_rid: String, + pub revoked: bool, + pub expires_at: Option, +} + +#[napi] +pub fn create_org( + label: String, + repo_path: String, + passphrase: Option, +) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let repo = resolve_repo(&repo_path); + + let key_alias_str = format!( + "org-{}", + 
label + .chars() + .filter(|c| c.is_alphanumeric()) + .take(20) + .collect::() + .to_lowercase() + ); + + let config = RegistryConfig::single_tenant(&repo); + let backend = GitRegistryBackend::from_config_unchecked(config); + backend + .init_if_needed() + .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?; + let backend = Arc::new(backend); + + let key_alias = KeyAlias::new_unchecked(key_alias_str); + let keychain = get_keychain(&passphrase_str, &repo_path)?; + let provider = PrefilledPassphraseProvider::new(&passphrase_str); + + let (controller_did, alias) = + initialize_registry_identity(backend.clone(), &key_alias, &provider, &*keychain, None) + .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?; + + let uuid_provider = SystemUuidProvider; + let rid = auths_core::ports::id::UuidProvider::new_id(&uuid_provider).to_string(); + + let resolver = RegistryDidResolver::new(backend.clone()); + let org_resolved = resolver + .resolve(controller_did.as_str()) + .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?; + let org_pk_bytes = *org_resolved.public_key(); + + #[allow(clippy::disallowed_methods)] + let now = chrono::Utc::now(); + let admin_capabilities = vec![ + Capability::sign_commit(), + Capability::sign_release(), + Capability::manage_members(), + Capability::rotate_keys(), + ]; + + let meta = AttestationMetadata { + note: Some(format!("Organization '{}' root admin", label)), + timestamp: Some(now), + expires_at: None, + }; + + let signer = StorageSigner::new(keychain); + let org_did_device = DeviceDID::new(controller_did.to_string()); + + let attestation = create_signed_attestation( + now, + &rid, + &controller_did, + &org_did_device, + org_pk_bytes.as_bytes(), + Some(serde_json::json!({ + "org_role": "admin", + "org_name": label + })), + &meta, + &signer, + &provider, + Some(&alias), + None, + admin_capabilities, + Some(Role::Admin), + None, + ) + .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?; + + let org_prefix = extract_org_prefix(controller_did.as_str()); + + 
backend
        // Persist the freshly signed root-admin attestation into the org registry.
        // (This is the tail of the org-initialization function whose signature
        // precedes this chunk.)
        .store_org_member(&org_prefix, &attestation)
        .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?;

    Ok(NapiOrgResult {
        org_prefix,
        org_did: controller_did.to_string(),
        label,
        repo_path,
    })
}

/// Add a member to an organization registry.
///
/// If `capabilities_json` is omitted, the role's default capabilities are used.
/// If `member_public_key_hex` is omitted, the member's key is resolved from the
/// registry via its DID. Returns the stored member record.
///
/// NOTE(review): generic type arguments in this file were stripped by the text
/// extraction (`Option<String>`, `Vec<String>`, …) and have been reconstructed
/// from usage — confirm against the original source.
#[napi]
#[allow(clippy::too_many_arguments)]
pub fn add_org_member(
    org_did: String,
    member_did: String,
    role: String,
    repo_path: String,
    capabilities_json: Option<String>,
    passphrase: Option<String>,
    note: Option<String>,
    member_public_key_hex: Option<String>,
) -> napi::Result<NapiOrgMember> {
    let passphrase_str = resolve_passphrase(passphrase);
    let repo = resolve_repo(&repo_path);

    let role_parsed: Role = role
        .parse()
        .map_err(|e| format_error("AUTHS_ORG_ERROR", format!("Invalid role: {e}")))?;

    // Explicit capabilities win; otherwise fall back to the role's defaults.
    let capabilities: Vec<String> = if let Some(json) = capabilities_json {
        serde_json::from_str(&json).map_err(|e| {
            format_error("AUTHS_ORG_ERROR", format!("Invalid capabilities JSON: {e}"))
        })?
    } else {
        role_parsed
            .default_capabilities()
            .iter()
            .map(|c| c.as_str().to_string())
            .collect()
    };

    let keychain = get_keychain(&passphrase_str, &repo_path)?;
    let signer_alias = find_signer_alias(&org_did, &*keychain)?;

    let backend = Arc::new(GitRegistryBackend::from_config_unchecked(
        RegistryConfig::single_tenant(&repo),
    ));

    // The admin (org) key is always resolved from the registry.
    let resolver = RegistryDidResolver::new(backend.clone());
    let admin_pk_hex = hex::encode(
        resolver
            .resolve(&org_did)
            .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?
            .public_key()
            .as_bytes(),
    );

    // Member key: caller-supplied hex takes precedence over registry resolution.
    let member_pk = if let Some(pk_hex) = member_public_key_hex {
        let pk_bytes = hex::decode(&pk_hex).map_err(|e| {
            format_error(
                "AUTHS_ORG_ERROR",
                format!("Invalid member public key hex: {e}"),
            )
        })?;
        Ed25519PublicKey::try_from_slice(&pk_bytes)
            .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?
    } else {
        let member_resolved = resolver
            .resolve(&member_did)
            .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?;
        *member_resolved.public_key()
    };

    let org_prefix = extract_org_prefix(&org_did);

    let signer = StorageSigner::new(keychain);
    let uuid_provider = SystemUuidProvider;
    let provider = PrefilledPassphraseProvider::new(&passphrase_str);

    let org_ctx = OrgContext {
        registry: &*backend,
        clock: &SystemClock,
        uuid_provider: &uuid_provider,
        signer: &signer,
        passphrase_provider: &provider,
    };

    let attestation = add_organization_member(
        &org_ctx,
        AddMemberCommand {
            org_prefix,
            member_did: member_did.clone(),
            member_public_key: member_pk,
            role: role_parsed,
            capabilities: capabilities.clone(),
            admin_public_key_hex: admin_pk_hex,
            signer_alias,
            note,
        },
    )
    .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?;

    let caps_json = serde_json::to_string(&capabilities).unwrap_or_default();

    Ok(NapiOrgMember {
        member_did,
        role,
        capabilities_json: caps_json,
        issuer_did: attestation.issuer.to_string(),
        attestation_rid: attestation.rid.to_string(),
        revoked: false,
        expires_at: attestation.expires_at.map(|e| e.to_rfc3339()),
    })
}

/// Revoke an organization member by issuing a signed revocation attestation.
///
/// Mirrors [`add_org_member`]: the member key may be supplied as hex or
/// resolved from the registry. Returns the (now revoked) member record.
#[napi]
pub fn revoke_org_member(
    org_did: String,
    member_did: String,
    repo_path: String,
    passphrase: Option<String>,
    note: Option<String>,
    member_public_key_hex: Option<String>,
) -> napi::Result<NapiOrgMember> {
    let passphrase_str = resolve_passphrase(passphrase);
    let repo = resolve_repo(&repo_path);

    let keychain = get_keychain(&passphrase_str, &repo_path)?;
    let signer_alias = find_signer_alias(&org_did, &*keychain)?;

    let backend = Arc::new(GitRegistryBackend::from_config_unchecked(
        RegistryConfig::single_tenant(&repo),
    ));

    let resolver = RegistryDidResolver::new(backend.clone());
    let admin_pk_hex = hex::encode(
        resolver
            .resolve(&org_did)
            .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?
            .public_key()
            .as_bytes(),
    );

    let member_pk = if let Some(pk_hex) = member_public_key_hex {
        let pk_bytes = hex::decode(&pk_hex).map_err(|e| {
            format_error(
                "AUTHS_ORG_ERROR",
                format!("Invalid member public key hex: {e}"),
            )
        })?;
        Ed25519PublicKey::try_from_slice(&pk_bytes)
            .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?
    } else {
        let member_resolved = resolver
            .resolve(&member_did)
            .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?;
        *member_resolved.public_key()
    };

    let org_prefix = extract_org_prefix(&org_did);

    let signer = StorageSigner::new(keychain);
    let uuid_provider = SystemUuidProvider;
    let provider = PrefilledPassphraseProvider::new(&passphrase_str);

    let org_ctx = OrgContext {
        registry: &*backend,
        clock: &SystemClock,
        uuid_provider: &uuid_provider,
        signer: &signer,
        passphrase_provider: &provider,
    };

    let revocation = revoke_organization_member(
        &org_ctx,
        RevokeMemberCommand {
            org_prefix,
            member_did: member_did.clone(),
            member_public_key: member_pk,
            admin_public_key_hex: admin_pk_hex,
            signer_alias,
            note,
        },
    )
    .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?;

    let caps: Vec<String> = revocation
        .capabilities
        .iter()
        .map(|c| c.as_str().to_string())
        .collect();
    let caps_json = serde_json::to_string(&caps).unwrap_or_default();
    // Missing role on the revocation record defaults to plain "member".
    let role_str = revocation
        .role
        .map(|r| r.as_str().to_string())
        .unwrap_or_else(|| "member".to_string());

    Ok(NapiOrgMember {
        member_did,
        role: role_str,
        capabilities_json: caps_json,
        issuer_did: revocation.issuer.to_string(),
        attestation_rid: revocation.rid.to_string(),
        revoked: true,
        expires_at: revocation.expires_at.map(|e| e.to_rfc3339()),
    })
}

/// List an organization's members as a JSON array string.
///
/// Revoked members are filtered out unless `include_revoked` is set.
#[napi]
pub fn list_org_members(
    org_did: String,
    include_revoked: bool,
    repo_path: String,
) -> napi::Result<String> {
    let repo = resolve_repo(&repo_path);
    let org_prefix = extract_org_prefix(&org_did);

    let backend =
        GitRegistryBackend::from_config_unchecked(RegistryConfig::single_tenant(&repo));

    let filter = MemberFilter::default();

    let members = backend
        .list_org_members(&org_prefix, &filter)
        .map_err(|e| format_error("AUTHS_ORG_ERROR", e))?;

    // NOTE(review): element type reconstructed as serde_json::Value — the
    // extraction stripped the generic; confirm against the original.
    let result: Vec<serde_json::Value> = members
        .iter()
        .filter_map(|m| {
            let is_revoked = m.revoked_at.is_some();
            if !include_revoked && is_revoked {
                return None;
            }

            let caps: Vec<String> = m
                .capabilities
                .iter()
                .map(|c| c.as_str().to_string())
                .collect();
            let role_str = m.role.as_ref().map(|r| r.as_str()).unwrap_or("member");

            Some(serde_json::json!({
                "member_did": m.did.to_string(),
                "role": role_str,
                "capabilities": caps,
                "issuer_did": m.issuer.to_string(),
                "attestation_rid": m.rid.to_string(),
                "revoked": is_revoked,
                "expires_at": m.expires_at.map(|e| e.to_rfc3339()),
            }))
        })
        .collect();

    serde_json::to_string(&result).map_err(|e| format_error("AUTHS_ORG_ERROR", e))
}
diff --git a/packages/auths-node/src/pairing.rs b/packages/auths-node/src/pairing.rs
new file mode 100644
index 00000000..e6f3c005
--- /dev/null
+++ b/packages/auths-node/src/pairing.rs
@@ -0,0 +1,451 @@
use napi_derive::napi;

use std::net::{IpAddr, Ipv4Addr, SocketAddr};
use std::sync::Arc;
use std::time::Duration;

use auths_core::storage::keychain::{IdentityDID, KeyAlias, KeyRole, KeyStorage};
use auths_id::storage::identity::IdentityStorage;
use auths_pairing_daemon::{
    MockNetworkDiscovery, MockNetworkInterfaces, PairingDaemonBuilder, PairingDaemonHandle,
    RateLimiter,
};
use auths_sdk::pairing::{
    PairingAttestationParams, PairingSessionParams, build_pairing_session_request,
    create_pairing_attestation,
};
use auths_storage::git::{RegistryAttestationStorage, RegistryIdentityStorage};
use chrono::Utc;
use tokio::sync::Mutex;

use crate::error::format_error;
use crate::helpers::{get_keychain, make_env_config, resolve_passphrase, resolve_repo_path};

#[napi(object)]
#[derive(Clone)]
pub struct
NapiPairingSession { + pub session_id: String, + pub short_code: String, + pub endpoint: String, + pub token: String, + pub controller_did: String, +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiPairingResponse { + pub device_did: String, + pub device_name: Option, + pub device_public_key_hex: String, +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiPairingResult { + pub device_did: String, + pub device_name: Option, + pub attestation_rid: String, +} + +#[napi] +pub struct NapiPairingHandle { + handle: Arc>>, + server_task: Arc>>>, + session_info: NapiPairingSession, +} + +#[napi] +impl NapiPairingHandle { + #[napi(factory)] + #[allow(clippy::too_many_arguments)] + pub async fn create_session( + repo_path: String, + capabilities_json: Option, + timeout_secs: Option, + bind_address: Option, + enable_mdns: Option, + passphrase: Option, + ) -> napi::Result { + let _pp = resolve_passphrase(passphrase); + let repo = resolve_repo_path(Some(repo_path)); + let bind_addr: IpAddr = bind_address + .as_deref() + .and_then(|s| s.parse().ok()) + .unwrap_or(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0))); + let timeout = timeout_secs.unwrap_or(300) as u64; + let mdns = enable_mdns.unwrap_or(true); + + let capabilities: Vec = if let Some(json) = capabilities_json { + serde_json::from_str(&json).unwrap_or_else(|_| vec!["sign:commit".to_string()]) + } else { + vec!["sign:commit".to_string()] + }; + + let identity_storage = RegistryIdentityStorage::new(repo.clone()); + let managed = identity_storage + .load_identity() + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + let controller_did = managed.controller_did.to_string(); + + #[allow(clippy::disallowed_methods)] + let now = Utc::now(); + let session_req = build_pairing_session_request( + now, + PairingSessionParams { + controller_did: controller_did.clone(), + registry: "local".to_string(), + capabilities, + expiry_secs: timeout, + }, + ) + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + + let session_id = 
session_req.create_request.session_id.clone(); + let short_code = session_req.create_request.short_code.clone(); + + let mut builder = PairingDaemonBuilder::new().with_rate_limiter(RateLimiter::new(100)); + + let mock_addr = SocketAddr::new(bind_addr, 0); + builder = builder.with_network(MockNetworkInterfaces(bind_addr)); + + if !mdns { + builder = builder.with_discovery(MockNetworkDiscovery(mock_addr)); + } + + let daemon = builder + .build(session_req.create_request) + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + + let token = daemon.token().to_string(); + let (router, handle) = daemon.into_parts(); + + let listener = tokio::net::TcpListener::bind(SocketAddr::new(bind_addr, 0)) + .await + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", format!("Failed to bind: {e}")))?; + let local_addr = listener + .local_addr() + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", format!("No local addr: {e}")))?; + let endpoint = format!("http://{}:{}", local_addr.ip(), local_addr.port()); + + let server_task = tokio::task::spawn(async move { + axum::serve( + listener, + router.into_make_service_with_connect_info::(), + ) + .await + .ok(); + }); + + let session_info = NapiPairingSession { + session_id, + short_code, + endpoint, + token, + controller_did, + }; + + Ok(NapiPairingHandle { + handle: Arc::new(Mutex::new(Some(handle))), + server_task: Arc::new(Mutex::new(Some(server_task))), + session_info, + }) + } + + #[napi(getter)] + pub fn session(&self) -> NapiPairingSession { + self.session_info.clone() + } + + #[napi] + pub async fn wait_for_response( + &self, + timeout_secs: Option, + ) -> napi::Result { + let timeout = Duration::from_secs(timeout_secs.unwrap_or(300) as u64); + + let handle = { + let mut guard = self.handle.lock().await; + guard.take().ok_or_else(|| { + format_error( + "AUTHS_PAIRING_ERROR", + "Pairing handle already consumed or session stopped.", + ) + })? 
+ }; + + let result = handle.wait_for_response(timeout).await; + + match result { + Ok(response) => { + let device_did = response.device_did.clone(); + let device_name = response.device_name.clone(); + let device_pk_hex = + hex::encode(response.device_signing_pubkey.decode().unwrap_or_default()); + Ok(NapiPairingResponse { + device_did, + device_name, + device_public_key_hex: device_pk_hex, + }) + } + Err(e) => Err(format_error("AUTHS_PAIRING_TIMEOUT", e)), + } + } + + #[napi] + pub async fn complete( + &self, + device_did: String, + device_public_key_hex: String, + repo_path: String, + capabilities_json: Option, + passphrase: Option, + ) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let repo = resolve_repo_path(Some(repo_path.clone())); + let env_config = make_env_config(&passphrase_str, &repo_path); + + let capabilities: Vec = if let Some(json) = capabilities_json { + serde_json::from_str(&json).unwrap_or_else(|_| vec!["sign:commit".to_string()]) + } else { + vec!["sign:commit".to_string()] + }; + + let device_pubkey = hex::decode(&device_public_key_hex).map_err(|e| { + format_error( + "AUTHS_PAIRING_ERROR", + format!("Invalid public key hex: {e}"), + ) + })?; + + let identity_storage: Arc = + Arc::new(RegistryIdentityStorage::new(repo.clone())); + + let managed = identity_storage + .load_identity() + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + let controller_identity_did = + IdentityDID::new_unchecked(managed.controller_did.to_string()); + + let keychain = get_keychain(&env_config)?; + let aliases = keychain + .list_aliases_for_identity_with_role(&controller_identity_did, KeyRole::Primary) + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + let identity_key_alias_str = aliases + .into_iter() + .next() + .ok_or_else(|| format_error("AUTHS_PAIRING_ERROR", "No primary signing key found"))?; + let identity_key_alias = KeyAlias::new_unchecked(identity_key_alias_str); + + let key_storage: Arc = Arc::from(keychain); + 
let provider = Arc::new(auths_core::signing::PrefilledPassphraseProvider::new( + &passphrase_str, + )); + + #[allow(clippy::disallowed_methods)] + let now = Utc::now(); + let params = PairingAttestationParams { + identity_storage: identity_storage.clone(), + key_storage: key_storage.clone(), + device_pubkey: &device_pubkey, + device_did_str: &device_did, + capabilities: &capabilities, + identity_key_alias: &identity_key_alias, + passphrase_provider: provider, + }; + + let attestation = create_pairing_attestation(¶ms, now) + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + + let attestation_storage = RegistryAttestationStorage::new(&repo); + use auths_id::attestation::AttestationSink; + attestation_storage + .export( + &auths_verifier::VerifiedAttestation::dangerous_from_unchecked(attestation.clone()), + ) + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + + Ok(NapiPairingResult { + device_did, + device_name: None, + attestation_rid: attestation.rid.to_string(), + }) + } + + #[napi] + pub async fn stop(&self) -> napi::Result<()> { + let mut handle_guard = self.handle.lock().await; + *handle_guard = None; + + let mut task_guard = self.server_task.lock().await; + if let Some(task) = task_guard.take() { + task.abort(); + } + Ok(()) + } +} + +impl Drop for NapiPairingHandle { + fn drop(&mut self) { + if let Ok(mut guard) = self.server_task.try_lock() { + if let Some(task) = guard.take() { + task.abort(); + } + } + } +} + +#[napi] +pub async fn join_pairing_session( + short_code: String, + endpoint: String, + token: String, + repo_path: String, + device_name: Option, + passphrase: Option, +) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let repo = resolve_repo_path(Some(repo_path.clone())); + let env_config = make_env_config(&passphrase_str, &repo_path); + + let identity_storage = RegistryIdentityStorage::new(repo.clone()); + let managed = identity_storage + .load_identity() + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", 
e))?; + + let controller_identity_did = IdentityDID::new_unchecked(managed.controller_did.to_string()); + + let keychain = get_keychain(&env_config)?; + let aliases = keychain + .list_aliases_for_identity_with_role(&controller_identity_did, KeyRole::Primary) + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + let key_alias_str = aliases + .into_iter() + .next() + .ok_or_else(|| format_error("AUTHS_PAIRING_ERROR", "No primary signing key found"))?; + + let (_did, _role, encrypted_key) = keychain + .load_key(&key_alias_str) + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + + let pkcs8_bytes = auths_core::crypto::signer::decrypt_keypair(&encrypted_key, &passphrase_str) + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + + let (seed, pubkey_32) = auths_crypto::parse_ed25519_key_material(&pkcs8_bytes) + .ok() + .and_then(|(seed, maybe_pk)| maybe_pk.map(|pk| (seed, pk))) + .or_else(|| { + let seed = auths_crypto::parse_ed25519_seed(&pkcs8_bytes).ok()?; + let pk = auths_core::crypto::provider_bridge::ed25519_public_key_from_seed_sync(&seed) + .ok()?; + Some((seed, pk)) + }) + .ok_or_else(|| { + format_error( + "AUTHS_PAIRING_ERROR", + "Failed to parse Ed25519 key material", + ) + })?; + + let device_did = auths_verifier::types::DeviceDID::from_ed25519(&pubkey_32); + + let lookup_url = format!("{}/v1/pairing/sessions/by-code/{}", endpoint, short_code); + + let session_data: serde_json::Value = { + let client = reqwest::Client::new(); + let resp = client + .get(&lookup_url) + .send() + .await + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + resp.json::() + .await + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))? + }; + + let session_id = session_data["session_id"] + .as_str() + .ok_or_else(|| format_error("AUTHS_PAIRING_ERROR", "No session_id in response"))? 
+ .to_string(); + + let token_data = &session_data["token"]; + let controller_did_str = token_data["controller_did"] + .as_str() + .unwrap_or("") + .to_string(); + let ephemeral_pubkey_str = token_data["ephemeral_pubkey"] + .as_str() + .unwrap_or("") + .to_string(); + let capabilities: Vec = token_data["capabilities"] + .as_array() + .map(|arr| { + arr.iter() + .filter_map(|v| v.as_str().map(|s| s.to_string())) + .collect() + }) + .unwrap_or_default(); + let expires_at = token_data["expires_at"].as_i64().unwrap_or(0); + + #[allow(clippy::disallowed_methods)] + let now = Utc::now(); + let pairing_token = auths_core::pairing::PairingToken { + controller_did: controller_did_str, + endpoint: endpoint.clone(), + short_code: short_code.clone(), + ephemeral_pubkey: ephemeral_pubkey_str, + expires_at: chrono::DateTime::from_timestamp(expires_at, 0).unwrap_or(now), + capabilities, + }; + + let secure_seed = auths_crypto::SecureSeed::new(*seed.as_bytes()); + let (pairing_response, _shared_secret) = auths_core::pairing::PairingResponse::create( + now, + &pairing_token, + &secure_seed, + &pubkey_32, + device_did.to_string(), + device_name.clone(), + ) + .map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?; + + let submit_req = auths_core::pairing::types::SubmitResponseRequest { + device_x25519_pubkey: auths_core::pairing::types::Base64UrlEncoded::from_raw( + pairing_response.device_x25519_pubkey, + ), + device_signing_pubkey: auths_core::pairing::types::Base64UrlEncoded::from_raw( + pairing_response.device_signing_pubkey, + ), + device_did: pairing_response.device_did.clone(), + signature: auths_core::pairing::types::Base64UrlEncoded::from_raw( + pairing_response.signature, + ), + device_name: pairing_response.device_name, + }; + + let submit_url = format!("{}/v1/pairing/sessions/{}/response", endpoint, session_id); + + { + let client = reqwest::Client::new(); + let resp = client + .post(&submit_url) + .header("X-Pairing-Token", &token) + .json(&submit_req) + .send() + .await 
.map_err(|e| format_error("AUTHS_PAIRING_ERROR", e))?;
        // Non-2xx means the controller rejected the pairing response; surface
        // the status and body for debugging.
        if !resp.status().is_success() {
            let status = resp.status();
            let body = resp.text().await.unwrap_or_default();
            return Err(format_error(
                "AUTHS_PAIRING_ERROR",
                format!("Submit response failed: {} {}", status, body),
            ));
        }
    }

    Ok(NapiPairingResponse {
        device_did: device_did.to_string(),
        device_name,
        device_public_key_hex: hex::encode(pubkey_32),
    })
}
diff --git a/packages/auths-node/src/policy.rs b/packages/auths-node/src/policy.rs
new file mode 100644
index 00000000..0789773e
--- /dev/null
+++ b/packages/auths-node/src/policy.rs
@@ -0,0 +1,147 @@
use napi_derive::napi;

use auths_policy::{
    CanonicalCapability, CanonicalDid, EvalContext, SignerType, compile_from_json, enforce_simple,
};
use chrono::Utc;

use crate::error::format_error;

#[napi(object)]
#[derive(Clone)]
pub struct NapiPolicyDecision {
    pub outcome: String,
    pub reason: String,
    pub message: String,
}

/// Validate a policy document. Returns the input JSON unchanged on success;
/// on failure, aggregates all compile errors into one message.
///
/// NOTE(review): generic type arguments in this file were stripped by the
/// extraction and are reconstructed from usage — confirm against the original.
#[napi]
pub fn compile_policy(policy_json: String) -> napi::Result<String> {
    compile_from_json(policy_json.as_bytes()).map_err(|errors| {
        let msgs: Vec<String> = errors
            .iter()
            .map(|e| format!("{}: {}", e.path, e.message))
            .collect();
        format_error(
            "AUTHS_POLICY_COMPILE_ERROR",
            format!("Policy compilation failed: {}", msgs.join("; ")),
        )
    })?;
    Ok(policy_json)
}

/// Compile a policy and evaluate it against an attestation-shaped context.
///
/// All context fields are optional; unset ones are simply omitted from the
/// `EvalContext`. Returns the decision outcome, reason, and message.
#[napi]
#[allow(clippy::too_many_arguments)]
pub fn evaluate_policy(
    policy_json: String,
    issuer: String,
    subject: String,
    capabilities: Option<Vec<String>>,
    role: Option<String>,
    revoked: Option<bool>,
    expires_at: Option<String>,
    repo: Option<String>,
    environment: Option<String>,
    signer_type: Option<String>,
    delegated_by: Option<String>,
    chain_depth: Option<u32>,
) -> napi::Result<NapiPolicyDecision> {
    let compiled = compile_from_json(policy_json.as_bytes()).map_err(|errors| {
        let msgs: Vec<String> = errors
            .iter()
            .map(|e| format!("{}: {}", e.path, e.message))
            .collect();
        format_error(
            "AUTHS_POLICY_COMPILE_ERROR",
            format!("Policy compilation failed: {}", msgs.join("; ")),
        )
    })?;

    let issuer_did = CanonicalDid::parse(&issuer).map_err(|e| {
        format_error(
            "AUTHS_POLICY_INVALID_DID",
            format!("Invalid issuer DID: {e}"),
        )
    })?;
    let subject_did = CanonicalDid::parse(&subject).map_err(|e| {
        format_error(
            "AUTHS_POLICY_INVALID_DID",
            format!("Invalid subject DID: {e}"),
        )
    })?;

    #[allow(clippy::disallowed_methods)]
    let now = Utc::now();
    let mut ctx = EvalContext::new(now, issuer_did, subject_did).revoked(revoked.unwrap_or(false));

    if let Some(caps) = capabilities {
        for cap_str in &caps {
            let cap = CanonicalCapability::parse(cap_str).map_err(|e| {
                format_error(
                    "AUTHS_POLICY_INVALID_CAPABILITY",
                    format!("Invalid capability '{cap_str}': {e}"),
                )
            })?;
            ctx = ctx.capability(cap);
        }
    }

    if let Some(r) = role {
        ctx = ctx.role(r);
    }

    if let Some(exp) = expires_at {
        let ts: chrono::DateTime<Utc> = exp.parse().map_err(|_| {
            format_error(
                "AUTHS_POLICY_INVALID_TIMESTAMP",
                format!("Invalid expires_at RFC 3339: {exp}"),
            )
        })?;
        ctx = ctx.expires_at(ts);
    }

    if let Some(r) = repo {
        ctx = ctx.repo(r);
    }

    if let Some(env) = environment {
        ctx = ctx.environment(env);
    }

    if let Some(st) = signer_type {
        let parsed = match st.to_lowercase().as_str() {
            "human" => SignerType::Human,
            "agent" => SignerType::Agent,
            "workload" => SignerType::Workload,
            _ => {
                // NOTE(review): this message was split across a chunk boundary
                // in the extracted text; rejoined as a single literal — confirm
                // exact wording against the original.
                return Err(format_error(
                    "AUTHS_POLICY_INVALID_SIGNER_TYPE",
                    format!("Invalid signer_type: '{st}'. Must be 'human', 'agent', or 'workload'"),
                ));
            }
        };
        ctx = ctx.signer_type(parsed);
    }

    if let Some(d) = delegated_by {
        let did = CanonicalDid::parse(&d).map_err(|e| {
            format_error(
                "AUTHS_POLICY_INVALID_DID",
                format!("Invalid delegated_by DID: {e}"),
            )
        })?;
        ctx = ctx.delegated_by(did);
    }

    if let Some(depth) = chain_depth {
        ctx = ctx.chain_depth(depth);
    }

    let decision = enforce_simple(&compiled, &ctx);

    Ok(NapiPolicyDecision {
        outcome: decision.outcome.to_string().to_lowercase(),
        reason: format!("{:?}", decision.reason),
        message: decision.message,
    })
}
diff --git a/packages/auths-node/src/sign.rs b/packages/auths-node/src/sign.rs
new file mode 100644
index 00000000..49fbf88d
--- /dev/null
+++ b/packages/auths-node/src/sign.rs
@@ -0,0 +1,213 @@
use auths_core::signing::{PrefilledPassphraseProvider, SecureSigner, StorageSigner};
use auths_core::storage::keychain::{KeyAlias, get_platform_keychain_with_config};
use auths_verifier::core::MAX_ATTESTATION_JSON_SIZE;
use auths_verifier::types::IdentityDID;
use napi_derive::napi;

use crate::error::format_error;
use crate::helpers::{make_env_config, resolve_passphrase};
use crate::types::{NapiActionEnvelope, NapiCommitSignResult};

/// Build a keychain-backed signer plus a passphrase provider for it.
///
/// NOTE(review): the `StorageSigner<…>` generic argument was stripped by the
/// extraction; `Box<dyn …KeyStorage>` is reconstructed from how
/// `get_platform_keychain_with_config` is used elsewhere — confirm against the
/// original source.
fn make_signer(
    passphrase: &str,
    repo_path: &str,
) -> napi::Result<(
    StorageSigner<Box<dyn auths_core::storage::keychain::KeyStorage>>,
    PrefilledPassphraseProvider,
)> {
    let env_config = make_env_config(passphrase, repo_path);
    let keychain = get_platform_keychain_with_config(&env_config)
        .map_err(|e| format_error("AUTHS_KEYCHAIN_ERROR", format!("Keychain error: {e}")))?;
    let signer = StorageSigner::new(keychain);
    let provider = PrefilledPassphraseProvider::new(passphrase);
    Ok((signer, provider))
}

/// Sign raw bytes with the key bound to `identity_did`; returns hex signature.
#[napi]
pub fn sign_as_identity(
    message: napi::bindgen_prelude::Buffer,
    identity_did: String,
    repo_path: String,
    passphrase: Option<String>,
) -> napi::Result<NapiCommitSignResult> {
    let passphrase_str = resolve_passphrase(passphrase);
    let (signer, provider) = make_signer(&passphrase_str, &repo_path)?;
    let did = IdentityDID::new(&identity_did);

    let sig_bytes = signer
        .sign_for_identity(&did, &provider, message.as_ref())
        .map_err(|e| format_error("AUTHS_SIGNING_FAILED", format!("Signing failed: {e}")))?;

    Ok(NapiCommitSignResult {
        signature: hex::encode(sig_bytes),
        signer_did: identity_did,
    })
}

/// Sign a typed action payload as an identity, returning a signed envelope.
///
/// The signature covers the JCS-canonicalized form of the envelope minus the
/// `signature` field; the returned envelope embeds the hex signature.
#[napi]
pub fn sign_action_as_identity(
    action_type: String,
    payload_json: String,
    identity_did: String,
    repo_path: String,
    passphrase: Option<String>,
) -> napi::Result<NapiActionEnvelope> {
    // Bound input size before parsing to avoid pathological payloads.
    if payload_json.len() > MAX_ATTESTATION_JSON_SIZE {
        return Err(format_error(
            "AUTHS_INVALID_INPUT",
            format!(
                "Payload JSON too large: {} bytes, max {MAX_ATTESTATION_JSON_SIZE}",
                payload_json.len()
            ),
        ));
    }

    let payload: serde_json::Value = serde_json::from_str(&payload_json)
        .map_err(|e| format_error("AUTHS_INVALID_INPUT", format!("Invalid payload JSON: {e}")))?;

    #[allow(clippy::disallowed_methods)] // Presentation boundary
    let timestamp = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true);

    let signing_data = serde_json::json!({
        "version": "1.0",
        "type": action_type,
        "identity": identity_did,
        "payload": payload,
        "timestamp": &timestamp,
    });

    // JCS canonicalization so the signature is stable across serializers.
    let canonical = json_canon::to_string(&signing_data).map_err(|e| {
        format_error(
            "AUTHS_SERIALIZATION_ERROR",
            format!("Canonicalization failed: {e}"),
        )
    })?;

    let passphrase_str = resolve_passphrase(passphrase);
    let (signer, provider) = make_signer(&passphrase_str, &repo_path)?;
    let did = IdentityDID::new(&identity_did);

    let sig_bytes = signer
        .sign_for_identity(&did, &provider, canonical.as_bytes())
        .map_err(|e| format_error("AUTHS_SIGNING_FAILED", format!("Signing failed: {e}")))?;

    let sig_hex = hex::encode(sig_bytes);

    let envelope = serde_json::json!({
        "version": "1.0",
        "type": action_type,
        "identity": identity_did,
        "payload": payload,
        "timestamp": timestamp,
        "signature": sig_hex,
    });
let envelope_json = serde_json::to_string(&envelope).map_err(|e| { + format_error( + "AUTHS_SERIALIZATION_ERROR", + format!("Failed to serialize envelope: {e}"), + ) + })?; + + Ok(NapiActionEnvelope { + envelope_json, + signature_hex: sig_hex, + signer_did: identity_did, + }) +} + +#[napi] +pub fn sign_as_agent( + message: napi::bindgen_prelude::Buffer, + key_alias: String, + repo_path: String, + passphrase: Option, +) -> napi::Result { + let passphrase_str = resolve_passphrase(passphrase); + let (signer, provider) = make_signer(&passphrase_str, &repo_path)?; + let alias = KeyAlias::new(&key_alias) + .map_err(|e| format_error("AUTHS_KEY_NOT_FOUND", format!("Invalid key alias: {e}")))?; + + let sig_bytes = signer + .sign_with_alias(&alias, &provider, message.as_ref()) + .map_err(|e| format_error("AUTHS_SIGNING_FAILED", format!("Signing failed: {e}")))?; + + Ok(NapiCommitSignResult { + signature: hex::encode(sig_bytes), + signer_did: key_alias, + }) +} + +#[napi] +pub fn sign_action_as_agent( + action_type: String, + payload_json: String, + key_alias: String, + agent_did: String, + repo_path: String, + passphrase: Option, +) -> napi::Result { + if payload_json.len() > MAX_ATTESTATION_JSON_SIZE { + return Err(format_error( + "AUTHS_INVALID_INPUT", + format!( + "Payload JSON too large: {} bytes, max {MAX_ATTESTATION_JSON_SIZE}", + payload_json.len() + ), + )); + } + + let payload: serde_json::Value = serde_json::from_str(&payload_json) + .map_err(|e| format_error("AUTHS_INVALID_INPUT", format!("Invalid payload JSON: {e}")))?; + + #[allow(clippy::disallowed_methods)] + let timestamp = chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true); + + let signing_data = serde_json::json!({ + "version": "1.0", + "type": action_type, + "identity": agent_did, + "payload": payload, + "timestamp": ×tamp, + }); + + let canonical = json_canon::to_string(&signing_data).map_err(|e| { + format_error( + "AUTHS_SERIALIZATION_ERROR", + format!("Canonicalization failed: {e}"), 
+ ) + })?; + + let passphrase_str = resolve_passphrase(passphrase); + let (signer, provider) = make_signer(&passphrase_str, &repo_path)?; + let alias = KeyAlias::new(&key_alias) + .map_err(|e| format_error("AUTHS_KEY_NOT_FOUND", format!("Invalid key alias: {e}")))?; + + let sig_bytes = signer + .sign_with_alias(&alias, &provider, canonical.as_bytes()) + .map_err(|e| format_error("AUTHS_SIGNING_FAILED", format!("Signing failed: {e}")))?; + + let sig_hex = hex::encode(sig_bytes); + + let envelope = serde_json::json!({ + "version": "1.0", + "type": action_type, + "identity": agent_did, + "payload": payload, + "timestamp": timestamp, + "signature": sig_hex, + }); + + let envelope_json = serde_json::to_string(&envelope).map_err(|e| { + format_error( + "AUTHS_SERIALIZATION_ERROR", + format!("Failed to serialize envelope: {e}"), + ) + })?; + + Ok(NapiActionEnvelope { + envelope_json, + signature_hex: sig_hex, + signer_did: agent_did, + }) +} diff --git a/packages/auths-node/src/trust.rs b/packages/auths-node/src/trust.rs new file mode 100644 index 00000000..0711c4bc --- /dev/null +++ b/packages/auths-node/src/trust.rs @@ -0,0 +1,174 @@ +use std::path::PathBuf; + +use auths_core::trust::pinned::{PinnedIdentity, PinnedIdentityStore, TrustLevel}; +use auths_id::identity::resolve::{DefaultDidResolver, DidResolver}; +use napi_derive::napi; + +use crate::error::format_error; + +fn resolve_repo(repo_path: &str) -> PathBuf { + PathBuf::from(shellexpand::tilde(repo_path).as_ref()) +} + +fn store_path(repo_path: &str) -> PathBuf { + resolve_repo(repo_path).join("known_identities.json") +} + +fn parse_trust_level(s: &str) -> napi::Result { + match s { + "tofu" => Ok(TrustLevel::Tofu), + "manual" => Ok(TrustLevel::Manual), + "org_policy" => Ok(TrustLevel::OrgPolicy), + _ => Err(format_error( + "AUTHS_INVALID_INPUT", + format!( + "Invalid trust_level '{}': must be one of 'tofu', 'manual', 'org_policy'", + s + ), + )), + } +} + +fn trust_level_str(tl: &TrustLevel) -> &'static str { + 
match tl { + TrustLevel::Tofu => "tofu", + TrustLevel::Manual => "manual", + TrustLevel::OrgPolicy => "org_policy", + } +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiPinnedIdentity { + pub did: String, + pub label: Option, + pub trust_level: String, + pub first_seen: String, + pub kel_sequence: Option, + pub pinned_at: String, +} + +#[napi] +pub fn pin_identity( + did: String, + repo_path: String, + label: Option, + trust_level: Option, +) -> napi::Result { + let tl = parse_trust_level(&trust_level.unwrap_or_else(|| "manual".to_string()))?; + let store = PinnedIdentityStore::new(store_path(&repo_path)); + let repo = resolve_repo(&repo_path); + + let resolver = DefaultDidResolver::with_repo(&repo); + let public_key_hex = match resolver.resolve(&did) { + Ok(resolved) => hex::encode(resolved.public_key().as_bytes()), + Err(_) => String::new(), + }; + + #[allow(clippy::disallowed_methods)] + let now = chrono::Utc::now(); + + if let Ok(Some(existing)) = store.lookup(&did) { + let _ = store.remove(&did); + let pin = PinnedIdentity { + did: did.clone(), + public_key_hex: if public_key_hex.is_empty() { + existing.public_key_hex + } else { + public_key_hex + }, + kel_tip_said: existing.kel_tip_said, + kel_sequence: existing.kel_sequence, + first_seen: existing.first_seen, + origin: label.clone().unwrap_or(existing.origin), + trust_level: tl.clone(), + }; + store + .pin(pin.clone()) + .map_err(|e| format_error("AUTHS_TRUST_ERROR", e))?; + return Ok(NapiPinnedIdentity { + did: pin.did, + label, + trust_level: trust_level_str(&pin.trust_level).to_string(), + first_seen: pin.first_seen.to_rfc3339(), + kel_sequence: pin.kel_sequence.map(|s| s as u32), + pinned_at: now.to_rfc3339(), + }); + } + + let pin = PinnedIdentity { + did: did.clone(), + public_key_hex, + kel_tip_said: None, + kel_sequence: None, + first_seen: now, + origin: label.clone().unwrap_or_else(|| "manual".to_string()), + trust_level: tl.clone(), + }; + + store + .pin(pin) + .map_err(|e| 
format_error("AUTHS_TRUST_ERROR", e))?; + + Ok(NapiPinnedIdentity { + did, + label, + trust_level: trust_level_str(&tl).to_string(), + first_seen: now.to_rfc3339(), + kel_sequence: None, + pinned_at: now.to_rfc3339(), + }) +} + +#[napi] +pub fn remove_pinned_identity(did: String, repo_path: String) -> napi::Result<()> { + let store = PinnedIdentityStore::new(store_path(&repo_path)); + store + .remove(&did) + .map_err(|e| format_error("AUTHS_TRUST_ERROR", e))?; + Ok(()) +} + +#[napi] +pub fn list_pinned_identities(repo_path: String) -> napi::Result { + let store = PinnedIdentityStore::new(store_path(&repo_path)); + let entries = store + .list() + .map_err(|e| format_error("AUTHS_TRUST_ERROR", e))?; + + let json_entries: Vec = entries + .iter() + .map(|e| { + serde_json::json!({ + "did": e.did, + "label": e.origin, + "trust_level": trust_level_str(&e.trust_level), + "first_seen": e.first_seen.to_rfc3339(), + "kel_sequence": e.kel_sequence, + "pinned_at": e.first_seen.to_rfc3339(), + }) + }) + .collect(); + + serde_json::to_string(&json_entries).map_err(|e| format_error("AUTHS_TRUST_ERROR", e)) +} + +#[napi] +pub fn get_pinned_identity( + did: String, + repo_path: String, +) -> napi::Result> { + let store = PinnedIdentityStore::new(store_path(&repo_path)); + let entry = store + .lookup(&did) + .map_err(|e| format_error("AUTHS_TRUST_ERROR", e))?; + + Ok(entry.map(|e| NapiPinnedIdentity { + did: e.did, + label: Some(e.origin), + trust_level: trust_level_str(&e.trust_level).to_string(), + first_seen: e.first_seen.to_rfc3339(), + kel_sequence: e.kel_sequence.map(|s| s as u32), + pinned_at: e.first_seen.to_rfc3339(), + })) +} diff --git a/packages/auths-node/src/types.rs b/packages/auths-node/src/types.rs new file mode 100644 index 00000000..59d02d47 --- /dev/null +++ b/packages/auths-node/src/types.rs @@ -0,0 +1,205 @@ +use napi_derive::napi; + +use auths_verifier::types::{ + ChainLink as RustChainLink, VerificationReport as RustVerificationReport, + VerificationStatus as 
RustVerificationStatus, +}; + +#[napi(object)] +#[derive(Clone)] +pub struct NapiVerificationResult { + pub valid: bool, + pub error: Option, + pub error_code: Option, +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiVerificationStatus { + pub status_type: String, + pub at: Option, + pub step: Option, + pub missing_link: Option, + pub required: Option, + pub verified: Option, +} + +impl NapiVerificationStatus { + pub fn is_valid(&self) -> bool { + self.status_type == "Valid" + } +} + +impl From for NapiVerificationStatus { + fn from(status: RustVerificationStatus) -> Self { + match status { + RustVerificationStatus::Valid => NapiVerificationStatus { + status_type: "Valid".to_string(), + at: None, + step: None, + missing_link: None, + required: None, + verified: None, + }, + RustVerificationStatus::Expired { at } => NapiVerificationStatus { + status_type: "Expired".to_string(), + at: Some(at.to_rfc3339()), + step: None, + missing_link: None, + required: None, + verified: None, + }, + RustVerificationStatus::Revoked { at } => NapiVerificationStatus { + status_type: "Revoked".to_string(), + at: at.map(|t| t.to_rfc3339()), + step: None, + missing_link: None, + required: None, + verified: None, + }, + RustVerificationStatus::InvalidSignature { step } => NapiVerificationStatus { + status_type: "InvalidSignature".to_string(), + at: None, + step: Some(step as u32), + missing_link: None, + required: None, + verified: None, + }, + RustVerificationStatus::BrokenChain { missing_link } => NapiVerificationStatus { + status_type: "BrokenChain".to_string(), + at: None, + step: None, + missing_link: Some(missing_link), + required: None, + verified: None, + }, + RustVerificationStatus::InsufficientWitnesses { required, verified } => { + NapiVerificationStatus { + status_type: "InsufficientWitnesses".to_string(), + at: None, + step: None, + missing_link: None, + required: Some(required as u32), + verified: Some(verified as u32), + } + } + } + } +} + +#[napi(object)] 
+#[derive(Clone)] +pub struct NapiChainLink { + pub issuer: String, + pub subject: String, + pub valid: bool, + pub error: Option, +} + +impl From for NapiChainLink { + fn from(link: RustChainLink) -> Self { + NapiChainLink { + issuer: link.issuer, + subject: link.subject, + valid: link.valid, + error: link.error, + } + } +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiVerificationReport { + pub status: NapiVerificationStatus, + pub chain: Vec, + pub warnings: Vec, +} + +impl NapiVerificationReport { + pub fn is_valid(&self) -> bool { + self.status.is_valid() + } +} + +impl From for NapiVerificationReport { + fn from(report: RustVerificationReport) -> Self { + NapiVerificationReport { + status: report.status.into(), + chain: report.chain.into_iter().map(|l| l.into()).collect(), + warnings: report.warnings, + } + } +} + +// Identity types + +#[napi(object)] +#[derive(Clone)] +pub struct NapiIdentityResult { + pub did: String, + pub key_alias: String, + pub public_key_hex: String, +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiAgentIdentityBundle { + pub agent_did: String, + pub key_alias: String, + pub attestation_json: String, + pub public_key_hex: String, + pub repo_path: Option, +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiDelegatedAgentBundle { + pub agent_did: String, + pub key_alias: String, + pub attestation_json: String, + pub public_key_hex: String, + pub repo_path: Option, +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiRotationResult { + pub controller_did: String, + pub new_key_fingerprint: String, + pub previous_key_fingerprint: String, + pub sequence: i64, +} + +// Device types + +#[napi(object)] +#[derive(Clone)] +pub struct NapiLinkResult { + pub device_did: String, + pub attestation_id: String, +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiExtensionResult { + pub device_did: String, + pub new_expires_at: String, + pub previous_expires_at: Option, +} + +// Signing types + +#[napi(object)] 
+#[derive(Clone)] +pub struct NapiCommitSignResult { + pub signature: String, + pub signer_did: String, +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiActionEnvelope { + pub envelope_json: String, + pub signature_hex: String, + pub signer_did: String, +} diff --git a/packages/auths-node/src/verify.rs b/packages/auths-node/src/verify.rs new file mode 100644 index 00000000..92ce4464 --- /dev/null +++ b/packages/auths-node/src/verify.rs @@ -0,0 +1,446 @@ +use auths_verifier::core::{ + Attestation, Capability, MAX_ATTESTATION_JSON_SIZE, MAX_JSON_BATCH_SIZE, +}; +use auths_verifier::error::AuthsErrorInfo; +use auths_verifier::types::DeviceDID; +use auths_verifier::verify::{ + verify_at_time as rust_verify_at_time, verify_chain as rust_verify_chain, + verify_chain_with_capability as rust_verify_chain_with_capability, + verify_chain_with_witnesses as rust_verify_chain_with_witnesses, + verify_device_authorization as rust_verify_device_authorization, + verify_with_capability as rust_verify_with_capability, verify_with_keys, +}; +use auths_verifier::witness::{WitnessReceipt, WitnessVerifyConfig}; +use chrono::{DateTime, Utc}; +use napi_derive::napi; + +use crate::error::format_error; +use crate::types::{NapiVerificationReport, NapiVerificationResult}; + +fn decode_pk_hex(hex_str: &str, label: &str) -> napi::Result> { + let bytes = hex::decode(hex_str) + .map_err(|e| format_error("AUTHS_INVALID_INPUT", format!("Invalid {label} hex: {e}")))?; + if bytes.len() != 32 { + return Err(format_error( + "AUTHS_INVALID_INPUT", + format!( + "Invalid {label} length: expected 32 bytes (64 hex chars), got {}", + bytes.len() + ), + )); + } + Ok(bytes) +} + +fn parse_attestations(jsons: &[String]) -> napi::Result> { + jsons + .iter() + .enumerate() + .map(|(i, json)| { + serde_json::from_str(json).map_err(|e| { + format_error( + "AUTHS_SERIALIZATION_ERROR", + format!("Failed to parse attestation {i}: {e}"), + ) + }) + }) + .collect() +} + +fn check_batch_size(jsons: &[String]) -> 
napi::Result<()> { + let total: usize = jsons.iter().map(|s| s.len()).sum(); + if total > MAX_JSON_BATCH_SIZE { + return Err(format_error( + "AUTHS_INVALID_INPUT", + format!("Total attestation JSON too large: {total} bytes, max {MAX_JSON_BATCH_SIZE}"), + )); + } + Ok(()) +} + +#[napi] +pub async fn verify_attestation( + attestation_json: String, + issuer_pk_hex: String, +) -> napi::Result { + if attestation_json.len() > MAX_ATTESTATION_JSON_SIZE { + return Err(format_error( + "AUTHS_INVALID_INPUT", + format!( + "Attestation JSON too large: {} bytes, max {}", + attestation_json.len(), + MAX_ATTESTATION_JSON_SIZE + ), + )); + } + + let issuer_pk_bytes = decode_pk_hex(&issuer_pk_hex, "issuer public key")?; + + let att: Attestation = match serde_json::from_str(&attestation_json) { + Ok(att) => att, + Err(e) => { + return Ok(NapiVerificationResult { + valid: false, + error: Some(format!("Failed to parse attestation JSON: {e}")), + error_code: Some("AUTHS_SERIALIZATION_ERROR".to_string()), + }); + } + }; + + match verify_with_keys(&att, &issuer_pk_bytes).await { + Ok(_) => Ok(NapiVerificationResult { + valid: true, + error: None, + error_code: None, + }), + Err(e) => Ok(NapiVerificationResult { + valid: false, + error_code: Some(e.error_code().to_string()), + error: Some(e.to_string()), + }), + } +} + +#[napi] +pub async fn verify_chain( + attestations_json: Vec, + root_pk_hex: String, +) -> napi::Result { + check_batch_size(&attestations_json)?; + let root_pk_bytes = decode_pk_hex(&root_pk_hex, "root public key")?; + let attestations = parse_attestations(&attestations_json)?; + + match rust_verify_chain(&attestations, &root_pk_bytes).await { + Ok(report) => Ok(report.into()), + Err(e) => Err(format_error( + e.error_code(), + format!("Chain verification failed: {e}"), + )), + } +} + +#[napi] +pub async fn verify_device_authorization( + identity_did: String, + device_did: String, + attestations_json: Vec, + identity_pk_hex: String, +) -> napi::Result { + 
check_batch_size(&attestations_json)?; + let identity_pk_bytes = decode_pk_hex(&identity_pk_hex, "identity public key")?; + let attestations = parse_attestations(&attestations_json)?; + let device = DeviceDID::new(&device_did); + + match rust_verify_device_authorization( + &identity_did, + &device, + &attestations, + &identity_pk_bytes, + ) + .await + { + Ok(report) => Ok(report.into()), + Err(e) => Err(format_error( + e.error_code(), + format!("Device authorization verification failed: {e}"), + )), + } +} + +#[napi] +pub async fn verify_attestation_with_capability( + attestation_json: String, + issuer_pk_hex: String, + required_capability: String, +) -> napi::Result { + if attestation_json.len() > MAX_ATTESTATION_JSON_SIZE { + return Err(format_error( + "AUTHS_INVALID_INPUT", + format!( + "Attestation JSON too large: {} bytes, max {}", + attestation_json.len(), + MAX_ATTESTATION_JSON_SIZE + ), + )); + } + + let issuer_pk_bytes = decode_pk_hex(&issuer_pk_hex, "issuer public key")?; + + let att: Attestation = match serde_json::from_str(&attestation_json) { + Ok(att) => att, + Err(e) => { + return Ok(NapiVerificationResult { + valid: false, + error: Some(format!("Failed to parse attestation JSON: {e}")), + error_code: Some("AUTHS_SERIALIZATION_ERROR".to_string()), + }); + } + }; + + let cap = Capability::parse(&required_capability).map_err(|e| { + format_error( + "AUTHS_INVALID_INPUT", + format!("Invalid capability '{required_capability}': {e}"), + ) + })?; + + match rust_verify_with_capability(&att, &cap, &issuer_pk_bytes).await { + Ok(_) => Ok(NapiVerificationResult { + valid: true, + error: None, + error_code: None, + }), + Err(e) => Ok(NapiVerificationResult { + valid: false, + error_code: Some(e.error_code().to_string()), + error: Some(e.to_string()), + }), + } +} + +#[napi] +pub async fn verify_chain_with_capability( + attestations_json: Vec, + root_pk_hex: String, + required_capability: String, +) -> napi::Result { + check_batch_size(&attestations_json)?; + 
let root_pk_bytes = decode_pk_hex(&root_pk_hex, "root public key")?; + let attestations = parse_attestations(&attestations_json)?; + + let cap = Capability::parse(&required_capability).map_err(|e| { + format_error( + "AUTHS_INVALID_INPUT", + format!("Invalid capability '{required_capability}': {e}"), + ) + })?; + + match rust_verify_chain_with_capability(&attestations, &cap, &root_pk_bytes).await { + Ok(report) => Ok(report.into()), + Err(e) => Err(format_error( + e.error_code(), + format!("Chain verification with capability failed: {e}"), + )), + } +} + +fn parse_rfc3339_timestamp(at_rfc3339: &str) -> napi::Result> { + let at: DateTime = at_rfc3339.parse::>().map_err(|_| { + if at_rfc3339.contains(' ') && !at_rfc3339.contains('T') { + format_error( + "AUTHS_INVALID_INPUT", + format!( + "Expected RFC 3339 format like '2024-06-15T00:00:00Z', got '{at_rfc3339}'. \ + Hint: use 'T' between date and time, and append 'Z' or a UTC offset." + ), + ) + } else { + format_error( + "AUTHS_INVALID_INPUT", + format!( + "Expected RFC 3339 format like '2024-06-15T00:00:00Z', got '{at_rfc3339}'." + ), + ) + } + })?; + + #[allow(clippy::disallowed_methods)] // Presentation boundary + let now = Utc::now(); + let skew_tolerance = chrono::Duration::seconds(60); + if at > now + skew_tolerance { + return Err(format_error( + "AUTHS_INVALID_INPUT", + format!( + "Timestamp {at_rfc3339} is in the future. \ + Time-pinned verification requires a past or present timestamp." 
+ ), + )); + } + + Ok(at) +} + +#[napi] +pub async fn verify_at_time( + attestation_json: String, + issuer_pk_hex: String, + at_rfc3339: String, +) -> napi::Result { + if attestation_json.len() > MAX_ATTESTATION_JSON_SIZE { + return Err(format_error( + "AUTHS_INVALID_INPUT", + format!( + "Attestation JSON too large: {} bytes, max {}", + attestation_json.len(), + MAX_ATTESTATION_JSON_SIZE + ), + )); + } + + let at = parse_rfc3339_timestamp(&at_rfc3339)?; + let issuer_pk_bytes = decode_pk_hex(&issuer_pk_hex, "issuer public key")?; + + let att: Attestation = match serde_json::from_str(&attestation_json) { + Ok(att) => att, + Err(e) => { + return Ok(NapiVerificationResult { + valid: false, + error: Some(format!("Failed to parse attestation JSON: {e}")), + error_code: Some("AUTHS_SERIALIZATION_ERROR".to_string()), + }); + } + }; + + match rust_verify_at_time(&att, &issuer_pk_bytes, at).await { + Ok(_) => Ok(NapiVerificationResult { + valid: true, + error: None, + error_code: None, + }), + Err(e) => Ok(NapiVerificationResult { + valid: false, + error_code: Some(e.error_code().to_string()), + error: Some(e.to_string()), + }), + } +} + +#[napi] +pub async fn verify_at_time_with_capability( + attestation_json: String, + issuer_pk_hex: String, + at_rfc3339: String, + required_capability: String, +) -> napi::Result { + if attestation_json.len() > MAX_ATTESTATION_JSON_SIZE { + return Err(format_error( + "AUTHS_INVALID_INPUT", + format!( + "Attestation JSON too large: {} bytes, max {}", + attestation_json.len(), + MAX_ATTESTATION_JSON_SIZE + ), + )); + } + + let at = parse_rfc3339_timestamp(&at_rfc3339)?; + let issuer_pk_bytes = decode_pk_hex(&issuer_pk_hex, "issuer public key")?; + + let att: Attestation = match serde_json::from_str(&attestation_json) { + Ok(att) => att, + Err(e) => { + return Ok(NapiVerificationResult { + valid: false, + error: Some(format!("Failed to parse attestation JSON: {e}")), + error_code: Some("AUTHS_SERIALIZATION_ERROR".to_string()), + }); + } + }; + 
+ let cap = Capability::parse(&required_capability).map_err(|e| { + format_error( + "AUTHS_INVALID_INPUT", + format!("Invalid capability '{required_capability}': {e}"), + ) + })?; + + match rust_verify_at_time(&att, &issuer_pk_bytes, at).await { + Ok(_) => { + if att.capabilities.contains(&cap) { + Ok(NapiVerificationResult { + valid: true, + error: None, + error_code: None, + }) + } else { + Ok(NapiVerificationResult { + valid: false, + error: Some(format!( + "Attestation does not grant required capability '{required_capability}'" + )), + error_code: Some("AUTHS_MISSING_CAPABILITY".to_string()), + }) + } + } + Err(e) => Ok(NapiVerificationResult { + valid: false, + error_code: Some(e.error_code().to_string()), + error: Some(e.to_string()), + }), + } +} + +#[napi] +pub async fn verify_chain_with_witnesses( + attestations_json: Vec, + root_pk_hex: String, + receipts_json: Vec, + witness_keys_json: Vec, + threshold: u32, +) -> napi::Result { + check_batch_size(&attestations_json)?; + let root_pk_bytes = decode_pk_hex(&root_pk_hex, "root public key")?; + let attestations = parse_attestations(&attestations_json)?; + + let receipts: Vec = receipts_json + .iter() + .enumerate() + .map(|(i, json)| { + serde_json::from_str(json).map_err(|e| { + format_error( + "AUTHS_SERIALIZATION_ERROR", + format!("Failed to parse witness receipt {i}: {e}"), + ) + }) + }) + .collect::>>()?; + + #[derive(serde::Deserialize)] + struct WitnessKeyInput { + did: String, + public_key_hex: String, + } + + let witness_keys: Vec<(String, Vec)> = witness_keys_json + .iter() + .enumerate() + .map(|(i, json)| { + let input: WitnessKeyInput = serde_json::from_str(json).map_err(|e| { + format_error( + "AUTHS_SERIALIZATION_ERROR", + format!("Failed to parse witness key {i}: {e}"), + ) + })?; + let pk_bytes = hex::decode(&input.public_key_hex).map_err(|e| { + format_error( + "AUTHS_INVALID_INPUT", + format!("Invalid witness key {i} hex: {e}"), + ) + })?; + if pk_bytes.len() != 32 { + return 
Err(format_error( + "AUTHS_INVALID_INPUT", + format!( + "Invalid witness key {i} length: expected 32 bytes, got {}", + pk_bytes.len() + ), + )); + } + Ok((input.did, pk_bytes)) + }) + .collect::>>()?; + + let config = WitnessVerifyConfig { + receipts: &receipts, + witness_keys: &witness_keys, + threshold: threshold as usize, + }; + + match rust_verify_chain_with_witnesses(&attestations, &root_pk_bytes, &config).await { + Ok(report) => Ok(report.into()), + Err(e) => Err(format_error( + e.error_code(), + format!("Chain verification with witnesses failed: {e}"), + )), + } +} diff --git a/packages/auths-node/src/witness.rs b/packages/auths-node/src/witness.rs new file mode 100644 index 00000000..b5c2d3cf --- /dev/null +++ b/packages/auths-node/src/witness.rs @@ -0,0 +1,138 @@ +use std::path::PathBuf; + +use auths_id::storage::identity::IdentityStorage; +use auths_id::witness_config::WitnessConfig; +use auths_storage::git::RegistryIdentityStorage; +use napi_derive::napi; + +use crate::error::format_error; + +fn resolve_repo(repo_path: &str) -> PathBuf { + PathBuf::from(shellexpand::tilde(repo_path).as_ref()) +} + +fn load_witness_config(repo_path: &PathBuf) -> napi::Result { + let storage = RegistryIdentityStorage::new(repo_path); + let identity = storage + .load_identity() + .map_err(|e| format_error("AUTHS_WITNESS_ERROR", e))?; + + if let Some(wc) = identity + .metadata + .as_ref() + .and_then(|m| m.get("witness_config")) + { + let config: WitnessConfig = serde_json::from_value(wc.clone()) + .map_err(|e| format_error("AUTHS_WITNESS_ERROR", e))?; + return Ok(config); + } + Ok(WitnessConfig::default()) +} + +fn save_witness_config(repo_path: &PathBuf, config: &WitnessConfig) -> napi::Result<()> { + let storage = RegistryIdentityStorage::new(repo_path); + let mut identity = storage + .load_identity() + .map_err(|e| format_error("AUTHS_WITNESS_ERROR", e))?; + + let metadata = identity + .metadata + .get_or_insert_with(|| serde_json::json!({})); + if let Some(obj) = 
metadata.as_object_mut() { + obj.insert( + "witness_config".to_string(), + serde_json::to_value(config).map_err(|e| format_error("AUTHS_WITNESS_ERROR", e))?, + ); + } + + storage + .create_identity(identity.controller_did.as_str(), identity.metadata) + .map_err(|e| format_error("AUTHS_WITNESS_ERROR", e))?; + Ok(()) +} + +#[napi(object)] +#[derive(Clone)] +pub struct NapiWitnessResult { + pub url: String, + pub did: Option, + pub label: Option, +} + +#[napi] +pub fn add_witness( + url_str: String, + repo_path: String, + label: Option, +) -> napi::Result { + let repo = resolve_repo(&repo_path); + let parsed_url: url::Url = url_str.parse().map_err(|e| { + format_error( + "AUTHS_WITNESS_ERROR", + format!("Invalid URL '{}': {}", url_str, e), + ) + })?; + + let mut config = load_witness_config(&repo)?; + + if config.witness_urls.contains(&parsed_url) { + return Ok(NapiWitnessResult { + url: url_str, + did: None, + label, + }); + } + + config.witness_urls.push(parsed_url); + if config.threshold == 0 { + config.threshold = 1; + } + + save_witness_config(&repo, &config)?; + Ok(NapiWitnessResult { + url: url_str, + did: None, + label, + }) +} + +#[napi] +pub fn remove_witness(url_str: String, repo_path: String) -> napi::Result<()> { + let repo = resolve_repo(&repo_path); + let parsed_url: url::Url = url_str.parse().map_err(|e| { + format_error( + "AUTHS_WITNESS_ERROR", + format!("Invalid URL '{}': {}", url_str, e), + ) + })?; + + let mut config = load_witness_config(&repo)?; + config.witness_urls.retain(|u| u != &parsed_url); + + if config.threshold > config.witness_urls.len() { + config.threshold = config.witness_urls.len(); + } + + save_witness_config(&repo, &config)?; + Ok(()) +} + +#[napi] +pub fn list_witnesses(repo_path: String) -> napi::Result { + let repo = resolve_repo(&repo_path); + let config = load_witness_config(&repo)?; + + let entries: Vec = config + .witness_urls + .iter() + .map(|u| { + serde_json::json!({ + "url": u.to_string(), + "did": null, + "label": 
null, + }) + }) + .collect(); + + serde_json::to_string(&entries).map_err(|e| format_error("AUTHS_WITNESS_ERROR", e)) +} diff --git a/packages/auths-node/tsconfig.json b/packages/auths-node/tsconfig.json new file mode 100644 index 00000000..6342b529 --- /dev/null +++ b/packages/auths-node/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "commonjs", + "moduleResolution": "node", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "outDir": "./dist", + "rootDir": "." + }, + "include": ["lib/**/*.ts"], + "exclude": ["node_modules", "__tests__", "dist"] +} diff --git a/packages/auths-node/vitest.config.ts b/packages/auths-node/vitest.config.ts new file mode 100644 index 00000000..c6ed9cf2 --- /dev/null +++ b/packages/auths-node/vitest.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vitest/config' + +export default defineConfig({ + test: { + pool: 'forks', + poolOptions: { + forks: { + singleFork: true, + }, + }, + }, +}) diff --git a/packages/auths-python/python/auths/_client.py b/packages/auths-python/python/auths/_client.py index 3894a610..0175d07c 100644 --- a/packages/auths-python/python/auths/_client.py +++ b/packages/auths-python/python/auths/_client.py @@ -411,7 +411,7 @@ def get_public_key( pp = passphrase or self._passphrase try: - return get_identity_public_key(identity, pp) + return get_identity_public_key(identity, self.repo_path, pp) except (ValueError, RuntimeError) as exc: raise _map_error(exc, default_cls=CryptoError) from exc @@ -448,7 +448,7 @@ def sign_as_agent( pp = passphrase or self._passphrase try: - return _sign_as_agent(message, key_alias, pp) + return _sign_as_agent(message, key_alias, self.repo_path, pp) except (ValueError, RuntimeError) as exc: raise _map_error(exc, default_cls=CryptoError) from exc @@ -486,7 +486,7 @@ def sign_action_as_agent( pp = passphrase 
or self._passphrase try: - return _sign_action_as_agent(action_type, payload, key_alias, agent_did, pp) + return _sign_action_as_agent(action_type, payload, key_alias, agent_did, self.repo_path, pp) except (ValueError, RuntimeError) as exc: raise _map_error(exc, default_cls=CryptoError) from exc diff --git a/packages/auths-python/src/artifact_sign.rs b/packages/auths-python/src/artifact_sign.rs index af9dd95d..a1f71135 100644 --- a/packages/auths-python/src/artifact_sign.rs +++ b/packages/auths-python/src/artifact_sign.rs @@ -138,20 +138,23 @@ fn build_context_and_sign( let repo = PathBuf::from(shellexpand::tilde(repo_path).as_ref()); let config = RegistryConfig::single_tenant(&repo); - let backend = Arc::new( - GitRegistryBackend::open_existing(config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}")))?, - ); + let backend = Arc::new(GitRegistryBackend::open_existing(config).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}" + )) + })?); - let keychain = get_platform_keychain_with_config(&env_config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")))?; + let keychain = get_platform_keychain_with_config(&env_config).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")) + })?; let keychain = Arc::from(keychain); let identity_storage = Arc::new(RegistryIdentityStorage::new(&repo)); let attestation_storage = Arc::new(RegistryAttestationStorage::new(&repo)); - let alias = KeyAlias::new(identity_key_alias) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}")))?; + let alias = KeyAlias::new(identity_key_alias).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}")) + })?; let ctx = AuthsContext::builder() .registry(backend) @@ -176,8 +179,11 @@ fn build_context_and_sign( note, }; - let result = 
sdk_sign_artifact(params, &ctx) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SIGNING_FAILED] Artifact signing failed: {e}")))?; + let result = sdk_sign_artifact(params, &ctx).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_SIGNING_FAILED] Artifact signing failed: {e}" + )) + })?; Ok(PyArtifactResult { attestation_json: result.attestation_json, diff --git a/packages/auths-python/src/attestation_query.rs b/packages/auths-python/src/attestation_query.rs index 2f614cc0..614452c5 100644 --- a/packages/auths-python/src/attestation_query.rs +++ b/packages/auths-python/src/attestation_query.rs @@ -76,8 +76,11 @@ fn attestation_to_py(att: &Attestation) -> PyAttestation { fn open_attestation_storage(repo_path: &str) -> PyResult> { let repo = PathBuf::from(shellexpand::tilde(repo_path).as_ref()); let config = RegistryConfig::single_tenant(&repo); - let _backend = GitRegistryBackend::open_existing(config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}")))?; + let _backend = GitRegistryBackend::open_existing(config).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}" + )) + })?; Ok(Arc::new(RegistryAttestationStorage::new(&repo))) } @@ -94,9 +97,11 @@ fn open_attestation_storage(repo_path: &str) -> PyResult, repo_path: &str) -> PyResult> { let storage = open_attestation_storage(repo_path)?; py.allow_threads(|| { - let all = storage - .load_all_attestations() - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to load attestations: {e}")))?; + let all = storage.load_all_attestations().map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to load attestations: {e}" + )) + })?; Ok(all.iter().map(attestation_to_py).collect()) }) } @@ -119,9 +124,11 @@ pub fn list_attestations_by_device( ) -> PyResult> { let storage = open_attestation_storage(repo_path)?; py.allow_threads(|| { - let all = storage - 
.load_all_attestations() - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to load attestations: {e}")))?; + let all = storage.load_all_attestations().map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to load attestations: {e}" + )) + })?; let group = AttestationGroup::from_list(all); Ok(group .get(device_did) @@ -148,9 +155,11 @@ pub fn get_latest_attestation( ) -> PyResult> { let storage = open_attestation_storage(repo_path)?; py.allow_threads(|| { - let all = storage - .load_all_attestations() - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to load attestations: {e}")))?; + let all = storage.load_all_attestations().map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to load attestations: {e}" + )) + })?; let group = AttestationGroup::from_list(all); let did = DeviceDID(device_did.to_string()); Ok(group.latest(&did).map(attestation_to_py)) diff --git a/packages/auths-python/src/audit.rs b/packages/auths-python/src/audit.rs index 34bb5bd9..ed97a1b9 100644 --- a/packages/auths-python/src/audit.rs +++ b/packages/auths-python/src/audit.rs @@ -57,18 +57,20 @@ pub fn generate_audit_report( } } if let Some(since_dt) = since_filter { - if let Ok(ct) = - chrono::NaiveDateTime::parse_from_str(&c.timestamp[..19], "%Y-%m-%dT%H:%M:%S") - { + if let Ok(ct) = chrono::NaiveDateTime::parse_from_str( + &c.timestamp[..19], + "%Y-%m-%dT%H:%M:%S", + ) { if ct < since_dt { return false; } } } if let Some(until_dt) = until_filter { - if let Ok(ct) = - chrono::NaiveDateTime::parse_from_str(&c.timestamp[..19], "%Y-%m-%dT%H:%M:%S") - { + if let Ok(ct) = chrono::NaiveDateTime::parse_from_str( + &c.timestamp[..19], + "%Y-%m-%dT%H:%M:%S", + ) { if ct > until_dt { return false; } @@ -82,9 +84,7 @@ pub fn generate_audit_report( (Some("auths"), Some(signer_did.as_str()), Some(true)) } SignatureStatus::SshSigned => (Some("ssh"), None, None), - SignatureStatus::GpgSigned { 
verified } => { - (Some("gpg"), None, Some(*verified)) - } + SignatureStatus::GpgSigned { verified } => (Some("gpg"), None, Some(*verified)), SignatureStatus::InvalidSignature { .. } => { (Some("invalid"), None, Some(false)) } @@ -122,10 +122,7 @@ pub fn generate_audit_report( .iter() .filter(|c| c["signature_type"] == "ssh") .count(); - let verification_passed = commits - .iter() - .filter(|c| c["verified"] == true) - .count(); + let verification_passed = commits.iter().filter(|c| c["verified"] == true).count(); let verification_failed = signed - verification_passed; serde_json::json!({ diff --git a/packages/auths-python/src/commit_sign.rs b/packages/auths-python/src/commit_sign.rs index f0e993b2..c5a03672 100644 --- a/packages/auths-python/src/commit_sign.rs +++ b/packages/auths-python/src/commit_sign.rs @@ -65,8 +65,9 @@ pub fn sign_commit( let env_config = make_keychain_config(&passphrase_str, repo_path); let provider = Arc::new(PrefilledPassphraseProvider::new(&passphrase_str)); - let keychain = get_platform_keychain_with_config(&env_config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")))?; + let keychain = get_platform_keychain_with_config(&env_config).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")) + })?; let keychain = Arc::from(keychain); let repo = PathBuf::from(shellexpand::tilde(repo_path).as_ref()); @@ -84,8 +85,9 @@ pub fn sign_commit( let now = chrono::Utc::now(); py.allow_threads(move || { - let pem = CommitSigningWorkflow::execute(&signing_ctx, params, now) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SIGNING_FAILED] Commit signing failed: {e}")))?; + let pem = CommitSigningWorkflow::execute(&signing_ctx, params, now).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_SIGNING_FAILED] Commit signing failed: {e}")) + })?; Ok(PyCommitSignResult { signature_pem: pem, diff --git a/packages/auths-python/src/device_ext.rs 
b/packages/auths-python/src/device_ext.rs index 07cb05e3..3e9a6386 100644 --- a/packages/auths-python/src/device_ext.rs +++ b/packages/auths-python/src/device_ext.rs @@ -71,20 +71,23 @@ pub fn extend_device_authorization_ffi( let repo = PathBuf::from(shellexpand::tilde(repo_path).as_ref()); let config = RegistryConfig::single_tenant(&repo); - let backend = Arc::new( - GitRegistryBackend::open_existing(config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}")))?, - ); + let backend = Arc::new(GitRegistryBackend::open_existing(config).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}" + )) + })?); - let keychain = get_platform_keychain_with_config(&env_config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")))?; + let keychain = get_platform_keychain_with_config(&env_config).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")) + })?; let keychain = Arc::from(keychain); let identity_storage = Arc::new(RegistryIdentityStorage::new(&repo)); let attestation_storage = Arc::new(RegistryAttestationStorage::new(&repo)); - let alias = KeyAlias::new(identity_key_alias) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}")))?; + let alias = KeyAlias::new(identity_key_alias).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}")) + })?; let ext_config = DeviceExtensionConfig { repo_path: repo, @@ -105,8 +108,9 @@ pub fn extend_device_authorization_ffi( .build(); py.allow_threads(|| { - let result = extend_device(ext_config, &ctx, clock.as_ref()) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_DEVICE_ERROR] Device extension failed: {e}")))?; + let result = extend_device(ext_config, &ctx, clock.as_ref()).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_DEVICE_ERROR] Device extension failed: {e}")) 
+ })?; Ok(PyDeviceExtension { device_did: result.device_did.to_string(), diff --git a/packages/auths-python/src/diagnostics.rs b/packages/auths-python/src/diagnostics.rs index 6d272e58..e73caa06 100644 --- a/packages/auths-python/src/diagnostics.rs +++ b/packages/auths-python/src/diagnostics.rs @@ -47,9 +47,7 @@ impl CryptoDiagnosticProvider for FfiDiagnosticAdapter { fn check_ssh_keygen_available(&self) -> Result { let output = Command::new("ssh-keygen").arg("-V").output(); let (passed, message) = match output { - Ok(out) if out.status.success() => { - (true, Some("ssh-keygen found on PATH".to_string())) - } + Ok(out) if out.status.success() => (true, Some("ssh-keygen found on PATH".to_string())), _ => ( false, Some("ssh-keygen command not found on PATH".to_string()), @@ -66,10 +64,7 @@ impl CryptoDiagnosticProvider for FfiDiagnosticAdapter { #[pyfunction] #[pyo3(signature = (repo_path,))] -pub fn run_diagnostics( - py: Python<'_>, - repo_path: &str, -) -> PyResult { +pub fn run_diagnostics(py: Python<'_>, repo_path: &str) -> PyResult { let _repo = repo_path.to_string(); py.allow_threads(move || { diff --git a/packages/auths-python/src/identity.rs b/packages/auths-python/src/identity.rs index be961527..2536f20e 100644 --- a/packages/auths-python/src/identity.rs +++ b/packages/auths-python/src/identity.rs @@ -55,18 +55,18 @@ pub(crate) fn resolve_key_alias( let did = IdentityDID::new_unchecked(identity_ref.to_string()); let aliases = keychain .list_aliases_for_identity_with_role(&did, KeyRole::Primary) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Key lookup failed: {e}")))?; - aliases - .into_iter() - .next() - .ok_or_else(|| { - PyRuntimeError::new_err(format!( - "[AUTHS_KEY_NOT_FOUND] No primary key found for identity '{identity_ref}'" - )) - }) + .map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Key lookup failed: {e}")) + })?; + aliases.into_iter().next().ok_or_else(|| { + PyRuntimeError::new_err(format!( + 
"[AUTHS_KEY_NOT_FOUND] No primary key found for identity '{identity_ref}'" + )) + }) } else { - KeyAlias::new(identity_ref) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}"))) + KeyAlias::new(identity_ref).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}")) + }) } } @@ -134,25 +134,33 @@ pub fn create_identity( ) -> PyResult<(String, String, String)> { let passphrase_str = resolve_passphrase(passphrase); let env_config = make_keychain_config(&passphrase_str, repo_path); - let alias = KeyAlias::new(key_alias) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}")))?; + let alias = KeyAlias::new(key_alias).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}")) + })?; let provider = PrefilledPassphraseProvider::new(&passphrase_str); let repo = PathBuf::from(shellexpand::tilde(repo_path).as_ref()); let config = RegistryConfig::single_tenant(&repo); let backend = GitRegistryBackend::from_config_unchecked(config); - backend - .init_if_needed() - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to initialize registry: {e}")))?; + backend.init_if_needed().map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to initialize registry: {e}" + )) + })?; let backend = Arc::new(backend); - let keychain = get_platform_keychain_with_config(&env_config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")))?; + let keychain = get_platform_keychain_with_config(&env_config).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")) + })?; py.allow_threads(|| { let (identity_did, result_alias) = initialize_registry_identity(backend, &alias, &provider, keychain.as_ref(), None) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_IDENTITY_ERROR] Identity creation failed: {e}")))?; + 
.map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_IDENTITY_ERROR] Identity creation failed: {e}" + )) + })?; // Extract public key so callers can verify signatures immediately let pub_bytes = auths_core::storage::keychain::extract_public_key_bytes( @@ -160,9 +168,17 @@ pub fn create_identity( &result_alias, &provider, ) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Public key extraction failed: {e}")))?; + .map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_CRYPTO_ERROR] Public key extraction failed: {e}" + )) + })?; - Ok((identity_did.to_string(), result_alias.to_string(), hex::encode(pub_bytes))) + Ok(( + identity_did.to_string(), + result_alias.to_string(), + hex::encode(pub_bytes), + )) }) } @@ -195,31 +211,44 @@ pub fn create_agent_identity( let repo = PathBuf::from(shellexpand::tilde(repo_path).as_ref()); let config = RegistryConfig::single_tenant(&repo); let backend = GitRegistryBackend::from_config_unchecked(config); - backend - .init_if_needed() - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to initialize registry: {e}")))?; + backend.init_if_needed().map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to initialize registry: {e}" + )) + })?; let backend = Arc::new(backend); - let keychain = get_platform_keychain_with_config(&env_config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")))?; + let keychain = get_platform_keychain_with_config(&env_config).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")) + })?; // Validate capabilities let _parsed_caps: Vec = capabilities .iter() .map(|c| { - Capability::parse(c) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_INVALID_INPUT] Invalid capability '{c}': {e}"))) + Capability::parse(c).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_INVALID_INPUT] Invalid capability '{c}': {e}" + )) + }) }) .collect::>>()?; 
let parsed_caps_for_att = _parsed_caps; py.allow_threads(|| { - let (identity_did, result_alias) = - initialize_registry_identity(backend.clone(), &alias, &provider, keychain.as_ref(), None) - .map_err(|e| { - PyRuntimeError::new_err(format!("[AUTHS_IDENTITY_ERROR] Agent identity creation failed: {e}")) - })?; + let (identity_did, result_alias) = initialize_registry_identity( + backend.clone(), + &alias, + &provider, + keychain.as_ref(), + None, + ) + .map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_IDENTITY_ERROR] Agent identity creation failed: {e}" + )) + })?; // Extract public key let pub_bytes = auths_core::storage::keychain::extract_public_key_bytes( @@ -227,15 +256,18 @@ pub fn create_agent_identity( &result_alias, &provider, ) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Public key extraction failed: {e}")))?; + .map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_CRYPTO_ERROR] Public key extraction failed: {e}" + )) + })?; // Build a self-attestation for the standalone agent let attestation_json = { - let device_did = DeviceDID::from_ed25519( - pub_bytes.as_slice().try_into().map_err(|_| { + let device_did = + DeviceDID::from_ed25519(pub_bytes.as_slice().try_into().map_err(|_| { PyRuntimeError::new_err("[AUTHS_CRYPTO_ERROR] Invalid public key length") - })?, - ); + })?); let att = serde_json::json!({ "version": 1, "rid": repo.file_name().unwrap_or_default().to_string_lossy(), @@ -246,8 +278,11 @@ pub fn create_agent_identity( "timestamp": chrono::Utc::now().to_rfc3339(), "note": format!("Agent: {}", alias), }); - serde_json::to_string(&att) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SERIALIZATION_ERROR] Serialization failed: {e}")))? + serde_json::to_string(&att).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_SERIALIZATION_ERROR] Serialization failed: {e}" + )) + })? 
}; Ok(AgentIdentityBundle { @@ -296,58 +331,77 @@ pub fn delegate_agent( let repo = PathBuf::from(shellexpand::tilde(parent_repo_path).as_ref()); let config = RegistryConfig::single_tenant(&repo); - let backend = Arc::new( - GitRegistryBackend::open_existing(config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}")))?, - ); + let backend = Arc::new(GitRegistryBackend::open_existing(config).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}" + )) + })?); - let keychain = get_platform_keychain_with_config(&env_config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")))?; + let keychain = get_platform_keychain_with_config(&env_config).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")) + })?; // Resolve parent identity key alias let parent_alias = if let Some(ref did) = identity_did { resolve_key_alias(did, keychain.as_ref())? } else { - let aliases = keychain - .list_aliases() - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")))?; + let aliases = keychain.list_aliases().map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")) + })?; aliases .into_iter() .find(|a| !a.as_str().contains("--next-")) - .ok_or_else(|| PyRuntimeError::new_err("[AUTHS_KEY_NOT_FOUND] No identity key found in keychain"))? + .ok_or_else(|| { + PyRuntimeError::new_err("[AUTHS_KEY_NOT_FOUND] No identity key found in keychain") + })? 
}; // Generate a new Ed25519 keypair for the agent let agent_alias = KeyAlias::new_unchecked(format!("{}-agent", agent_name)); let rng = SystemRandom::new(); - let pkcs8 = Ed25519KeyPair::generate_pkcs8(&rng) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Key generation failed: {e}")))?; - let keypair = Ed25519KeyPair::from_pkcs8(pkcs8.as_ref()) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Key parsing failed: {e}")))?; + let pkcs8 = Ed25519KeyPair::generate_pkcs8(&rng).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Key generation failed: {e}")) + })?; + let keypair = Ed25519KeyPair::from_pkcs8(pkcs8.as_ref()).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Key parsing failed: {e}")) + })?; let agent_pubkey = keypair.public_key().as_ref().to_vec(); // Get parent identity DID for key storage association - let (parent_did, _, _) = keychain - .load_key(&parent_alias) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Key load failed: {e}")))?; + let (parent_did, _, _) = keychain.load_key(&parent_alias).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Key load failed: {e}")) + })?; // Encrypt and store the agent key - let seed = extract_seed_bytes(pkcs8.as_ref()) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Seed extraction failed: {e}")))?; - let seed_pkcs8 = encode_seed_as_pkcs8(seed) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] PKCS8 encoding failed: {e}")))?; - let encrypted = encrypt_keypair(&seed_pkcs8, &passphrase_str) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Key encryption failed: {e}")))?; + let seed = extract_seed_bytes(pkcs8.as_ref()).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Seed extraction failed: {e}")) + })?; + let seed_pkcs8 = encode_seed_as_pkcs8(seed).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] 
PKCS8 encoding failed: {e}")) + })?; + let encrypted = encrypt_keypair(&seed_pkcs8, &passphrase_str).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Key encryption failed: {e}")) + })?; keychain - .store_key(&agent_alias, &parent_did, KeyRole::DelegatedAgent, &encrypted) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Key storage failed: {e}")))?; + .store_key( + &agent_alias, + &parent_did, + KeyRole::DelegatedAgent, + &encrypted, + ) + .map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Key storage failed: {e}")) + })?; // Parse capabilities let parsed_caps: Vec = capabilities .iter() .map(|c| { - Capability::parse(c) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_INVALID_INPUT] Invalid capability '{c}': {e}"))) + Capability::parse(c).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_INVALID_INPUT] Invalid capability '{c}': {e}" + )) + }) }) .collect::>>()?; @@ -376,20 +430,32 @@ pub fn delegate_agent( .build(); py.allow_threads(|| { - let result = link_device(link_config, &ctx, clock.as_ref()) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_IDENTITY_ERROR] Agent provisioning failed: {e}")))?; + let result = link_device(link_config, &ctx, clock.as_ref()).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_IDENTITY_ERROR] Agent provisioning failed: {e}" + )) + })?; let device_did = DeviceDID(result.device_did.to_string()); let attestations = attestation_storage .load_attestations_for_device(&device_did) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to load attestation: {e}")))?; + .map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to load attestation: {e}" + )) + })?; - let attestation = attestations - .last() - .ok_or_else(|| PyRuntimeError::new_err("[AUTHS_REGISTRY_ERROR] No attestation found after provisioning"))?; + let attestation = attestations.last().ok_or_else(|| { + PyRuntimeError::new_err( + 
"[AUTHS_REGISTRY_ERROR] No attestation found after provisioning", + ) + })?; - let attestation_json = serde_json::to_string(attestation) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SERIALIZATION_ERROR] Serialization failed: {e}")))?; + let attestation_json = serde_json::to_string(attestation).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_SERIALIZATION_ERROR] Serialization failed: {e}" + )) + })?; Ok(DelegatedAgentBundle { agent_did: result.device_did.to_string(), @@ -431,13 +497,15 @@ pub fn link_device_to_identity( let repo = PathBuf::from(shellexpand::tilde(repo_path).as_ref()); let config = RegistryConfig::single_tenant(&repo); - let backend = Arc::new( - GitRegistryBackend::open_existing(config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}")))?, - ); + let backend = Arc::new(GitRegistryBackend::open_existing(config).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}" + )) + })?); - let keychain = get_platform_keychain_with_config(&env_config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")))?; + let keychain = get_platform_keychain_with_config(&env_config).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")) + })?; let alias = resolve_key_alias(identity_key_alias, keychain.as_ref())?; @@ -448,8 +516,11 @@ pub fn link_device_to_identity( let parsed_caps: Vec = capabilities .iter() .map(|c| { - Capability::parse(c) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_INVALID_INPUT] Invalid capability '{c}': {e}"))) + Capability::parse(c).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_INVALID_INPUT] Invalid capability '{c}': {e}" + )) + }) }) .collect::>>()?; @@ -474,8 +545,9 @@ pub fn link_device_to_identity( .build(); py.allow_threads(|| { - let result = link_device(link_config, &ctx, clock.as_ref()) - .map_err(|e| 
PyRuntimeError::new_err(format!("[AUTHS_DEVICE_ERROR] Device linking failed: {e}")))?; + let result = link_device(link_config, &ctx, clock.as_ref()).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_DEVICE_ERROR] Device linking failed: {e}")) + })?; Ok(( result.device_did.to_string(), result.attestation_id.to_string(), @@ -513,13 +585,15 @@ pub fn revoke_device_from_identity( let repo = PathBuf::from(shellexpand::tilde(repo_path).as_ref()); let config = RegistryConfig::single_tenant(&repo); - let backend = Arc::new( - GitRegistryBackend::open_existing(config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}")))?, - ); + let backend = Arc::new(GitRegistryBackend::open_existing(config).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}" + )) + })?); - let keychain = get_platform_keychain_with_config(&env_config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")))?; + let keychain = get_platform_keychain_with_config(&env_config).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")) + })?; let alias = resolve_key_alias(identity_key_alias, keychain.as_ref())?; @@ -538,8 +612,11 @@ pub fn revoke_device_from_identity( .build(); py.allow_threads(|| { - revoke_device(device_did, &alias, &ctx, note, clock.as_ref()) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_DEVICE_ERROR] Device revocation failed: {e}")))?; + revoke_device(device_did, &alias, &ctx, note, clock.as_ref()).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_DEVICE_ERROR] Device revocation failed: {e}" + )) + })?; Ok(()) }) } diff --git a/packages/auths-python/src/identity_sign.rs b/packages/auths-python/src/identity_sign.rs index a3f14f0f..ed907682 100644 --- a/packages/auths-python/src/identity_sign.rs +++ b/packages/auths-python/src/identity_sign.rs @@ -1,12 +1,13 @@ use 
auths_core::config::{EnvironmentConfig, KeychainConfig}; use auths_core::signing::{PrefilledPassphraseProvider, SecureSigner, StorageSigner}; -use auths_core::storage::keychain::{KeyAlias, get_platform_keychain_with_config}; +use auths_core::storage::keychain::{KeyAlias, KeyRole, get_platform_keychain_with_config}; use auths_verifier::core::MAX_ATTESTATION_JSON_SIZE; use auths_verifier::types::IdentityDID; use pyo3::exceptions::PyRuntimeError; use pyo3::prelude::*; fn make_signer( + repo_path: Option<&str>, passphrase: Option, ) -> PyResult<( StorageSigner>, @@ -15,18 +16,20 @@ fn make_signer( #[allow(clippy::disallowed_methods)] // Presentation boundary: env var read is intentional let passphrase_str = passphrase.unwrap_or_else(|| std::env::var("AUTHS_PASSPHRASE").unwrap_or_default()); + let mut keychain_config = KeychainConfig::from_env(); + if keychain_config.backend.is_none() { + keychain_config.backend = Some("file".to_string()); + } + keychain_config.passphrase = Some(passphrase_str.clone()); let env_config = EnvironmentConfig { - auths_home: None, - keychain: KeychainConfig { - backend: Some("file".to_string()), - file_path: None, - passphrase: Some(passphrase_str.clone()), - }, + auths_home: repo_path.map(Into::into), + keychain: keychain_config, ssh_agent_socket: None, }; - let keychain = get_platform_keychain_with_config(&env_config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")))?; + let keychain = get_platform_keychain_with_config(&env_config).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")) + })?; let signer = StorageSigner::new(keychain); let provider = PrefilledPassphraseProvider::new(&passphrase_str); @@ -54,15 +57,16 @@ pub fn sign_as_identity( repo_path: &str, passphrase: Option, ) -> PyResult { - let _ = repo_path; - let (signer, provider) = make_signer(passphrase)?; + let (signer, provider) = make_signer(Some(repo_path), passphrase)?; let did = 
IdentityDID::new(identity_did); let msg = message.to_vec(); py.allow_threads(move || { let sig_bytes = signer .sign_for_identity(&did, &provider, &msg) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SIGNING_FAILED] Signing failed: {e}")))?; + .map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_SIGNING_FAILED] Signing failed: {e}")) + })?; Ok(hex::encode(sig_bytes)) }) } @@ -90,8 +94,6 @@ pub fn sign_action_as_identity( repo_path: &str, passphrase: Option, ) -> PyResult { - let _ = repo_path; - if payload_json.len() > MAX_ATTESTATION_JSON_SIZE { return Err(pyo3::exceptions::PyValueError::new_err(format!( "Payload JSON too large: {} bytes, max {MAX_ATTESTATION_JSON_SIZE}", @@ -114,10 +116,13 @@ pub fn sign_action_as_identity( "timestamp": ×tamp, }); - let canonical = json_canon::to_string(&signing_data) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SERIALIZATION_ERROR] Canonicalization failed: {e}")))?; + let canonical = json_canon::to_string(&signing_data).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_SERIALIZATION_ERROR] Canonicalization failed: {e}" + )) + })?; - let (signer, provider) = make_signer(passphrase)?; + let (signer, provider) = make_signer(Some(repo_path), passphrase)?; let did = IdentityDID::new(identity_did); let action_type_owned = action_type.to_string(); @@ -126,7 +131,9 @@ pub fn sign_action_as_identity( let sig_hex = py.allow_threads(move || { let sig_bytes = signer .sign_for_identity(&did, &provider, canonical.as_bytes()) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SIGNING_FAILED] Signing failed: {e}")))?; + .map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_SIGNING_FAILED] Signing failed: {e}")) + })?; Ok::(hex::encode(sig_bytes)) })?; @@ -139,41 +146,57 @@ pub fn sign_action_as_identity( "signature": sig_hex, }); - serde_json::to_string(&envelope) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SERIALIZATION_ERROR] Failed to serialize envelope: {e}"))) + 
serde_json::to_string(&envelope).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_SERIALIZATION_ERROR] Failed to serialize envelope: {e}" + )) + }) } /// Retrieve the Ed25519 public key (hex) for an identity DID. /// /// Args: /// * `identity_did`: The identity DID (did:keri:...). +/// * `repo_path`: Path to the auths repository. /// * `passphrase`: Optional passphrase for keychain access. /// /// Usage: /// ```ignore -/// let pub_hex = get_identity_public_key(py, "did:keri:E...", None)?; +/// let pub_hex = get_identity_public_key(py, "did:keri:E...", "~/.auths", None)?; /// ``` #[pyfunction] -#[pyo3(signature = (identity_did, passphrase=None))] +#[pyo3(signature = (identity_did, repo_path, passphrase=None))] pub fn get_identity_public_key( py: Python<'_>, identity_did: &str, + repo_path: &str, passphrase: Option, ) -> PyResult { - let (signer, provider) = make_signer(passphrase)?; + let (signer, provider) = make_signer(Some(repo_path), passphrase)?; let did = IdentityDID::new(identity_did); py.allow_threads(move || { - let aliases = signer.inner().list_aliases_for_identity(&did) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Key lookup failed: {e}")))?; - let alias = aliases.first() - .ok_or_else(|| PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] No key found for identity '{identity_did}'")))?; + let aliases = signer + .inner() + .list_aliases_for_identity_with_role(&did, KeyRole::Primary) + .map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Key lookup failed: {e}")) + })?; + let alias = aliases.first().ok_or_else(|| { + PyRuntimeError::new_err(format!( + "[AUTHS_KEY_NOT_FOUND] No primary key found for identity '{identity_did}'" + )) + })?; let pub_bytes = auths_core::storage::keychain::extract_public_key_bytes( signer.inner().as_ref(), alias, &provider, ) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Public key extraction failed: {e}")))?; + .map_err(|e| { + 
PyRuntimeError::new_err(format!( + "[AUTHS_CRYPTO_ERROR] Public key extraction failed: {e}" + )) + })?; Ok(hex::encode(pub_bytes)) }) } @@ -186,29 +209,34 @@ pub fn get_identity_public_key( /// Args: /// * `message`: The bytes to sign. /// * `key_alias`: The agent's key alias (e.g., "deploy-agent"). +/// * `repo_path`: Path to the auths repository. /// * `passphrase`: Optional passphrase for keychain access. /// /// Usage: /// ```ignore -/// let sig = sign_as_agent(py, b"hello", "deploy-bot-agent", None)?; +/// let sig = sign_as_agent(py, b"hello", "deploy-bot-agent", "~/.auths", None)?; /// ``` #[pyfunction] -#[pyo3(signature = (message, key_alias, passphrase=None))] +#[pyo3(signature = (message, key_alias, repo_path, passphrase=None))] pub fn sign_as_agent( py: Python<'_>, message: &[u8], key_alias: &str, + repo_path: &str, passphrase: Option, ) -> PyResult { - let (signer, provider) = make_signer(passphrase)?; - let alias = KeyAlias::new(key_alias) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}")))?; + let (signer, provider) = make_signer(Some(repo_path), passphrase)?; + let alias = KeyAlias::new(key_alias).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}")) + })?; let msg = message.to_vec(); py.allow_threads(move || { let sig_bytes = signer .sign_with_alias(&alias, &provider, &msg) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SIGNING_FAILED] Signing failed: {e}")))?; + .map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_SIGNING_FAILED] Signing failed: {e}")) + })?; Ok(hex::encode(sig_bytes)) }) } @@ -220,20 +248,22 @@ pub fn sign_as_agent( /// * `payload_json`: JSON string for the payload field. /// * `key_alias`: The agent's key alias. /// * `agent_did`: The agent's DID (included in the envelope). +/// * `repo_path`: Path to the auths repository. /// * `passphrase`: Optional passphrase for keychain access. 
/// /// Usage: /// ```ignore -/// let envelope = sign_action_as_agent(py, "deploy", "{}", "deploy-bot-agent", "did:key:z6Mk...", None)?; +/// let envelope = sign_action_as_agent(py, "deploy", "{}", "deploy-bot-agent", "did:key:z6Mk...", "~/.auths", None)?; /// ``` #[pyfunction] -#[pyo3(signature = (action_type, payload_json, key_alias, agent_did, passphrase=None))] +#[pyo3(signature = (action_type, payload_json, key_alias, agent_did, repo_path, passphrase=None))] pub fn sign_action_as_agent( py: Python<'_>, action_type: &str, payload_json: &str, key_alias: &str, agent_did: &str, + repo_path: &str, passphrase: Option, ) -> PyResult { if payload_json.len() > MAX_ATTESTATION_JSON_SIZE { @@ -258,12 +288,16 @@ pub fn sign_action_as_agent( "timestamp": ×tamp, }); - let canonical = json_canon::to_string(&signing_data) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SERIALIZATION_ERROR] Canonicalization failed: {e}")))?; + let canonical = json_canon::to_string(&signing_data).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_SERIALIZATION_ERROR] Canonicalization failed: {e}" + )) + })?; - let (signer, provider) = make_signer(passphrase)?; - let alias = KeyAlias::new(key_alias) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}")))?; + let (signer, provider) = make_signer(Some(repo_path), passphrase)?; + let alias = KeyAlias::new(key_alias).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid key alias: {e}")) + })?; let action_type_owned = action_type.to_string(); let agent_did_owned = agent_did.to_string(); @@ -271,7 +305,9 @@ pub fn sign_action_as_agent( let sig_hex = py.allow_threads(move || { let sig_bytes = signer .sign_with_alias(&alias, &provider, canonical.as_bytes()) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SIGNING_FAILED] Signing failed: {e}")))?; + .map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_SIGNING_FAILED] Signing failed: {e}")) + })?; 
Ok::(hex::encode(sig_bytes)) })?; @@ -284,6 +320,9 @@ pub fn sign_action_as_agent( "signature": sig_hex, }); - serde_json::to_string(&envelope) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SERIALIZATION_ERROR] Failed to serialize envelope: {e}"))) + serde_json::to_string(&envelope).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_SERIALIZATION_ERROR] Failed to serialize envelope: {e}" + )) + }) } diff --git a/packages/auths-python/src/lib.rs b/packages/auths-python/src/lib.rs index 85be9e2c..729ec2b4 100644 --- a/packages/auths-python/src/lib.rs +++ b/packages/auths-python/src/lib.rs @@ -4,14 +4,14 @@ use pyo3::prelude::*; -pub mod audit; pub mod artifact_publish; pub mod artifact_sign; pub mod attestation_query; +pub mod audit; pub mod commit_sign; -pub mod diagnostics; pub mod commit_verify; pub mod device_ext; +pub mod diagnostics; pub mod git_integration; pub mod identity; pub mod identity_sign; diff --git a/packages/auths-python/src/org.rs b/packages/auths-python/src/org.rs index 45e06a56..b308462d 100644 --- a/packages/auths-python/src/org.rs +++ b/packages/auths-python/src/org.rs @@ -96,8 +96,7 @@ pub fn create_org( let key_alias = KeyAlias::new_unchecked(key_alias_str); let keychain = get_keychain(&passphrase_str, &repo_path_str)?; - let provider = - auths_core::signing::PrefilledPassphraseProvider::new(&passphrase_str); + let provider = auths_core::signing::PrefilledPassphraseProvider::new(&passphrase_str); let (controller_did, alias) = initialize_registry_identity(backend.clone(), &key_alias, &provider, &*keychain, None) @@ -155,12 +154,7 @@ pub fn create_org( .store_org_member(&org_prefix, &attestation) .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_ORG_ERROR] {e}")))?; - Ok(( - org_prefix, - controller_did.to_string(), - label, - repo_path_str, - )) + Ok((org_prefix, controller_did.to_string(), label, repo_path_str)) }) } @@ -219,7 +213,9 @@ pub fn add_org_member( let member_pk = if let Some(pk_hex) = member_public_key_hex { 
let pk_bytes = hex::decode(&pk_hex).map_err(|e| { - PyRuntimeError::new_err(format!("[AUTHS_ORG_ERROR] Invalid member public key hex: {e}")) + PyRuntimeError::new_err(format!( + "[AUTHS_ORG_ERROR] Invalid member public key hex: {e}" + )) })?; Ed25519PublicKey::try_from_slice(&pk_bytes) .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_ORG_ERROR] {e}")))? @@ -234,8 +230,7 @@ pub fn add_org_member( let signer = StorageSigner::new(keychain); let uuid_provider = SystemUuidProvider; - let provider = - auths_core::signing::PrefilledPassphraseProvider::new(&passphrase_str); + let provider = auths_core::signing::PrefilledPassphraseProvider::new(&passphrase_str); let org_ctx = OrgContext { registry: &*backend, @@ -311,7 +306,9 @@ pub fn revoke_org_member( let member_pk = if let Some(pk_hex) = member_public_key_hex { let pk_bytes = hex::decode(&pk_hex).map_err(|e| { - PyRuntimeError::new_err(format!("[AUTHS_ORG_ERROR] Invalid member public key hex: {e}")) + PyRuntimeError::new_err(format!( + "[AUTHS_ORG_ERROR] Invalid member public key hex: {e}" + )) })?; Ed25519PublicKey::try_from_slice(&pk_bytes) .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_ORG_ERROR] {e}")))? 
@@ -326,8 +323,7 @@ pub fn revoke_org_member( let signer = StorageSigner::new(keychain); let uuid_provider = SystemUuidProvider; - let provider = - auths_core::signing::PrefilledPassphraseProvider::new(&passphrase_str); + let provider = auths_core::signing::PrefilledPassphraseProvider::new(&passphrase_str); let org_ctx = OrgContext { registry: &*backend, @@ -385,9 +381,8 @@ pub fn list_org_members( let org_prefix = extract_org_prefix(org_did); py.allow_threads(move || { - let backend = GitRegistryBackend::from_config_unchecked( - RegistryConfig::single_tenant(&repo), - ); + let backend = + GitRegistryBackend::from_config_unchecked(RegistryConfig::single_tenant(&repo)); let filter = MemberFilter::default(); @@ -410,11 +405,7 @@ pub fn list_org_members( .iter() .map(|c| c.as_str().to_string()) .collect(); - let role_str = m - .role - .as_ref() - .map(|r| r.as_str()) - .unwrap_or("member"); + let role_str = m.role.as_ref().map(|r| r.as_str()).unwrap_or("member"); Some(serde_json::json!({ "member_did": m.did.to_string(), diff --git a/packages/auths-python/src/pairing.rs b/packages/auths-python/src/pairing.rs index 0a700780..b4a3094e 100644 --- a/packages/auths-python/src/pairing.rs +++ b/packages/auths-python/src/pairing.rs @@ -5,7 +5,7 @@ use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; -use auths_core::storage::keychain::{IdentityDID, KeyAlias}; +use auths_core::storage::keychain::{IdentityDID, KeyAlias, KeyRole}; use auths_id::storage::identity::IdentityStorage; use auths_pairing_daemon::{ MockNetworkDiscovery, MockNetworkInterfaces, PairingDaemonBuilder, PairingDaemonHandle, @@ -148,8 +148,7 @@ pub fn create_pairing_session_ffi( let session_id = session_req.create_request.session_id.clone(); let short_code = session_req.create_request.short_code.clone(); - let mut builder = PairingDaemonBuilder::new() - .with_rate_limiter(RateLimiter::new(100)); + let mut builder = PairingDaemonBuilder::new().with_rate_limiter(RateLimiter::new(100)); let 
mock_addr = SocketAddr::new(bind_addr, 0); builder = builder.with_network(MockNetworkInterfaces(bind_addr)); @@ -236,14 +235,11 @@ pub fn join_pairing_session_ffi( let keychain = get_keychain(&passphrase_str, &repo_path_str)?; let aliases = keychain - .list_aliases_for_identity(&controller_identity_did) + .list_aliases_for_identity_with_role(&controller_identity_did, KeyRole::Primary) .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_PAIRING_ERROR] {e}")))?; - let key_alias = aliases - .into_iter() - .find(|a| !a.contains("--next-")) - .ok_or_else(|| { - PyRuntimeError::new_err("[AUTHS_PAIRING_ERROR] No signing key found") - })?; + let key_alias = aliases.into_iter().next().ok_or_else(|| { + PyRuntimeError::new_err("[AUTHS_PAIRING_ERROR] No primary signing key found") + })?; let (_did, _role, encrypted_key) = keychain .load_key(&key_alias) @@ -274,20 +270,17 @@ pub fn join_pairing_session_ffi( let rt = runtime(); let lookup_url = format!("{}/v1/pairing/sessions/by-code/{}", endpoint, short_code); - let session_data: serde_json::Value = rt - .block_on(async { - let client = reqwest::Client::new(); - let resp = client - .get(&lookup_url) - .send() - .await - .map_err(|e| { - PyRuntimeError::new_err(format!("[AUTHS_PAIRING_ERROR] {e}")) - })?; - resp.json::().await.map_err(|e| { - PyRuntimeError::new_err(format!("[AUTHS_PAIRING_ERROR] {e}")) - }) - })?; + let session_data: serde_json::Value = rt.block_on(async { + let client = reqwest::Client::new(); + let resp = client + .get(&lookup_url) + .send() + .await + .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_PAIRING_ERROR] {e}")))?; + resp.json::() + .await + .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_PAIRING_ERROR] {e}"))) + })?; let session_id = session_data["session_id"] .as_str() @@ -326,16 +319,15 @@ pub fn join_pairing_session_ffi( }; let secure_seed = auths_crypto::SecureSeed::new(*seed.as_bytes()); - let (pairing_response, _shared_secret) = - auths_core::pairing::PairingResponse::create( - 
now, - &pairing_token, - &secure_seed, - &pubkey_32, - device_did.to_string(), - device_name.clone(), - ) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_PAIRING_ERROR] {e}")))?; + let (pairing_response, _shared_secret) = auths_core::pairing::PairingResponse::create( + now, + &pairing_token, + &secure_seed, + &pubkey_32, + device_did.to_string(), + device_name.clone(), + ) + .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_PAIRING_ERROR] {e}")))?; let submit_req = auths_core::pairing::types::SubmitResponseRequest { device_x25519_pubkey: auths_core::pairing::types::Base64UrlEncoded::from_raw( @@ -361,9 +353,7 @@ pub fn join_pairing_session_ffi( .json(&submit_req) .send() .await - .map_err(|e| { - PyRuntimeError::new_err(format!("[AUTHS_PAIRING_ERROR] {e}")) - })?; + .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_PAIRING_ERROR] {e}")))?; if !resp.status().is_success() { let status = resp.status(); let body = resp.text().await.unwrap_or_default(); @@ -416,21 +406,18 @@ pub fn complete_pairing_ffi( let keychain = get_keychain(&passphrase_str, &repo_path_str)?; let aliases = keychain - .list_aliases_for_identity(&controller_identity_did) + .list_aliases_for_identity_with_role(&controller_identity_did, KeyRole::Primary) .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_PAIRING_ERROR] {e}")))?; - let identity_key_alias_str = aliases - .into_iter() - .find(|a| !a.contains("--next-")) - .ok_or_else(|| { - PyRuntimeError::new_err("[AUTHS_PAIRING_ERROR] No signing key found") - })?; + let identity_key_alias_str = aliases.into_iter().next().ok_or_else(|| { + PyRuntimeError::new_err("[AUTHS_PAIRING_ERROR] No primary signing key found") + })?; let identity_key_alias = KeyAlias::new_unchecked(identity_key_alias_str); let key_storage: Arc = Arc::from(keychain); - let provider = Arc::new( - auths_core::signing::PrefilledPassphraseProvider::new(&passphrase_str), - ); + let provider = Arc::new(auths_core::signing::PrefilledPassphraseProvider::new( + 
&passphrase_str, + )); let now = Utc::now(); let params = PairingAttestationParams { @@ -449,15 +436,11 @@ pub fn complete_pairing_ffi( let attestation_storage = RegistryAttestationStorage::new(&repo); use auths_id::attestation::AttestationSink; attestation_storage - .export(&auths_verifier::VerifiedAttestation::dangerous_from_unchecked( - attestation.clone(), - )) + .export( + &auths_verifier::VerifiedAttestation::dangerous_from_unchecked(attestation.clone()), + ) .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_PAIRING_ERROR] {e}")))?; - Ok(( - device_did, - None, - attestation.rid.to_string(), - )) + Ok((device_did, None, attestation.rid.to_string())) }) } diff --git a/packages/auths-python/src/rotation.rs b/packages/auths-python/src/rotation.rs index fbaa7165..c9356972 100644 --- a/packages/auths-python/src/rotation.rs +++ b/packages/auths-python/src/rotation.rs @@ -68,14 +68,17 @@ pub fn rotate_identity_ffi( let repo = PathBuf::from(shellexpand::tilde(repo_path).as_ref()); let config = RegistryConfig::single_tenant(&repo); - let backend = Arc::new( - GitRegistryBackend::open_existing(config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}")))?, - ); + let backend = Arc::new(GitRegistryBackend::open_existing(config).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_REGISTRY_ERROR] Failed to open registry: {e}" + )) + })?); - let keychain = get_platform_keychain_with_config(&env_config) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")))?; - let keychain: Arc = Arc::from(keychain); + let keychain = get_platform_keychain_with_config(&env_config).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_KEYCHAIN_ERROR] Keychain error: {e}")) + })?; + let keychain: Arc = + Arc::from(keychain); let identity_storage = Arc::new(RegistryIdentityStorage::new(&repo)); let attestation_storage = Arc::new(RegistryAttestationStorage::new(&repo)); @@ -96,8 +99,11 @@ pub 
fn rotate_identity_ffi( let next_alias = next_key_alias .map(|a| { - auths_core::storage::keychain::KeyAlias::new(a) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_KEY_NOT_FOUND] Invalid next key alias: {e}"))) + auths_core::storage::keychain::KeyAlias::new(a).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_KEY_NOT_FOUND] Invalid next key alias: {e}" + )) + }) }) .transpose()?; @@ -108,8 +114,9 @@ pub fn rotate_identity_ffi( }; py.allow_threads(|| { - let result = rotate_identity(rotation_config, &ctx, clock.as_ref()) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_ROTATION_ERROR] Key rotation failed: {e}")))?; + let result = rotate_identity(rotation_config, &ctx, clock.as_ref()).map_err(|e| { + PyRuntimeError::new_err(format!("[AUTHS_ROTATION_ERROR] Key rotation failed: {e}")) + })?; Ok(PyIdentityRotationResult { controller_did: result.controller_did.to_string(), diff --git a/packages/auths-python/src/sign.rs b/packages/auths-python/src/sign.rs index b18af157..39e05ba4 100644 --- a/packages/auths-python/src/sign.rs +++ b/packages/auths-python/src/sign.rs @@ -27,8 +27,11 @@ pub fn sign_bytes(private_key_hex: &str, message: &[u8]) -> PyResult { ))); } - let keypair = ring::signature::Ed25519KeyPair::from_seed_unchecked(&seed) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Failed to create keypair: {e}")))?; + let keypair = ring::signature::Ed25519KeyPair::from_seed_unchecked(&seed).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_CRYPTO_ERROR] Failed to create keypair: {e}" + )) + })?; let sig = keypair.sign(message); Ok(hex::encode(sig.as_ref())) @@ -91,14 +94,20 @@ pub fn sign_action( .canonical_bytes() .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SERIALIZATION_ERROR] {e}")))?; - let keypair = ring::signature::Ed25519KeyPair::from_seed_unchecked(&seed) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_CRYPTO_ERROR] Failed to create keypair: {e}")))?; + let keypair = 
ring::signature::Ed25519KeyPair::from_seed_unchecked(&seed).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_CRYPTO_ERROR] Failed to create keypair: {e}" + )) + })?; let sig = keypair.sign(&canonical); envelope.signature = hex::encode(sig.as_ref()); - serde_json::to_string(&envelope) - .map_err(|e| PyRuntimeError::new_err(format!("[AUTHS_SERIALIZATION_ERROR] Failed to serialize envelope: {e}"))) + serde_json::to_string(&envelope).map_err(|e| { + PyRuntimeError::new_err(format!( + "[AUTHS_SERIALIZATION_ERROR] Failed to serialize envelope: {e}" + )) + }) } /// Verify an action envelope's Ed25519 signature. diff --git a/packages/auths-python/src/trust.rs b/packages/auths-python/src/trust.rs index 36f84400..0926ab46 100644 --- a/packages/auths-python/src/trust.rs +++ b/packages/auths-python/src/trust.rs @@ -122,11 +122,7 @@ pub fn pin_identity( #[pyfunction] #[pyo3(signature = (did, repo_path))] -pub fn remove_pinned_identity( - py: Python<'_>, - did: &str, - repo_path: &str, -) -> PyResult<()> { +pub fn remove_pinned_identity(py: Python<'_>, did: &str, repo_path: &str) -> PyResult<()> { let did = did.to_string(); let repo = repo_path.to_string(); @@ -141,10 +137,7 @@ pub fn remove_pinned_identity( #[pyfunction] #[pyo3(signature = (repo_path,))] -pub fn list_pinned_identities( - py: Python<'_>, - repo_path: &str, -) -> PyResult { +pub fn list_pinned_identities(py: Python<'_>, repo_path: &str) -> PyResult { let repo = repo_path.to_string(); py.allow_threads(move || { diff --git a/packages/auths-python/src/witness.rs b/packages/auths-python/src/witness.rs index 408742f1..8b40b92b 100644 --- a/packages/auths-python/src/witness.rs +++ b/packages/auths-python/src/witness.rs @@ -63,7 +63,10 @@ pub fn add_witness( py.allow_threads(move || { let parsed_url: url::Url = url_str.parse().map_err(|e| { - PyRuntimeError::new_err(format!("[AUTHS_WITNESS_ERROR] Invalid URL '{}': {}", url_str, e)) + PyRuntimeError::new_err(format!( + "[AUTHS_WITNESS_ERROR] Invalid URL '{}': 
{}", + url_str, e + )) })?; let mut config = load_witness_config(&repo)?; @@ -84,17 +87,16 @@ pub fn add_witness( #[pyfunction] #[pyo3(signature = (url, repo_path))] -pub fn remove_witness( - py: Python<'_>, - url: &str, - repo_path: &str, -) -> PyResult<()> { +pub fn remove_witness(py: Python<'_>, url: &str, repo_path: &str) -> PyResult<()> { let url_str = url.to_string(); let repo = resolve_repo(repo_path); py.allow_threads(move || { let parsed_url: url::Url = url_str.parse().map_err(|e| { - PyRuntimeError::new_err(format!("[AUTHS_WITNESS_ERROR] Invalid URL '{}': {}", url_str, e)) + PyRuntimeError::new_err(format!( + "[AUTHS_WITNESS_ERROR] Invalid URL '{}': {}", + url_str, e + )) })?; let mut config = load_witness_config(&repo)?; @@ -111,10 +113,7 @@ pub fn remove_witness( #[pyfunction] #[pyo3(signature = (repo_path,))] -pub fn list_witnesses( - py: Python<'_>, - repo_path: &str, -) -> PyResult { +pub fn list_witnesses(py: Python<'_>, repo_path: &str) -> PyResult { let repo = resolve_repo(repo_path); py.allow_threads(move || {