diff --git a/.config/nextest.toml b/.config/nextest.toml new file mode 100644 index 0000000..13af260 --- /dev/null +++ b/.config/nextest.toml @@ -0,0 +1,28 @@ +# Nextest configuration for submod +# +# With per-test git config isolation (GIT_CONFIG_GLOBAL/GIT_CONFIG_SYSTEM), +# tests are safe to run in parallel. The serial-integration group is kept as +# defense-in-depth: integration tests that create repos, add/remove submodules, +# and shell out to git are serialized among themselves to avoid any residual +# contention on shared git state (e.g. lock files in /tmp). + +[test-groups] +serial-integration = { max-threads = 1 } + +[profile.default] +fail-fast = false + +# Integration tests that create/modify git repos run serially within their +# group. config_tests and command_contract_tests are safe to run in parallel +# with everything (they use isolated temp dirs and per-test gitconfig). +[[profile.default.overrides]] +filter = 'binary_id(submod::integration_tests) | binary_id(submod::sparse_checkout_tests) | binary_id(submod::error_handling_tests) | binary_id(submod::performance_tests)' +test-group = 'serial-integration' + +# CI profile: fail fast but still serialize integration tests +[profile.ci] +fail-fast = true + +[[profile.ci.overrides]] +filter = 'binary_id(submod::integration_tests) | binary_id(submod::sparse_checkout_tests) | binary_id(submod::error_handling_tests) | binary_id(submod::performance_tests)' +test-group = 'serial-integration' diff --git a/.serena/.gitignore b/.serena/.gitignore new file mode 100644 index 0000000..2e510af --- /dev/null +++ b/.serena/.gitignore @@ -0,0 +1,2 @@ +/cache +/project.local.yml diff --git a/.serena/project.yml b/.serena/project.yml new file mode 100644 index 0000000..3b3f205 --- /dev/null +++ b/.serena/project.yml @@ -0,0 +1,138 @@ +# the name by which the project can be referenced within Serena +project_name: "submod" + + +# list of languages for which language servers are started; choose from: +# al bash clojure cpp 
csharp +# csharp_omnisharp dart elixir elm erlang +# fortran fsharp go groovy haskell +# java julia kotlin lua markdown +# matlab nix pascal perl php +# php_phpactor powershell python python_jedi r +# rego ruby ruby_solargraph rust scala +# swift terraform toml typescript typescript_vts +# vue yaml zig +# (This list may be outdated. For the current list, see values of Language enum here: +# https://github.com/oraios/serena/blob/main/src/solidlsp/ls_config.py +# For some languages, there are alternative language servers, e.g. csharp_omnisharp, ruby_solargraph.) +# Note: +# - For C, use cpp +# - For JavaScript, use typescript +# - For Free Pascal/Lazarus, use pascal +# Special requirements: +# Some languages require additional setup/installations. +# See here for details: https://oraios.github.io/serena/01-about/020_programming-languages.html#language-servers +# When using multiple languages, the first language server that supports a given file will be used for that file. +# The first language is the default language and the respective language server will be used as a fallback. +# Note that when using the JetBrains backend, language servers are not used and this list is correspondingly ignored. +languages: +- rust + +# the encoding used by text files in the project +# For a list of possible encodings, see https://docs.python.org/3.11/library/codecs.html#standard-encodings +encoding: "utf-8" + +# line ending convention to use when writing source files. +# Possible values: unset (use global setting), "lf", "crlf", or "native" (platform default) +# This does not affect Serena's own files (e.g. memories and configuration files), which always use native line endings. +line_ending: + +# The language backend to use for this project. +# If not set, the global setting from serena_config.yml is used. +# Valid values: LSP, JetBrains +# Note: the backend is fixed at startup. If a project with a different backend +# is activated post-init, an error will be returned. 
+language_backend: + +# whether to use project's .gitignore files to ignore files +ignore_all_files_in_gitignore: true + +# list of additional paths to ignore in this project. +# Same syntax as gitignore, so you can use * and **. +# Note: global ignored_paths from serena_config.yml are also applied additively. +ignored_paths: [] + +# whether the project is in read-only mode +# If set to true, all editing tools will be disabled and attempts to use them will result in an error +# Added on 2025-04-18 +read_only: false + +# list of tool names to exclude. +# This extends the existing exclusions (e.g. from the global configuration) +# +# Below is the complete list of tools for convenience. +# To make sure you have the latest list of tools, and to view their descriptions, +# execute `uv run scripts/print_tool_overview.py`. +# +# * `activate_project`: Activates a project by name. +# * `check_onboarding_performed`: Checks whether project onboarding was already performed. +# * `create_text_file`: Creates/overwrites a file in the project directory. +# * `delete_lines`: Deletes a range of lines within a file. +# * `delete_memory`: Deletes a memory from Serena's project-specific memory store. +# * `execute_shell_command`: Executes a shell command. +# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced. +# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type). +# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type). +# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes. +# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file. +# * `initial_instructions`: Gets the initial instructions for the current project. 
+# Should only be used in settings where the system prompt cannot be set, +# e.g. in clients you have no control over, like Claude Desktop. +# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol. +# * `insert_at_line`: Inserts content at a given line in a file. +# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol. +# * `list_dir`: Lists files and directories in the given directory (optionally with recursion). +# * `list_memories`: Lists memories in Serena's project-specific memory store. +# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building). +# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context). +# * `read_file`: Reads a file within the project directory. +# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store. +# * `remove_project`: Removes a project from the Serena configuration. +# * `replace_lines`: Replaces a range of lines within a file with new content. +# * `replace_symbol_body`: Replaces the full definition of a symbol. +# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen. +# * `search_for_pattern`: Performs a search for a pattern in the project. +# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase. +# * `switch_modes`: Activates modes by providing a list of their names +# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information. +# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task. +# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed. 
+# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store. +excluded_tools: [] + +# list of tools to include that would otherwise be disabled (particularly optional tools that are disabled by default). +# This extends the existing inclusions (e.g. from the global configuration). +included_optional_tools: [] + +# fixed set of tools to use as the base tool set (if non-empty), replacing Serena's default set of tools. +# This cannot be combined with non-empty excluded_tools or included_optional_tools. +fixed_tools: [] + +# list of mode names to that are always to be included in the set of active modes +# The full set of modes to be activated is base_modes + default_modes. +# If the setting is undefined, the base_modes from the global configuration (serena_config.yml) apply. +# Otherwise, this setting overrides the global configuration. +# Set this to [] to disable base modes for this project. +# Set this to a list of mode names to always include the respective modes for this project. +base_modes: + +# list of mode names that are to be activated by default. +# The full set of modes to be activated is base_modes + default_modes. +# If the setting is undefined, the default_modes from the global configuration (serena_config.yml) apply. +# Otherwise, this overrides the setting from the global configuration (serena_config.yml). +# This setting can, in turn, be overridden by CLI parameters (--mode). +default_modes: + +# initial prompt for the project. It will always be given to the LLM upon activating the project +# (contrary to the memories, which are loaded on demand). +initial_prompt: "" + +# time budget (seconds) per tool call for the retrieval of additional symbol information +# such as docstrings or parameter information. +# This overrides the corresponding setting in the global configuration; see the documentation there. +# If null or missing, use the setting from the global configuration. 
+symbol_info_budget: + +# list of regex patterns which, when matched, mark a memory entry as read‑only. +# Extends the list from the global configuration, merging the two lists. +read_only_memory_patterns: [] diff --git a/CLAUDE.md b/CLAUDE.md index 7b37ffa..d2b9582 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,110 +1,93 @@ - - # CLAUDE.md This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. -## Project Overview - -`submod` is a Rust CLI tool for managing git submodules with advanced sparse checkout support. It uses `gitoxide` and `git2` libraries for high-performance git operations, with fallbacks to CLI git commands when needed. - -## Development Commands - -### Building and Testing - -- `cargo build` - Build the project -- `cargo test` - Run unit tests -- `./scripts/run-tests.sh` - Run comprehensive integration test suite -- `./scripts/run-tests.sh --verbose` - Run tests with detailed output -- `./scripts/run-tests.sh --performance` - Include performance tests -- `./scripts/run-tests.sh --filter ` - Run specific test modules - -### Linting and Formatting - -- `cargo fmt` - Format code -- `cargo clippy` - Run linter -- `hk run check` - Run pre-commit checks via hk tool -- `hk run fix` - Run auto-fixable linters - -### Mise Tasks (if using mise) - -- `mise run build` or `mise run b` - Build the CLI -- `mise run test` - Run automated tests -- `mise run lint` - Lint with clippy -- `mise run ci` - Run CI tasks (build, lint, test) - -### Testing Philosophy - -The project follows integration-first testing. Focus on testing complete workflows and outputs rather than implementation details. Tests are run serially (`RUST_TEST_THREADS=1`) to avoid git submodule race conditions. 
+## Commands + +### Build & Run +```bash +cargo build # debug build +cargo build --release # release build +mise run build # alias: mise run b +``` + +### Test +```bash +cargo nextest run --all-features --no-fail-fast # run all tests (preferred) +cargo test --test integration_tests # integration tests only +cargo test --test command_contract_tests # command contract tests only +./scripts/run-tests.sh --verbose # comprehensive test runner with reporting +./scripts/run-tests.sh --filter sparse_checkout # filter to specific tests +mise run test # alias: mise run t (runs via hk) +``` + +Integration tests that modify git repos are serialized via nextest test groups (`.config/nextest.toml`); other tests run in parallel. Each test gets an isolated git config via `GIT_CONFIG_GLOBAL` so tests never race on `~/.gitconfig`. The test suite is integration-test-focused; unit tests are minimal by design. + +### Lint & Format +```bash +cargo fmt # format code +cargo clippy --all-features # lint +hk run check # run all linters (fmt, clippy, deny, typos, pkl) +hk fix # auto-fix where possible +mise run lint # alias for hk run check +``` + +### Full CI +```bash +mise run ci # build + lint + test +hk run ci # same via hk (uses --fail-fast for tests) +``` + +### Git Hooks +Managed by `hk` (configured in `hk.pkl`). Pre-commit runs: cargo fmt, clippy, check, nextest, typos, cargo-deny, pkl eval. Hooks install automatically with `mise install`. 
## Architecture -### Core Modules - -- `src/main.rs` - CLI entry point, parses commands and dispatches to manager -- `src/commands.rs` - Command-line argument definitions using clap -- `src/config.rs` - TOML configuration parsing and submodule config management -- `src/git_manager.rs` - Core submodule operations using gitoxide/git2 -- `src/lib.rs` - Library exports (not a stable API) - -### Configuration System - -- Uses TOML configuration files (default: `submod.toml`) -- Supports global defaults with per-submodule overrides -- Handles sparse checkout paths, git options, and submodule settings - -### Git Operations Strategy - -1. **Primary**: gitoxide library for performance -2. **Fallback**: git2 library (optional feature `git2-support`) -3. **Final fallback**: CLI git commands - -### Key Design Patterns - -- Error handling with `anyhow` for application errors, `thiserror` for library errors -- Comprehensive documentation for all public APIs -- Strict linting configuration with pedantic clippy settings -- Integration tests over unit tests +`submod` is a CLI tool for managing git submodules with TOML configuration and sparse checkout support. It exposes the library as `src/lib.rs` primarily for integration testing (the public API is not stable). 
-## Configuration Files +### Layer Stack -### Key Files to Know +``` +CLI (commands.rs + main.rs) + ↓ clap parsing +GitManager (git_manager.rs) ← high-level submodule operations + ↓ delegates to +GitOpsManager (git_ops/mod.rs) ← unified backend with automatic fallback + ├── GixOperations (git_ops/gix_ops.rs) ← gitoxide (preferred) + ├── Git2Operations (git_ops/git2_ops.rs) ← libgit2 (fallback) + └── Git CLI ← last resort (spawned via std::process) +Config (config.rs) ← figment-based TOML config loading/saving +``` -- `Cargo.toml` - Project configuration with strict linting rules -- `hk.pkl` - Git hooks configuration (pre-commit, linting) -- `mise.toml` - Development environment and task definitions -- `sample_config/submod.toml` - Example configuration -- `scripts/run-tests.sh` - Comprehensive test runner +### Fallback Architecture -### Test Structure +The core design is a **gix-first, git2-fallback, CLI-last-resort** strategy, driven by the immaturity of gitoxide's submodule support. `GitOpsManager` wraps both backends behind the `GitOperations` trait and calls `try_with_fallback()` / `try_with_fallback_mut()` for every operation. When gix fails, it logs a warning and transparently retries with git2; `add_submodule` has an additional CLI fallback that also cleans up any partial state from the prior attempt. -- `tests/integration_tests.rs` - Core functionality tests -- `tests/config_tests.rs` - Configuration parsing tests -- `tests/sparse_checkout_tests.rs` - Sparse checkout functionality -- `tests/error_handling_tests.rs` - Error conditions and edge cases -- `tests/performance_tests.rs` - Performance and stress tests +After destructive operations (delete, nuke), `GitOpsManager::reopen()` must be called to refresh the in-memory repository state. git2 reopen errors are fatal; gix reopen errors are warnings only. -## Working with the Codebase +### Configuration -### Before Making Changes +`Config` uses [figment](https://docs.rs/figment) to load `submod.toml`. 
The schema has: +- `[defaults]` → `SubmoduleDefaults` (global git options applied to all submodules) +- `[]` sections → `SubmoduleEntry` (per-submodule config, overrides defaults) -1. Run `./scripts/run-tests.sh` to ensure tests pass -2. Check code formatting with `cargo fmt --check` -3. Run linter with `cargo clippy` +`SubmoduleEntry` merges `SubmoduleGitOptions` (ignore, fetch_recurse, branch, update) with submodule-specific fields (path, url, sparse_paths, active, shallow). Options are serialized to/from git-config-compatible strings via the `options.rs` newtype wrappers (`SerializableIgnore`, `SerializableUpdate`, etc.). -### When Adding Features +### Key Conventions -- Add integration tests to appropriate test modules -- Update configuration parsing if new config options are added -- Follow existing error handling patterns -- Document public APIs thoroughly +- **Unsafe code is forbidden** (`unsafe_code = "forbid"` in `Cargo.toml`) +- Clippy is configured at `pedantic` + `nursery` warn level; `correctness` is deny +- `missing_docs` is warn — public items need doc comments +- `module_name_repetitions` and `too_many_lines` are allowed +- All error handling uses `anyhow` for propagation and `thiserror` for defining error types +- `simple_gix.rs` contains lightweight gix helpers used in `gix_ops.rs` -### Code Quality Standards +### Testing Approach -- Unsafe code is forbidden (`unsafe_code = "forbid"`) -- All warnings treated seriously with comprehensive clippy configuration -- Focus on clear, descriptive naming and comprehensive error messages +Integration tests in `tests/` use a `TestHarness` (in `tests/common/mod.rs`) that creates temporary git repos and invokes the compiled binary. 
Test files: +- `integration_tests.rs` — end-to-end CLI behavior +- `command_contract_tests.rs` — CLI command argument contracts +- `config_tests.rs` — configuration parsing/serialization +- `sparse_checkout_tests.rs` — sparse checkout behavior +- `error_handling_tests.rs` — error conditions and messages +- `performance_tests.rs` — benchmarks (uses criterion) diff --git a/Cargo.lock b/Cargo.lock index aa9ea75..59137e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -123,9 +123,6 @@ name = "bitflags" version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" -dependencies = [ - "serde_core", -] [[package]] name = "block-buffer" @@ -170,9 +167,6 @@ name = "bytesize" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6bd91ee7b2422bcb158d90ef4d14f75ef67f340943fc4149891dcce8f8b972a3" -dependencies = [ - "serde_core", -] [[package]] name = "cast" @@ -351,19 +345,6 @@ dependencies = [ "itertools", ] -[[package]] -name = "crossbeam" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8" -dependencies = [ - "crossbeam-channel", - "crossbeam-deque", - "crossbeam-epoch", - "crossbeam-queue", - "crossbeam-utils", -] - [[package]] name = "crossbeam-channel" version = "0.5.15" @@ -392,15 +373,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "crossbeam-queue" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" -dependencies = [ - "crossbeam-utils", -] - [[package]] name = "crossbeam-utils" version = "0.8.21" @@ -660,12 +632,8 @@ dependencies = [ "gix-pack", "gix-status", "gix-transport", - "gix-url", - "jwalk", "layout-rs", "open", - "serde", - "serde_json", "tempfile", "thiserror", ] @@ -726,7 +694,6 @@ 
dependencies = [ "gix-worktree-state", "nonempty", "parking_lot", - "serde", "signal-hook 0.4.3", "smallvec", "thiserror", @@ -741,7 +708,6 @@ dependencies = [ "bstr", "gix-date", "gix-error", - "serde", "winnow", ] @@ -757,7 +723,6 @@ dependencies = [ "gix-quote", "gix-trace", "kstring", - "serde", "smallvec", "thiserror", "unicode-bom", @@ -826,7 +791,6 @@ dependencies = [ "gix-hash", "memmap2", "nonempty", - "serde", ] [[package]] @@ -843,7 +807,6 @@ dependencies = [ "gix-ref", "gix-sec", "memchr", - "serde", "smallvec", "thiserror", "unicode-bom", @@ -860,7 +823,6 @@ dependencies = [ "bstr", "gix-path", "libc", - "serde", "thiserror", ] @@ -879,7 +841,6 @@ dependencies = [ "gix-sec", "gix-trace", "gix-url", - "serde", "thiserror", ] @@ -893,7 +854,6 @@ dependencies = [ "gix-error", "itoa", "jiff", - "serde", "smallvec", ] @@ -1043,7 +1003,6 @@ dependencies = [ "bstr", "gix-features", "gix-path", - "serde", ] [[package]] @@ -1054,7 +1013,6 @@ checksum = "d8ced05d2d7b13bff08b2f7eb4e47cfeaf00b974c2ddce08377c4fe1f706b3eb" dependencies = [ "faster-hex", "gix-features", - "serde", "sha1-checked", "thiserror", ] @@ -1080,7 +1038,6 @@ dependencies = [ "gix-glob", "gix-path", "gix-trace", - "serde", "unicode-bom", ] @@ -1108,7 +1065,6 @@ dependencies = [ "libc", "memmap2", "rustix", - "serde", "smallvec", "thiserror", ] @@ -1134,7 +1090,6 @@ dependencies = [ "gix-actor", "gix-date", "gix-error", - "serde", ] [[package]] @@ -1193,7 +1148,6 @@ dependencies = [ "gix-utils", "gix-validate", "itoa", - "serde", "smallvec", "thiserror", "winnow", @@ -1215,7 +1169,6 @@ dependencies = [ "gix-path", "gix-quote", "parking_lot", - "serde", "tempfile", "thiserror", ] @@ -1239,7 +1192,6 @@ dependencies = [ "gix-traverse", "memmap2", "parking_lot", - "serde", "smallvec", "thiserror", "uluru", @@ -1320,7 +1272,6 @@ dependencies = [ "gix-utils", "maybe-async", "nonempty", - "serde", "thiserror", "winnow", ] @@ -1353,7 +1304,6 @@ dependencies = [ "gix-utils", "gix-validate", "memmap2", - 
"serde", "thiserror", "winnow", ] @@ -1391,7 +1341,6 @@ dependencies = [ "gix-revwalk", "gix-trace", "nonempty", - "serde", ] [[package]] @@ -1419,7 +1368,6 @@ dependencies = [ "bitflags", "gix-path", "libc", - "serde", "windows-sys 0.61.2", ] @@ -1433,7 +1381,6 @@ dependencies = [ "gix-hash", "gix-lock", "nonempty", - "serde", "thiserror", ] @@ -1509,7 +1456,6 @@ dependencies = [ "gix-quote", "gix-sec", "gix-url", - "serde", "thiserror", ] @@ -1539,7 +1485,6 @@ dependencies = [ "bstr", "gix-path", "percent-encoding", - "serde", "thiserror", ] @@ -1579,7 +1524,6 @@ dependencies = [ "gix-object", "gix-path", "gix-validate", - "serde", ] [[package]] @@ -1926,23 +1870,12 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "jwalk" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2735847566356cd2179a2a38264839308f7079fa96e6bd5a42d740460e003c56" -dependencies = [ - "crossbeam", - "rayon", -] - [[package]] name = "kstring" version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "558bf9508a558512042d3095138b1f7b8fe90c5467d94f9f1da28b3731c5dbd1" dependencies = [ - "serde", "static_assertions", ] @@ -2086,9 +2019,6 @@ name = "nonempty" version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9737e026353e5cd0736f98eddae28665118eb6f6600902a7f50db585621fecb6" -dependencies = [ - "serde", -] [[package]] name = "nu-ansi-term" @@ -2518,9 +2448,6 @@ name = "smallvec" version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" -dependencies = [ - "serde", -] [[package]] name = "stable_deref_trait" @@ -2546,7 +2473,6 @@ version = "0.2.1" dependencies = [ "anyhow", "bitflags", - "bstr", "clap", "clap_complete", "clap_complete_nushell", @@ -2555,14 +2481,12 @@ dependencies = [ "git2", "gitoxide-core", "gix", - "gix-config", - "gix-glob", - "gix-pathspec", 
"gix-submodule", "prodash", "serde", "tempfile", "thiserror", + "toml", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index d4bf458..6b4912f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -40,23 +40,15 @@ rustdoc-args = ["--cfg", "docsrs"] [dependencies] -bstr = { version = "1.12.1", default-features = false } # Gitoxide ops gix = { version = "0.80.0", default-features = false, features = [ - "attributes", "blocking-network-client", - "excludes", - "command", "parallel", - "serde", "status", "worktree-mutation" ] } -gitoxide-core = { version = "0.54.0", default-features = false, features = ["blocking-client", "organize", "clean", "serde"]} -gix-config = { version = "0.53.0", features = ["serde"] } +gitoxide-core = { version = "0.54.0", default-features = false, features = ["blocking-client"]} gix-submodule = "0.27.0" -gix-pathspec = "0.16.0" -gix-glob = "0.24.0" # CLI clap = { version = "4.6.0", features = [ @@ -99,6 +91,7 @@ figment = { version = "0.10.19", default-features = false, features = [ ] } tempfile = "3.27.0" criterion = "0.8.2" +toml = "0.8" [lints.rust] # Deny unsafe code unless explicitly allowed @@ -109,6 +102,8 @@ unused = { level = "warn", priority = -1 } missing_docs = "warn" # Warn about unreachable code unreachable_code = "warn" +# Allow cfg(coverage) and cfg(coverage_nightly) set by cargo-llvm-cov +unexpected_cfgs = { level = "warn", check-cfg = ['cfg(coverage)', 'cfg(coverage_nightly)'] } [lints.clippy] # Cargo-specific lints diff --git a/hk.pkl b/hk.pkl index 3c34cba..fd4dcd3 100644 --- a/hk.pkl +++ b/hk.pkl @@ -20,7 +20,7 @@ local linters = new Mapping { ["cargo_test"] = new Step { workspace_indicator = "Cargo.toml" glob = "src/**/*.rs" - check = "cargo nextest --manifest-path {{ workspace_indicator }} run --all-features --no-fail-fast -j 1" + check = "cargo nextest --manifest-path {{ workspace_indicator }} run --all-features --no-fail-fast" env = new Mapping { ["RUST_BACKTRACE"] = "1" } @@ -45,7 +45,7 @@ local linters = new Mapping { 
local ci = (linters) { ["cargo_test"] { - check = "cargo nextest --manifest-path {{ workspace_indicator }} run --all-features --fail-fast -j 1" + check = "cargo nextest --manifest-path {{ workspace_indicator }} run --all-features --fail-fast --profile ci" } } diff --git a/mise.toml b/mise.toml index f4366ce..b787525 100644 --- a/mise.toml +++ b/mise.toml @@ -23,7 +23,6 @@ uv = "latest" # Another runner for MCP servers. [env] CARGO_TARGET_DIR = "target" -RUST_TEST_THREADS = 1 HK_MISE = 1 [tasks.cleancache] diff --git a/sample_config/submod.toml b/sample_config/submod.toml index 7cbf849..54e8962 100644 --- a/sample_config/submod.toml +++ b/sample_config/submod.toml @@ -45,7 +45,7 @@ ignore = "dirty" # Override default ignore setting for all submodules # # ## `branch` # The submodule branch to check out. Defaults to the submodule's default branch (usually `main` or `master`). You may use `"."` (or aliases: `current`, `current-in-superproject`, `superproject`, `super`) to match the superproject branch. Do not use these as branch names in the submodule repository. -# TODO: Submit an issue if you encounter a branch name conflict. +# Submit an issue if you encounter a branch name conflict at https://github.com/bashandbone/submod/issues. # # ## `sparse_paths` # A list of relative paths or glob patterns to include in the sparse checkout. If omitted, includes all files. diff --git a/schemas/v1.0.0/submod_config_v1.0.0.json b/schemas/v1.0.0/submod_config_v1.0.0.json index d05a99a..ee5079c 100644 --- a/schemas/v1.0.0/submod_config_v1.0.0.json +++ b/schemas/v1.0.0/submod_config_v1.0.0.json @@ -58,7 +58,7 @@ }, "branch": { "type": "string", "description": "Branch to track in the submodule. Defaults to the submodule's default branch (usually main or master).\nUse \".\" or the aliases \"current\", \"current-in-superproject\", \"superproject\", or \"super\" to track the superproject's current branch. Do not use these strings as actual branch names in the submodule repository."
+ "description": "Branch to track in the submodule. Defaults to the submodule's default branch (usually main or master).\nUse \".\" or the aliases \"current\", \"current-in-superproject\", \"superproject\", or \"super\" to track the superproject's current branch. **Do not use these strings as actual branch names in the submodule repository.** If you need to track a branch with one of these names, use the full branch name (e.g., \"refs/heads/current\")." }, "sparse_paths": { "type": "array", diff --git a/scripts/run-tests.sh b/scripts/run-tests.sh index 870a778..7e92e0c 100755 --- a/scripts/run-tests.sh +++ b/scripts/run-tests.sh @@ -6,6 +6,10 @@ # Test runner script for submod integration tests # This script runs the comprehensive test suite with proper reporting +# +# Test parallelism is managed by nextest test groups in .config/nextest.toml. +# Integration tests that modify git repos run serially within their group; +# other tests (unit, config, contract) run in parallel. set -e @@ -81,132 +85,63 @@ while [[ $# -gt 0 ]]; do esac done -print_status "Starting submod integration test suite..." +print_status "Starting submod test suite..." # Build the project first print_status "Building submod binary..." -if $VERBOSE; then - cargo build --bin submod -else - cargo build --bin submod >/dev/null 2>&1 +PROFILE="test" +if [[ "$PERFORMANCE" == true ]]; then + PROFILE="bench" fi -# Force Rust tests to run serially to avoid git submodule race conditions -export RUST_TEST_THREADS=1 - -if [[ $? -eq 0 ]]; then - print_success "Build completed successfully" +if $VERBOSE; then + cargo build --bin submod --profile "$PROFILE" else - print_error "Build failed" - exit 1 + cargo build --bin submod --profile "$PROFILE" >/dev/null 2>&1 fi +print_success "Build completed successfully" -# Function to run a specific test module -run_test_module() { - local module=$1 - local description=$2 - - print_status "Running $description..." 
- - local cmd="cargo test --test $module" - if [[ -n "$FILTER" ]]; then - cmd="$cmd $FILTER" - fi - - if $VERBOSE; then - cmd="$cmd -- --nocapture" - fi - - if eval "$cmd"; then - print_success "$description completed successfully" - return 0 - else - print_error "$description failed" - return 1 - fi -} +# Build the nextest command +NEXTEST_ARGS=( + nextest --manifest-path ./Cargo.toml run + --all-features + --no-fail-fast +) -# Track test results -declare -a failed_tests=() -total_tests=0 +# Build the filterset expression +FILTERSET="" -# Core integration tests -if [[ -z "$FILTER" ]] || [[ "integration_tests" =~ $FILTER ]]; then - total_tests=$((total_tests + 1)) - if ! run_test_module "integration_tests" "Core integration tests"; then - failed_tests+=("integration_tests") - fi +# Exclude performance tests unless explicitly requested +if [[ "$PERFORMANCE" != true ]]; then + FILTERSET="not binary_id(submod::performance_tests)" fi -# Configuration tests -if [[ -z "$FILTER" ]] || [[ "config_tests" =~ $FILTER ]]; then - total_tests=$((total_tests + 1)) - if ! run_test_module "config_tests" "Configuration management tests"; then - failed_tests+=("config_tests") - fi -fi - -# Sparse checkout tests -if [[ -z "$FILTER" ]] || [[ "sparse_checkout_tests" =~ $FILTER ]]; then - total_tests=$((total_tests + 1)) - if ! run_test_module "sparse_checkout_tests" "Sparse checkout functionality tests"; then - failed_tests+=("sparse_checkout_tests") - fi -fi - -# Error handling tests -if [[ -z "$FILTER" ]] || [[ "error_handling_tests" =~ $FILTER ]]; then - total_tests=$((total_tests + 1)) - if ! 
run_test_module "error_handling_tests" "Error handling and edge case tests"; then - failed_tests+=("error_handling_tests") +# Apply filter if provided +if [[ -n "$FILTER" ]]; then + if [[ -n "$FILTERSET" ]]; then + FILTERSET="($FILTERSET) & test(/$FILTER/)" + else + FILTERSET="test(/$FILTER/)" fi fi -# Performance tests (optional) -if $PERFORMANCE; then - if [[ -z "$FILTER" ]] || [[ "performance_tests" =~ $FILTER ]]; then - total_tests=$((total_tests + 1)) - print_warning "Running performance tests (this may take a while)..." - if ! run_test_module "performance_tests" "Performance and stress tests"; then - failed_tests+=("performance_tests") - fi - fi +if [[ -n "$FILTERSET" ]]; then + NEXTEST_ARGS+=(-E "$FILTERSET") fi -# Run unit tests as well -if [[ -z "$FILTER" ]] || [[ "unit" =~ $FILTER ]]; then - print_status "Running unit tests..." - total_tests=$((total_tests + 1)) - - cmd="cargo test --lib" - if $VERBOSE; then - cmd="$cmd -- --nocapture" - fi - - if eval "$cmd"; then - print_success "Unit tests completed successfully" - else - print_error "Unit tests failed" - failed_tests+=("unit_tests") - fi +# Run the full suite in a single nextest invocation so that test groups +# can schedule serial and parallel tests optimally. +print_status "Running tests..." +if $VERBOSE; then + print_status "cargo ${NEXTEST_ARGS[*]}" fi -# Report results -echo "" -print_status "Test Suite Summary" -echo "==================" - -if [[ ${#failed_tests[@]} -eq 0 ]]; then - print_success "All $total_tests test modules passed! 🎉" +if cargo "${NEXTEST_ARGS[@]}"; then echo "" - echo "The submod tool is working correctly and ready for use." + print_success "All tests passed!" exit 0 else - print_error "${#failed_tests[@]} out of $total_tests test modules failed:" - for test in "${failed_tests[@]}"; do - echo " - $test" - done echo "" - echo "Please check the test output above for details on the failures." + print_error "Some tests failed. See output above for details." 
exit 1 fi diff --git a/src/commands.rs b/src/commands.rs index 3f8ead9..0d7c0be 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -101,7 +101,11 @@ pub enum Commands { )] branch: Option, - #[arg(short = 'i', long = "ignore", help = "What changes in the submodule git should ignore.")] + #[arg( + short = 'i', + long = "ignore", + help = "What changes in the submodule git should ignore." + )] ignore: Option, #[arg( @@ -112,10 +116,18 @@ pub enum Commands { )] sparse_paths: Option>, - #[arg(short = 'f', long = "fetch", help = "Sets the recursive fetch behavior for the submodule (like, if we should fetch its submodules).")] + #[arg( + short = 'f', + long = "fetch", + help = "Sets the recursive fetch behavior for the submodule (like, if we should fetch its submodules)." + )] fetch: Option, - #[arg(short = 'u', long = "update", help = "How git should update the submodule when you run `git submodule update`.")] + #[arg( + short = 'u', + long = "update", + help = "How git should update the submodule when you run `git submodule update`." + )] update: Option, #[arg(short = 's', long = "shallow", default_value = "false", action = clap::ArgAction::SetTrue, default_missing_value = "true", help = "If given, sets the submodule as a shallow clone. It will only fetch the last commit of the branch, not the full history.")] diff --git a/src/config.rs b/src/config.rs index 3975568..8f3e373 100644 --- a/src/config.rs +++ b/src/config.rs @@ -683,8 +683,7 @@ impl<'de> Deserialize<'de> for SubmoduleEntries { /// Accepts a map where each key maps to a [`SubmoduleEntry`], building both the /// `submodules` map and the `sparse_checkouts` map from each entry's `sparse_paths`. 
fn deserialize>(deserializer: D) -> Result { - let map: HashMap = - HashMap::deserialize(deserializer)?; + let map: HashMap = HashMap::deserialize(deserializer)?; let mut sparse_checkouts: HashMap> = HashMap::new(); for (name, entry) in &map { if let Some(paths) = &entry.sparse_paths { @@ -1118,3 +1117,996 @@ impl Provider for Config { Some(REPO) } } + +#[cfg(test)] +#[cfg_attr(coverage_nightly, coverage(off))] +mod tests { + use super::*; + + // ================================================================ + // SubmoduleDefaults::merge_from + // ================================================================ + + #[test] + fn test_defaults_merge_from_both_set() { + let base = SubmoduleDefaults { + ignore: Some(SerializableIgnore::All), + fetch_recurse: Some(SerializableFetchRecurse::Always), + update: Some(SerializableUpdate::Rebase), + }; + let other = SubmoduleDefaults { + ignore: Some(SerializableIgnore::Dirty), + fetch_recurse: None, + update: Some(SerializableUpdate::Merge), + }; + let merged = base.merge_from(other); + // other.ignore overrides + assert_eq!(merged.ignore, Some(SerializableIgnore::Dirty)); + // other.fetch_recurse is None → base preserved + assert_eq!(merged.fetch_recurse, Some(SerializableFetchRecurse::Always)); + // other.update overrides + assert_eq!(merged.update, Some(SerializableUpdate::Merge)); + } + + #[test] + fn test_defaults_merge_from_empty_other() { + let base = SubmoduleDefaults { + ignore: Some(SerializableIgnore::All), + fetch_recurse: Some(SerializableFetchRecurse::Never), + update: Some(SerializableUpdate::Checkout), + }; + let other = SubmoduleDefaults::default(); + let merged = base.merge_from(other); + // Base values should be preserved + assert_eq!(merged.ignore, Some(SerializableIgnore::All)); + assert_eq!(merged.fetch_recurse, Some(SerializableFetchRecurse::Never)); + assert_eq!(merged.update, Some(SerializableUpdate::Checkout)); + } + + #[test] + fn test_defaults_merge_from_empty_base() { + let base = 
SubmoduleDefaults::default(); + let other = SubmoduleDefaults { + ignore: Some(SerializableIgnore::Dirty), + fetch_recurse: Some(SerializableFetchRecurse::Always), + update: Some(SerializableUpdate::Merge), + }; + let merged = base.merge_from(other); + assert_eq!(merged.ignore, Some(SerializableIgnore::Dirty)); + assert_eq!(merged.fetch_recurse, Some(SerializableFetchRecurse::Always)); + assert_eq!(merged.update, Some(SerializableUpdate::Merge)); + } + + #[test] + fn test_defaults_merge_from_both_empty_gets_defaults() { + let base = SubmoduleDefaults::default(); + let other = SubmoduleDefaults::default(); + let merged = base.merge_from(other); + // Should fill in defaults via .or_else + assert_eq!(merged.ignore, Some(SerializableIgnore::default())); + assert_eq!( + merged.fetch_recurse, + Some(SerializableFetchRecurse::default()) + ); + assert_eq!(merged.update, Some(SerializableUpdate::default())); + } + + // ================================================================ + // SubmoduleEntry::is_local / is_remote + // ================================================================ + + #[test] + fn test_entry_is_local() { + let mut entry = SubmoduleEntry::new( + Some("./local-repo".to_string()), + None, + None, + None, + None, + None, + None, + None, + None, + ); + assert!(entry.is_local()); + + entry.url = Some("../sibling".to_string()); + assert!(entry.is_local()); + + entry.url = Some("/absolute/path".to_string()); + assert!(entry.is_local()); + + // Not local + entry.url = Some("https://github.com/repo".to_string()); + assert!(!entry.is_local()); + + // None url + entry.url = None; + assert!(!entry.is_local()); + } + + #[test] + fn test_entry_is_remote() { + let mut entry = SubmoduleEntry::new( + Some("https://github.com/user/repo".to_string()), + None, + None, + None, + None, + None, + None, + None, + None, + ); + assert!(entry.is_remote()); + + entry.url = Some("http://example.com/repo".to_string()); + assert!(entry.is_remote()); + + entry.url = 
Some("ssh://git@github.com/repo".to_string()); + assert!(entry.is_remote()); + + entry.url = Some("git@github.com:user/repo.git".to_string()); + assert!(entry.is_remote()); + + entry.url = Some("git://example.com/repo".to_string()); + assert!(entry.is_remote()); + + // Not remote + entry.url = Some("./local".to_string()); + assert!(!entry.is_remote()); + + entry.url = None; + assert!(!entry.is_remote()); + } + + #[test] + fn test_entry_neither_local_nor_remote() { + // A bare name isn't classified as either + let entry = SubmoduleEntry::new( + Some("just-a-name".to_string()), + None, + None, + None, + None, + None, + None, + None, + None, + ); + assert!(!entry.is_local()); + assert!(!entry.is_remote()); + } + + // ================================================================ + // SubmoduleEntry::from_gitmodules + // ================================================================ + + #[test] + fn test_entry_from_gitmodules_full() { + let mut map = HashMap::new(); + map.insert( + "url".to_string(), + "https://github.com/user/repo.git".to_string(), + ); + map.insert("path".to_string(), "libs/repo".to_string()); + map.insert("branch".to_string(), "main".to_string()); + map.insert("ignore".to_string(), "dirty".to_string()); + map.insert("update".to_string(), "rebase".to_string()); + map.insert("fetchRecurse".to_string(), "true".to_string()); + map.insert("active".to_string(), "true".to_string()); + map.insert("shallow".to_string(), "true".to_string()); + + let entry = SubmoduleEntry::from_gitmodules(&"repo".to_string(), map); + assert_eq!( + entry.url, + Some("https://github.com/user/repo.git".to_string()) + ); + assert_eq!(entry.path, Some("libs/repo".to_string())); + assert_eq!( + entry.branch, + Some(SerializableBranch::Name("main".to_string())) + ); + assert_eq!(entry.ignore, Some(SerializableIgnore::Dirty)); + assert_eq!(entry.update, Some(SerializableUpdate::Rebase)); + assert_eq!(entry.fetch_recurse, Some(SerializableFetchRecurse::Always)); + 
assert_eq!(entry.active, Some(true)); + assert_eq!(entry.shallow, Some(true)); + } + + #[test] + fn test_entry_from_gitmodules_minimal() { + let mut map = HashMap::new(); + map.insert( + "url".to_string(), + "https://example.com/repo.git".to_string(), + ); + + let entry = SubmoduleEntry::from_gitmodules(&"mymod".to_string(), map); + assert_eq!(entry.url, Some("https://example.com/repo.git".to_string())); + // path defaults to name when not in the map + assert!(entry.path.is_some()); + // active defaults to true, shallow to false + assert_eq!(entry.active, Some(true)); + assert_eq!(entry.shallow, Some(false)); + } + + #[test] + fn test_entry_from_gitmodules_invalid_values_silently_ignored() { + let mut map = HashMap::new(); + map.insert("url".to_string(), "https://example.com/repo".to_string()); + map.insert("ignore".to_string(), "INVALID".to_string()); + map.insert("update".to_string(), "BOGUS".to_string()); + map.insert("active".to_string(), "not-a-bool".to_string()); + + let entry = SubmoduleEntry::from_gitmodules(&"mod".to_string(), map); + // Invalid values should result in None (parsed with .ok()) + assert_eq!(entry.ignore, None); + assert_eq!(entry.update, None); + // Invalid bool parses to None, defaults to true + assert_eq!(entry.active, Some(true)); + } + + #[test] + fn test_entry_from_gitmodules_branch_dot_alias() { + let mut map = HashMap::new(); + map.insert("branch".to_string(), ".".to_string()); + map.insert("url".to_string(), "https://example.com/repo".to_string()); + + let entry = SubmoduleEntry::from_gitmodules(&"mod".to_string(), map); + assert_eq!( + entry.branch, + Some(SerializableBranch::CurrentInSuperproject) + ); + } + + // ================================================================ + // SubmoduleEntry::update_with_options + // ================================================================ + + #[test] + fn test_entry_update_with_options() { + let entry = SubmoduleEntry::new( + Some("https://example.com".to_string()), + 
Some("path".to_string()), + Some(SerializableBranch::Name("main".to_string())), + Some(SerializableIgnore::None), + Some(SerializableUpdate::Checkout), + Some(SerializableFetchRecurse::OnDemand), + Some(true), + Some(false), + None, + ); + + let opts = SubmoduleGitOptions { + ignore: Some(SerializableIgnore::All), + fetch_recurse: None, + branch: Some(SerializableBranch::Name("develop".to_string())), + update: None, + }; + + let updated = entry.update_with_options(opts); + assert_eq!(updated.ignore, Some(SerializableIgnore::All)); + assert_eq!( + updated.fetch_recurse, + Some(SerializableFetchRecurse::OnDemand) + ); // unchanged + assert_eq!( + updated.branch, + Some(SerializableBranch::Name("develop".to_string())) + ); + assert_eq!(updated.update, Some(SerializableUpdate::Checkout)); // unchanged + } + + #[test] + fn test_entry_update_with_empty_options_preserves() { + let entry = SubmoduleEntry::new( + Some("url".to_string()), + Some("path".to_string()), + Some(SerializableBranch::Name("main".to_string())), + Some(SerializableIgnore::Dirty), + Some(SerializableUpdate::Rebase), + Some(SerializableFetchRecurse::Always), + Some(true), + None, + None, + ); + // Default-derived SubmoduleGitOptions has all None fields + let opts = SubmoduleGitOptions { + ignore: None, + fetch_recurse: None, + branch: None, + update: None, + }; + let updated = entry.update_with_options(opts); + // None options don't override existing values + assert_eq!(updated.ignore, Some(SerializableIgnore::Dirty)); + assert_eq!(updated.update, Some(SerializableUpdate::Rebase)); + assert_eq!( + updated.branch, + Some(SerializableBranch::Name("main".to_string())) + ); + assert_eq!( + updated.fetch_recurse, + Some(SerializableFetchRecurse::Always) + ); + } + + // ================================================================ + // SubmoduleEntries: sparse checkout operations + // ================================================================ + + #[test] + fn test_entries_add_checkout_replace() { + 
let mut entries = SubmoduleEntries::default(); + entries.add_checkout("mod1".to_string(), &["src/".to_string()], false); + assert_eq!( + entries.sparse_checkouts().unwrap().get("mod1").unwrap(), + &vec!["src/".to_string()] + ); + + // Append + entries.add_checkout("mod1".to_string(), &["docs/".to_string()], false); + assert_eq!( + entries.sparse_checkouts().unwrap().get("mod1").unwrap(), + &vec!["src/".to_string(), "docs/".to_string()] + ); + + // Replace + entries.add_checkout("mod1".to_string(), &["lib/".to_string()], true); + assert_eq!( + entries.sparse_checkouts().unwrap().get("mod1").unwrap(), + &vec!["lib/".to_string()] + ); + } + + #[test] + fn test_entries_add_checkout_when_none() { + let mut entries = SubmoduleEntries { + submodules: Some(HashMap::new()), + sparse_checkouts: None, + }; + entries.add_checkout("mod1".to_string(), &["src/".to_string()], false); + assert!(entries.sparse_checkouts().is_some()); + assert_eq!( + entries.sparse_checkouts().unwrap().get("mod1").unwrap(), + &vec!["src/".to_string()] + ); + } + + #[test] + fn test_entries_remove_sparse_path() { + let mut entries = SubmoduleEntries::default(); + entries.add_checkout( + "mod1".to_string(), + &["src/".to_string(), "docs/".to_string()], + false, + ); + + entries.remove_sparse_path("mod1".to_string(), "src/".to_string()); + assert_eq!( + entries.sparse_checkouts().unwrap().get("mod1").unwrap(), + &vec!["docs/".to_string()] + ); + + // Remove last path → entry is cleaned up + entries.remove_sparse_path("mod1".to_string(), "docs/".to_string()); + assert!(!entries.sparse_checkouts().unwrap().contains_key("mod1")); + } + + #[test] + fn test_entries_add_sparse_path() { + let mut entries = SubmoduleEntries::default(); + entries.add_sparse_path("mod1".to_string(), "src/".to_string()); + assert_eq!( + entries.sparse_checkouts().unwrap().get("mod1").unwrap(), + &vec!["src/".to_string()] + ); + entries.add_sparse_path("mod1".to_string(), "docs/".to_string()); + assert_eq!( + 
entries.sparse_checkouts().unwrap().get("mod1").unwrap(), + &vec!["src/".to_string(), "docs/".to_string()] + ); + } + + #[test] + fn test_entries_add_sparse_path_when_none() { + let mut entries = SubmoduleEntries { + submodules: Some(HashMap::new()), + sparse_checkouts: None, + }; + entries.add_sparse_path("mod1".to_string(), "src/".to_string()); + assert!(entries.sparse_checkouts().is_some()); + } + + #[test] + fn test_entries_delete_checkout() { + let mut entries = SubmoduleEntries::default(); + entries.add_checkout("mod1".to_string(), &["src/".to_string()], false); + entries.delete_checkout("mod1".to_string()); + assert!(!entries.sparse_checkouts().unwrap().contains_key("mod1")); + } + + // ================================================================ + // SubmoduleEntries: update_entry + // ================================================================ + + #[test] + fn test_entries_update_entry_with_sparse() { + let mut entries = SubmoduleEntries::default(); + let entry = SubmoduleEntry { + url: Some("https://example.com/repo".to_string()), + path: Some("libs/repo".to_string()), + branch: None, + ignore: None, + update: None, + fetch_recurse: None, + active: Some(true), + shallow: None, + no_init: None, + sparse_paths: Some(vec!["src/".to_string()]), + }; + entries.update_entry("repo".to_string(), entry); + + assert!(entries.submodules().unwrap().contains_key("repo")); + assert_eq!( + entries.sparse_checkouts().unwrap().get("repo").unwrap(), + &vec!["src/".to_string()] + ); + } + + #[test] + fn test_entries_update_entry_removes_sparse_when_empty() { + let mut entries = SubmoduleEntries::default(); + // First add with sparse + let entry_with_sparse = SubmoduleEntry { + url: Some("url".to_string()), + path: Some("path".to_string()), + branch: None, + ignore: None, + update: None, + fetch_recurse: None, + active: Some(true), + shallow: None, + no_init: None, + sparse_paths: Some(vec!["src/".to_string()]), + }; + entries.update_entry("repo".to_string(), 
entry_with_sparse); + assert!(entries.sparse_checkouts().unwrap().contains_key("repo")); + + // Update without sparse → should remove from sparse map + let entry_no_sparse = SubmoduleEntry { + url: Some("url".to_string()), + path: Some("path".to_string()), + branch: None, + ignore: None, + update: None, + fetch_recurse: None, + active: Some(true), + shallow: None, + no_init: None, + sparse_paths: None, + }; + entries.update_entry("repo".to_string(), entry_no_sparse); + assert!(!entries.sparse_checkouts().unwrap().contains_key("repo")); + } + + // ================================================================ + // SubmoduleEntries: iteration and queries + // ================================================================ + + #[test] + fn test_entries_contains_key() { + let mut entries = SubmoduleEntries::default(); + assert!(!entries.contains_key("mod1")); + + let entry = SubmoduleEntry::new( + Some("url".to_string()), + Some("path".to_string()), + None, + None, + None, + None, + Some(true), + None, + None, + ); + entries = entries.add_submodule("mod1".to_string(), entry); + assert!(entries.contains_key("mod1")); + assert!(!entries.contains_key("mod2")); + } + + #[test] + fn test_entries_submodule_names() { + let mut entries = SubmoduleEntries::default(); + let entry = SubmoduleEntry::new( + Some("url".to_string()), + Some("path".to_string()), + None, + None, + None, + None, + Some(true), + None, + None, + ); + entries = entries.add_submodule("alpha".to_string(), entry.clone()); + entries = entries.add_submodule("beta".to_string(), entry); + let names = entries.submodule_names().unwrap(); + assert!(names.contains(&"alpha".to_string())); + assert!(names.contains(&"beta".to_string())); + assert_eq!(names.len(), 2); + } + + #[test] + fn test_entries_remove_submodule() { + let entry = SubmoduleEntry::new( + Some("url".to_string()), + Some("path".to_string()), + None, + None, + None, + None, + Some(true), + None, + None, + ); + let mut entries = 
SubmoduleEntries::default() + .add_submodule("mod1".to_string(), entry.clone()) + .add_submodule("mod2".to_string(), entry); + assert!(entries.contains_key("mod1")); + entries.remove_submodule("mod1"); + assert!(!entries.contains_key("mod1")); + assert!(entries.contains_key("mod2")); + } + + #[test] + fn test_entries_iter_joins_sparse() { + let mut entries = SubmoduleEntries::default(); + let entry = SubmoduleEntry::new( + Some("url".to_string()), + Some("path".to_string()), + None, + None, + None, + None, + Some(true), + None, + None, + ); + entries = entries.add_submodule("mod1".to_string(), entry); + entries.add_checkout("mod1".to_string(), &["src/".to_string()], false); + + let items: Vec<_> = entries.iter().collect(); + assert_eq!(items.len(), 1); + let (name, (_, sparse)) = &items[0]; + assert_eq!(*name, "mod1"); + assert_eq!(*sparse, vec!["src/".to_string()]); + } + + #[test] + fn test_entries_iter_no_sparse() { + let mut entries = SubmoduleEntries::default(); + let entry = SubmoduleEntry::new( + Some("url".to_string()), + Some("path".to_string()), + None, + None, + None, + None, + Some(true), + None, + None, + ); + entries = entries.add_submodule("mod1".to_string(), entry); + + let items: Vec<_> = entries.iter().collect(); + let (_, (_, sparse)) = &items[0]; + assert!(sparse.is_empty()); + } + + // ================================================================ + // Config::apply_option_default + // ================================================================ + + #[test] + fn test_apply_option_default_none_gets_default() { + let mut val: Option = None; + let default = Some(SerializableIgnore::Dirty); + Config::apply_option_default(&mut val, &default, SerializableIgnore::Unspecified); + assert_eq!(val, Some(SerializableIgnore::Dirty)); + } + + #[test] + fn test_apply_option_default_unspecified_gets_default() { + let mut val = Some(SerializableIgnore::Unspecified); + let default = Some(SerializableIgnore::All); + Config::apply_option_default(&mut val, 
&default, SerializableIgnore::Unspecified); + assert_eq!(val, Some(SerializableIgnore::All)); + } + + #[test] + fn test_apply_option_default_real_value_preserved() { + let mut val = Some(SerializableIgnore::Dirty); + let default = Some(SerializableIgnore::All); + Config::apply_option_default(&mut val, &default, SerializableIgnore::Unspecified); + assert_eq!(val, Some(SerializableIgnore::Dirty)); + } + + #[test] + fn test_apply_option_default_none_value_none_default() { + let mut val: Option = None; + let default: Option = None; + Config::apply_option_default(&mut val, &default, SerializableIgnore::Unspecified); + // Falls back to the sentinel + assert_eq!(val, Some(SerializableIgnore::Unspecified)); + } + + // ================================================================ + // Config::apply_defaults + // ================================================================ + + #[test] + fn test_config_apply_defaults() { + let defaults = SubmoduleDefaults { + ignore: Some(SerializableIgnore::Dirty), + fetch_recurse: Some(SerializableFetchRecurse::Always), + update: Some(SerializableUpdate::Rebase), + }; + let entry = SubmoduleEntry::new( + Some("url".to_string()), + Some("path".to_string()), + None, + None, + None, + None, + Some(true), + None, + None, + ); + let entries = SubmoduleEntries::default().add_submodule("mod1".to_string(), entry); + let config = Config::new(defaults, entries); + + let applied = config.apply_defaults(); + let sub = applied.submodules.get("mod1").unwrap(); + // Submodule had None → should get defaults + assert_eq!(sub.ignore, Some(SerializableIgnore::Dirty)); + assert_eq!(sub.fetch_recurse, Some(SerializableFetchRecurse::Always)); + assert_eq!(sub.update, Some(SerializableUpdate::Rebase)); + } + + #[test] + fn test_config_apply_defaults_entry_overrides() { + let defaults = SubmoduleDefaults { + ignore: Some(SerializableIgnore::Dirty), + fetch_recurse: Some(SerializableFetchRecurse::Always), + update: Some(SerializableUpdate::Rebase), + }; + 
let entry = SubmoduleEntry::new( + Some("url".to_string()), + Some("path".to_string()), + None, + Some(SerializableIgnore::All), // explicit override + None, + None, + Some(true), + None, + None, + ); + let entries = SubmoduleEntries::default().add_submodule("mod1".to_string(), entry); + let config = Config::new(defaults, entries); + + let applied = config.apply_defaults(); + let sub = applied.submodules.get("mod1").unwrap(); + // Explicit override preserved + assert_eq!(sub.ignore, Some(SerializableIgnore::All)); + // Others get defaults + assert_eq!(sub.fetch_recurse, Some(SerializableFetchRecurse::Always)); + assert_eq!(sub.update, Some(SerializableUpdate::Rebase)); + } + + #[test] + fn test_config_apply_defaults_no_submodules() { + let config = Config::default(); + let applied = config.apply_defaults(); + // Should not panic + assert!(applied.submodules.submodules().unwrap().is_empty()); + } + + // ================================================================ + // SubmoduleAddOptions conversions + // ================================================================ + + #[test] + fn test_add_options_into_submodule_entry() { + let opts = SubmoduleAddOptions { + name: "mymod".to_string(), + path: PathBuf::from("libs/mymod"), + url: "https://example.com/repo.git".to_string(), + branch: Some(SerializableBranch::Name("main".to_string())), + ignore: Some(SerializableIgnore::Dirty), + update: None, + fetch_recurse: None, + shallow: true, + no_init: false, + }; + let entry = opts.into_submodule_entry(); + assert_eq!(entry.url, Some("https://example.com/repo.git".to_string())); + assert_eq!(entry.path, Some("libs/mymod".to_string())); + assert_eq!( + entry.branch, + Some(SerializableBranch::Name("main".to_string())) + ); + assert_eq!(entry.shallow, Some(true)); + assert_eq!(entry.active, Some(true)); // no_init=false → active=true + } + + #[test] + fn test_add_options_into_entry_no_init() { + let opts = SubmoduleAddOptions { + name: "mymod".to_string(), + path: 
PathBuf::from("libs/mymod"), + url: "https://example.com/repo.git".to_string(), + branch: None, + ignore: None, + update: None, + fetch_recurse: None, + shallow: false, + no_init: true, + }; + let entry = opts.into_submodule_entry(); + assert_eq!(entry.active, Some(false)); // no_init=true → active=false + assert_eq!(entry.no_init, Some(true)); + } + + #[test] + fn test_add_options_from_tuple_fallbacks() { + // When url is None, falls back to path, then name + let entry = SubmoduleEntry { + url: None, + path: Some("libs/mymod".to_string()), + branch: None, + ignore: None, + update: None, + fetch_recurse: None, + active: None, + shallow: None, + no_init: None, + sparse_paths: None, + }; + let opts = SubmoduleAddOptions::from_submodule_entries_tuple(("mymod".to_string(), entry)); + // url fallback: path + assert_eq!(opts.url, "libs/mymod"); + assert_eq!(opts.path, PathBuf::from("libs/mymod")); + } + + #[test] + fn test_add_options_from_tuple_no_url_no_path() { + let entry = SubmoduleEntry { + url: None, + path: None, + branch: None, + ignore: None, + update: None, + fetch_recurse: None, + active: None, + shallow: None, + no_init: None, + sparse_paths: None, + }; + let opts = SubmoduleAddOptions::from_submodule_entries_tuple(("mymod".to_string(), entry)); + // Falls back to name for both url and path + assert_eq!(opts.url, "mymod"); + assert_eq!(opts.path, PathBuf::from("mymod")); + } + + // ================================================================ + // SubmoduleEntry::git_options + // ================================================================ + + #[test] + fn test_entry_git_options() { + let entry = SubmoduleEntry::new( + Some("url".to_string()), + Some("path".to_string()), + Some(SerializableBranch::Name("dev".to_string())), + Some(SerializableIgnore::All), + Some(SerializableUpdate::Merge), + Some(SerializableFetchRecurse::Never), + Some(true), + None, + None, + ); + let opts = entry.git_options(); + assert_eq!( + opts.branch, + 
Some(SerializableBranch::Name("dev".to_string())) + ); + assert_eq!(opts.ignore, Some(SerializableIgnore::All)); + assert_eq!(opts.update, Some(SerializableUpdate::Merge)); + assert_eq!(opts.fetch_recurse, Some(SerializableFetchRecurse::Never)); + } + + // ================================================================ + // SubmoduleEntries: serialization roundtrip + // ================================================================ + + #[test] + fn test_entries_serde_roundtrip() { + let mut entries = SubmoduleEntries::default(); + let entry = SubmoduleEntry { + url: Some("https://example.com/repo".to_string()), + path: Some("libs/repo".to_string()), + branch: Some(SerializableBranch::Name("main".to_string())), + ignore: Some(SerializableIgnore::Dirty), + update: Some(SerializableUpdate::Rebase), + fetch_recurse: Some(SerializableFetchRecurse::Always), + active: Some(true), + shallow: Some(false), + no_init: None, + sparse_paths: Some(vec!["src/".to_string()]), + }; + entries = entries.add_submodule("mymod".to_string(), entry); + + let serialized = toml::to_string(&entries).unwrap(); + let deserialized: SubmoduleEntries = toml::from_str(&serialized).unwrap(); + + assert!(deserialized.submodules().unwrap().contains_key("mymod")); + let de_entry = deserialized.submodules().unwrap().get("mymod").unwrap(); + assert_eq!(de_entry.url, Some("https://example.com/repo".to_string())); + assert_eq!( + de_entry.branch, + Some(SerializableBranch::Name("main".to_string())) + ); + assert_eq!(de_entry.ignore, Some(SerializableIgnore::Dirty)); + // Sparse paths should be in the sparse_checkouts map + assert!( + deserialized + .sparse_checkouts() + .unwrap() + .contains_key("mymod") + ); + } + + // ================================================================ + // OtherSubmoduleSettings::name_from_url + // ================================================================ + + #[test] + fn test_other_settings_name_from_url() { + assert_eq!( + 
OtherSubmoduleSettings::name_from_url("https://github.com/user/repo.git"), + "repo" + ); + assert_eq!( + OtherSubmoduleSettings::name_from_url("git@github.com:user/lib.git"), + "lib" + ); + assert_eq!( + OtherSubmoduleSettings::name_from_url("https://github.com/user/repo/"), + "repo" + ); + assert_eq!(OtherSubmoduleSettings::name_from_url("simple"), "simple"); + } + + // ================================================================ + // Config full TOML roundtrip + // ================================================================ + + #[test] + fn test_config_toml_roundtrip() { + let toml_str = r#" +[defaults] +ignore = "dirty" +update = "rebase" + +[mymod] +path = "libs/mymod" +url = "https://example.com/repo.git" +branch = "main" +active = true +"#; + let config: Config = toml::from_str(toml_str).unwrap(); + assert_eq!(config.defaults.ignore, Some(SerializableIgnore::Dirty)); + assert_eq!(config.defaults.update, Some(SerializableUpdate::Rebase)); + assert!(config.submodules.contains_key("mymod")); + let entry = config.submodules.get("mymod").unwrap(); + assert_eq!(entry.url, Some("https://example.com/repo.git".to_string())); + assert_eq!( + entry.branch, + Some(SerializableBranch::Name("main".to_string())) + ); + } + + #[test] + fn test_config_toml_branch_aliases() { + let toml_str = r#" +[mymod] +path = "libs/mymod" +url = "https://example.com/repo.git" +branch = "." 
+active = true +"#; + let config: Config = toml::from_str(toml_str).unwrap(); + let entry = config.submodules.get("mymod").unwrap(); + assert_eq!( + entry.branch, + Some(SerializableBranch::CurrentInSuperproject) + ); + + let toml_str2 = r#" +[mymod] +path = "libs/mymod" +url = "https://example.com/repo.git" +branch = "super" +active = true +"#; + let config2: Config = toml::from_str(toml_str2).unwrap(); + let entry2 = config2.submodules.get("mymod").unwrap(); + assert_eq!( + entry2.branch, + Some(SerializableBranch::CurrentInSuperproject) + ); + } + + // ================================================================ + // SubmoduleGitOptions + // ================================================================ + + #[test] + fn test_git_options_new() { + let opts = SubmoduleGitOptions::new( + Some(SerializableIgnore::All), + Some(SerializableFetchRecurse::Always), + Some(SerializableBranch::Name("main".to_string())), + Some(SerializableUpdate::Merge), + ); + assert_eq!(opts.ignore, Some(SerializableIgnore::All)); + assert_eq!(opts.fetch_recurse, Some(SerializableFetchRecurse::Always)); + } + + // ================================================================ + // SubmoduleEntries::from_gitmodules + // ================================================================ + + #[test] + fn test_entries_from_gitmodules() { + let mut outer = HashMap::new(); + let mut inner = HashMap::new(); + inner.insert( + "url".to_string(), + "https://example.com/repo.git".to_string(), + ); + inner.insert("path".to_string(), "libs/repo".to_string()); + outer.insert("repo".to_string(), inner); + + let entries = SubmoduleEntries::from_gitmodules(outer); + assert!(entries.contains_key("repo")); + let entry = entries.get("repo").unwrap(); + assert_eq!(entry.url, Some("https://example.com/repo.git".to_string())); + } + + // ================================================================ + // Provider trait + // ================================================================ + + #[test] + 
fn test_config_provider_metadata() { + let config = Config::default(); + let meta = config.metadata(); + assert_eq!(meta.name, "CLI arguments"); + } + + #[test] + fn test_config_provider_profile() { + let config = Config::default(); + assert!(config.profile().is_some()); + } + + #[test] + fn test_config_provider_data() { + let config = Config::default(); + let data = config.data(); + assert!(data.is_ok()); + } +} diff --git a/src/git_manager.rs b/src/git_manager.rs index 040002b..e973fcb 100644 --- a/src/git_manager.rs +++ b/src/git_manager.rs @@ -168,14 +168,19 @@ impl GitManager { entry.sparse_paths = Some(paths); if let Some(ref stored_paths) = entry.sparse_paths { // Also populate sparse_checkouts so consumers using sparse_checkouts() see the paths - self.config.submodules.add_checkout(name.clone(), stored_paths, true); + self.config + .submodules + .add_checkout(name.clone(), stored_paths, true); } } // Normalize: convert Unspecified variants to None so they serialize cleanly if matches!(entry.ignore, Some(SerializableIgnore::Unspecified)) { entry.ignore = None; } - if matches!(entry.fetch_recurse, Some(SerializableFetchRecurse::Unspecified)) { + if matches!( + entry.fetch_recurse, + Some(SerializableFetchRecurse::Unspecified) + ) { entry.fetch_recurse = None; } if matches!(entry.update, Some(SerializableUpdate::Unspecified)) { @@ -201,7 +206,9 @@ impl GitManager { for (name, entry) in self.config.get_submodules() { // Determine whether this name needs quoting (contains TOML-special characters). // Simple names (alphanumeric, hyphens, underscores) can use the bare [name] form. 
- let needs_quoting = name.chars().any(|c| !c.is_alphanumeric() && c != '-' && c != '_'); + let needs_quoting = name + .chars() + .any(|c| !c.is_alphanumeric() && c != '-' && c != '_'); let escaped_name = name.replace('\\', "\\\\").replace('"', "\\\""); let section_header = if needs_quoting { format!("[\"{escaped_name}\"]") } else { @@ -222,15 +229,24 @@ impl GitManager { output.push_str(&section_header); output.push('\n'); if let Some(path) = &entry.path { - output.push_str(&format!("path = \"{}\"\n", path.replace('\\', "\\\\").replace('"', "\\\""))); + output.push_str(&format!( + "path = \"{}\"\n", + path.replace('\\', "\\\\").replace('"', "\\\"") + )); } if let Some(url) = &entry.url { - output.push_str(&format!("url = \"{}\"\n", url.replace('\\', "\\\\").replace('"', "\\\""))); + output.push_str(&format!( + "url = \"{}\"\n", + url.replace('\\', "\\\\").replace('"', "\\\"") + )); } if let Some(branch) = &entry.branch { let val = branch.to_string(); if !val.is_empty() { - output.push_str(&format!("branch = \"{}\"\n", val.replace('\\', "\\\\").replace('"', "\\\""))); + output.push_str(&format!( + "branch = \"{}\"\n", + val.replace('\\', "\\\\").replace('"', "\\\"") + )); } } if let Some(ignore) = &entry.ignore { @@ -263,7 +279,9 @@ impl GitManager { if !sparse_paths.is_empty() { let joined = sparse_paths .iter() - .map(|p| format!("\"{}\"", p.replace('\\', "\\\\").replace('"', "\\\""))) + .map(|p| { + format!("\"{}\"", p.replace('\\', "\\\\").replace('"', "\\\"")) + }) .collect::<Vec<_>>() .join(", "); output.push_str(&format!("sparse_paths = [{joined}]\n")); @@ -272,8 +290,9 @@ impl GitManager { } } - std::fs::write(&self.config_path, &output) - .map_err(|e| SubmoduleError::ConfigError(format!("Failed to write config file: {e}")))?; + std::fs::write(&self.config_path, &output).map_err(|e| { + SubmoduleError::ConfigError(format!("Failed to write config file: {e}")) + })?; Ok(()) } @@ -449,7 +468,11 @@ impl GitManager { shallow: shallow.unwrap_or(false), no_init, }; - match 
self.git_ops.add_submodule(&opts).map_err(Self::map_git_ops_error) { + match self + .git_ops + .add_submodule(&opts) + .map_err(Self::map_git_ops_error) + { Ok(()) => { // Configure after successful submodule creation (clone/init handled by the underlying backend, currently the git CLI) self.configure_submodule_post_creation(&name, &path, sparse_paths.clone())?; @@ -710,7 +733,8 @@ impl GitManager { shallow: config.shallow.unwrap_or(false), no_init: false, }; - self.git_ops.add_submodule(&opts) + self.git_ops + .add_submodule(&opts) .map_err(Self::map_git_ops_error)?; } else { // Submodule is registered, just initialize and update using GitOperations @@ -871,15 +895,24 @@ impl GitManager { fn entry_to_kv_lines(entry: &SubmoduleEntry) -> Vec<(String, String)> { let mut kv: Vec<(String, String)> = Vec::new(); if let Some(path) = &entry.path { - kv.push(("path".into(), format!("\"{}\"", path.replace('\\', "\\\\").replace('"', "\\\"")))); + kv.push(( + "path".into(), + format!("\"{}\"", path.replace('\\', "\\\\").replace('"', "\\\"")), + )); } if let Some(url) = &entry.url { - kv.push(("url".into(), format!("\"{}\"", url.replace('\\', "\\\\").replace('"', "\\\"")))); + kv.push(( + "url".into(), + format!("\"{}\"", url.replace('\\', "\\\\").replace('"', "\\\"")), + )); } if let Some(branch) = &entry.branch { let val = branch.to_string(); if !val.is_empty() { - kv.push(("branch".into(), format!("\"{}\"", val.replace('\\', "\\\\").replace('"', "\\\"")))); + kv.push(( + "branch".into(), + format!("\"{}\"", val.replace('\\', "\\\\").replace('"', "\\\"")), + )); } } if let Some(ignore) = &entry.ignore { @@ -922,8 +955,17 @@ impl GitManager { } /// Known submodule key names (used to identify which lines to update vs. preserve). 
- const KNOWN_SUBMODULE_KEYS: &'static [&'static str] = - &["path", "url", "branch", "ignore", "fetch", "update", "active", "shallow", "sparse_paths"]; + const KNOWN_SUBMODULE_KEYS: &'static [&'static str] = &[ + "path", + "url", + "branch", + "ignore", + "fetch", + "update", + "active", + "shallow", + "sparse_paths", + ]; /// Known [defaults] key names. const KNOWN_DEFAULTS_KEYS: &'static [&'static str] = &["ignore", "fetch", "update"]; @@ -966,8 +1008,11 @@ impl GitManager { }; // Build the current submodule map sorted by name for deterministic append order - let current_entries: std::collections::BTreeMap = - self.config.get_submodules().map(|(n, e)| (n.clone(), e)).collect(); + let current_entries: std::collections::BTreeMap = self + .config + .get_submodules() + .map(|(n, e)| (n.clone(), e)) + .collect(); // Track which names appeared in the existing file (so we know what to append) let mut seen_names: std::collections::HashSet = std::collections::HashSet::new(); @@ -1014,15 +1059,21 @@ impl GitManager { let mut kv = Vec::new(); if let Some(ignore) = &defaults.ignore { let val = ignore.to_string(); - if !val.is_empty() { kv.push(("ignore".into(), format!("\"{val}\""))); } + if !val.is_empty() { + kv.push(("ignore".into(), format!("\"{val}\""))); + } } if let Some(fetch_recurse) = &defaults.fetch_recurse { let val = fetch_recurse.to_string(); - if !val.is_empty() { kv.push(("fetch".into(), format!("\"{val}\""))); } + if !val.is_empty() { + kv.push(("fetch".into(), format!("\"{val}\""))); + } } if let Some(update) = &defaults.update { let val = update.to_string(); - if !val.is_empty() { kv.push(("update".into(), format!("\"{val}\""))); } + if !val.is_empty() { + kv.push(("update".into(), format!("\"{val}\""))); + } } kv }; @@ -1046,11 +1097,8 @@ impl GitManager { // Rewrite [defaults] section preserving comments output.push_str(header); output.push('\n'); - let new_body = Self::merge_section_body( - body, - &defaults_kv, - Self::KNOWN_DEFAULTS_KEYS, - ); + let 
new_body = + Self::merge_section_body(body, &defaults_kv, Self::KNOWN_DEFAULTS_KEYS); for line in &new_body { output.push_str(line); output.push('\n'); } @@ -1085,8 +1133,9 @@ impl GitManager { // Append submodule sections that weren't in the existing file (sorted for determinism) for (name, entry) in &current_entries { if !seen_names.contains(name.as_str()) { - let needs_quoting = - name.chars().any(|c| !c.is_alphanumeric() && c != '-' && c != '_'); + let needs_quoting = name + .chars() + .any(|c| !c.is_alphanumeric() && c != '-' && c != '_'); let escaped_name = name.replace('\\', "\\\\").replace('"', "\\\""); let section_header = if needs_quoting { format!("[\"{escaped_name}\"]") } else { @@ -1102,8 +1151,9 @@ impl GitManager { } } - std::fs::write(&self.config_path, &output) - .map_err(|e| SubmoduleError::ConfigError(format!("Failed to write config file: {e}")))?; + std::fs::write(&self.config_path, &output).map_err(|e| { + SubmoduleError::ConfigError(format!("Failed to write config file: {e}")) + })?; Ok(()) } @@ -1117,11 +1167,12 @@ impl GitManager { known_keys: &[&str], ) -> Vec<String> { // Build a lookup of new values by key - let kv_map: std::collections::HashMap<&str, &str> = - new_kv.iter().map(|(k, v)| (k.as_str(), v.as_str())).collect(); + let kv_map: std::collections::HashMap<&str, &str> = new_kv + .iter() + .map(|(k, v)| (k.as_str(), v.as_str())) + .collect(); - let mut emitted_keys: std::collections::HashSet<&str> = - std::collections::HashSet::new(); + let mut emitted_keys: std::collections::HashSet<&str> = std::collections::HashSet::new(); let mut result: Vec<String> = Vec::new(); for line in body { @@ -1270,7 +1321,9 @@ impl GitManager { // Update the entry in config let mut updated = entry.clone(); updated.active = Some(false); - self.config.submodules.update_entry(name.to_string(), updated); + self.config + .submodules + .update_entry(name.to_string(), updated); self.write_full_config()?; println!("Disabled submodule '{name}'."); @@ -1373,19 +1426,20 @@ impl GitManager { })?
.clone(); - let new_path = path.as_ref().map(|p| { - p.to_string_lossy().to_string() - }); + let new_path = path.as_ref().map(|p| p.to_string_lossy().to_string()); // If path is changing, delete and re-add if let Some(ref np) = new_path { let old_path = entry.path.as_deref().unwrap_or(name); if np != old_path { - let sub_url = url.as_deref() + let sub_url = url + .as_deref() .or(entry.url.as_deref()) - .ok_or_else(|| SubmoduleError::ConfigError( - "Cannot re-clone submodule: no URL available.".to_string(), - ))? + .ok_or_else(|| { + SubmoduleError::ConfigError( + "Cannot re-clone submodule: no URL available.".to_string(), + ) + })? .to_string(); // Delete old then re-add at new path @@ -1401,7 +1455,8 @@ impl GitManager { // Compute effective sparse paths: caller's value if provided, else preserve existing let effective_sparse = if let Some(ref sp) = sparse_paths { - let paths: Vec = sp.iter().map(|p| p.to_string_lossy().to_string()).collect(); + let paths: Vec = + sp.iter().map(|p| p.to_string_lossy().to_string()).collect(); if paths.is_empty() { None } else { Some(paths) } } else { entry.sparse_paths.clone().filter(|v| !v.is_empty()) @@ -1485,7 +1540,9 @@ impl GitManager { updated.sparse_paths = Some(new_paths); } } - self.config.submodules.update_entry(name.to_string(), updated); + self.config + .submodules + .update_entry(name.to_string(), updated); } self.write_full_config()?; @@ -1519,19 +1576,13 @@ impl GitManager { // Snapshot entries before deleting (needed for reinit) let snapshots: Vec<(String, SubmoduleEntry)> = targets .iter() - .filter_map(|n| { - self.config - .get_submodule(n) - .map(|e| (n.clone(), e.clone())) - }) + .filter_map(|n| self.config.get_submodule(n).map(|e| (n.clone(), e.clone()))) .collect(); // Validate all targets exist before starting for name in &targets { if self.config.get_submodule(name).is_none() { - return Err(SubmoduleError::SubmoduleNotFound { - name: name.clone(), - }); + return Err(SubmoduleError::SubmoduleNotFound { name: 
name.clone() }); } } @@ -1546,22 +1597,13 @@ impl GitManager { let url = match entry.url.clone() { Some(u) if !u.is_empty() => u, _ => { - eprintln!( - "Skipping reinit of '{name}': no URL in config entry." - ); + eprintln!("Skipping reinit of '{name}': no URL in config entry."); continue; } }; println!("🔄 Reinitializing submodule '{name}'..."); - let path = entry - .path - .as_deref() - .unwrap_or(&name) - .to_string(); - let sparse = entry - .sparse_paths - .clone() - .filter(|paths| !paths.is_empty()); + let path = entry.path.as_deref().unwrap_or(&name).to_string(); + let sparse = entry.sparse_paths.clone().filter(|paths| !paths.is_empty()); self.add_submodule( name.clone(), path.into(), @@ -1613,10 +1655,7 @@ impl GitManager { })?; // Build a Config from the SubmoduleEntries - let config = Config::new( - crate::config::SubmoduleDefaults::default(), - entries, - ); + let config = Config::new(crate::config::SubmoduleDefaults::default(), entries); // Serialize using write_full_config logic but to the output path let tmp_manager = GitManager { diff --git a/src/git_ops/gix_ops.rs b/src/git_ops/gix_ops.rs index 00be733..4f3758a 100644 --- a/src/git_ops/gix_ops.rs +++ b/src/git_ops/gix_ops.rs @@ -21,9 +21,7 @@ fn gix_file_from_bytes(bytes: Vec) -> Result> { } use super::{DetailedSubmoduleStatus, GitConfig, GitOperations, SubmoduleStatusFlags}; -use crate::config::{ - SubmoduleAddOptions, SubmoduleEntries, SubmoduleUpdateOptions, -}; +use crate::config::{SubmoduleAddOptions, SubmoduleEntries, SubmoduleUpdateOptions}; use crate::options::{ConfigLevel, GitmodulesConvert}; use crate::utilities; diff --git a/src/git_ops/mod.rs b/src/git_ops/mod.rs index b819b80..f695993 100644 --- a/src/git_ops/mod.rs +++ b/src/git_ops/mod.rs @@ -202,8 +202,9 @@ impl GitOpsManager { .to_path_buf(); // git2 is the required backend — propagate its reopen error. 
- self.git2_ops = Git2Operations::new(Some(&workdir)) - .with_context(|| format!("Failed to reopen git2 repository at {}", workdir.display()))?; + self.git2_ops = Git2Operations::new(Some(&workdir)).with_context(|| { + format!("Failed to reopen git2 repository at {}", workdir.display()) + })?; // gix is an optional optimistic backend — log failures but don't fail. match GixOperations::new(Some(&workdir)) { @@ -302,7 +303,9 @@ impl GitOperations for GitOpsManager { |git2| git2.add_submodule(opts), ) .or_else(|git2_err| { - let workdir = self.git2_ops.workdir() + let workdir = self + .git2_ops + .workdir() .ok_or_else(|| anyhow::anyhow!("Repository has no working directory"))?; // Clean up potentially partially initialized submodule path before fallback @@ -337,14 +340,22 @@ impl GitOperations for GitOpsManager { if gitconfig_path.exists() { // Remove by name (our submodule name) let _ = std::process::Command::new("git") - .args(["config", "--remove-section", &format!("submodule.{}", opts.name)]) + .args([ + "config", + "--remove-section", + &format!("submodule.{}", opts.name), + ]) .current_dir(workdir) .output(); // Remove by path (git2 uses path as key when name != path) let path_key = opts.path.display().to_string(); if path_key != opts.name { let _ = std::process::Command::new("git") - .args(["config", "--remove-section", &format!("submodule.{path_key}")]) + .args([ + "config", + "--remove-section", + &format!("submodule.{path_key}"), + ]) .current_dir(workdir) .output(); } diff --git a/src/git_ops/simple_gix.rs b/src/git_ops/simple_gix.rs index 95bf3bb..f52dcce 100644 --- a/src/git_ops/simple_gix.rs +++ b/src/git_ops/simple_gix.rs @@ -8,7 +8,9 @@ //! This module is adapted and simplified from the `gix` CLI (https://github.com/GitoxideLabs/gitoxide/tree/main/src/) and its supporting `gitoxide-core` crate. 
use anyhow::Result; -use gitoxide_core::repository::fetch::{Options as FetchOptions, PROGRESS_RANGE as FetchProgressRange}; +use gitoxide_core::repository::fetch::{ + Options as FetchOptions, PROGRESS_RANGE as FetchProgressRange, +}; use gix::{features::progress, progress::prodash}; use prodash::render::line; use std::io::{stderr, stdout}; diff --git a/src/lib.rs b/src/lib.rs index 04dac36..eb07705 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,6 +1,7 @@ // SPDX-FileCopyrightText: 2025 Adam Poulemanos <89049923+bashandbone@users.noreply.github.com> // // SPDX-License-Identifier: LicenseRef-PlainMIT OR MIT +#![cfg_attr(coverage_nightly, feature(coverage_attribute))] //! A Rust CLI tool for managing Git submodules with enhanced features and user-friendly configuration. //! This module is exposed for integration testing; it is not intended for public use and may contain unstable APIs. diff --git a/src/main.rs b/src/main.rs index 2569c5b..cb228db 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,6 +1,7 @@ // SPDX-FileCopyrightText: 2025 Adam Poulemanos <89049923+bashandbone@users.noreply.github.com> // // SPDX-License-Identifier: LicenseRef-PlainMIT OR MIT +#![cfg_attr(coverage_nightly, feature(coverage_attribute))] #![doc = r" Main entry point for the submod CLI tool. 
@@ -36,6 +37,7 @@ use crate::utilities::{get_name, get_sparse_paths, set_path}; use anyhow::Result; use clap::Parser; use clap_complete::generate; +#[cfg_attr(coverage_nightly, coverage(off))] fn main() -> Result<()> { let cli = Cli::parse(); // config-path is always set because it has a default value, "submod.toml" @@ -101,7 +103,11 @@ fn main() -> Result<()> { .map_err(|e| anyhow::anyhow!("Failed to create manager: {}", e))?; // Collect names first to avoid borrow conflict - let names: Vec = manager.config().get_submodules().map(|(n, _)| n.clone()).collect(); + let names: Vec = manager + .config() + .get_submodules() + .map(|(n, _)| n.clone()) + .collect(); for name in &names { manager .init_submodule(name) @@ -113,7 +119,11 @@ fn main() -> Result<()> { .map_err(|e| anyhow::anyhow!("Failed to create manager: {}", e))?; // Collect names first to avoid borrow conflict - let names: Vec = manager.config().get_submodules().map(|(n, _)| n.clone()).collect(); + let names: Vec = manager + .config() + .get_submodules() + .map(|(n, _)| n.clone()) + .collect(); if names.is_empty() { println!("No submodules configured"); } else { @@ -164,7 +174,11 @@ fn main() -> Result<()> { .map_err(|e| anyhow::anyhow!("Failed to check submodules: {}", e))?; // Collect names first to avoid borrow conflict - let names: Vec = manager.config().get_submodules().map(|(n, _)| n.clone()).collect(); + let names: Vec = manager + .config() + .get_submodules() + .map(|(n, _)| n.clone()) + .collect(); for name in &names { manager .init_submodule(name) diff --git a/src/options.rs b/src/options.rs index 7b9ab27..4e463c4 100644 --- a/src/options.rs +++ b/src/options.rs @@ -386,7 +386,7 @@ impl std::fmt::Display for SerializableFetchRecurse { } /// Serializable enum for [`Branch`] config -#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize)] +#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] pub enum SerializableBranch { /// Use the same name for remote's branch name as the 
name of the currently activate branch in the superproject. /// This is a special value in git's settings. In a .git/config or .gitmodules it's represented by a period: `.`. @@ -395,16 +395,27 @@ pub enum SerializableBranch { Name(String), } +impl Serialize for SerializableBranch { + fn serialize(&self, serializer: S) -> Result { + serializer.serialize_str(&self.to_gitmodules()) + } +} + impl<'de> Deserialize<'de> for SerializableBranch { /// Deserialize from a plain string using the same logic as [`FromStr`]. - /// Accepts `"."`, `"current"`, `"current-in-super-project"`, `"superproject"`, or `"super"` - /// as [`CurrentInSuperproject`](SerializableBranch::CurrentInSuperproject); all other strings + /// Accepts `"."`, `"current"`, `"current-in-super-project"`, `"current-in-superproject"`, + /// `"superproject"`, or `"super"` as + /// [`CurrentInSuperproject`](SerializableBranch::CurrentInSuperproject); all other strings /// become [`Name`](SerializableBranch::Name). fn deserialize>(deserializer: D) -> Result { let s = String::deserialize(deserializer)?; - SerializableBranch::from_str(&s).map_err(|_| { - serde::de::Error::custom(format!("invalid branch value: {s}")) - }) + let branch = match s.as_str() { + "." 
| "current" | "current-in-super-project" | "current-in-superproject" | "superproject" | "super" => { + SerializableBranch::CurrentInSuperproject + } + _ => SerializableBranch::Name(s), + }; + Ok(branch) } } @@ -696,6 +707,7 @@ impl GixGit2Convert for SerializableUpdate { } #[cfg(test)] +#[cfg_attr(coverage_nightly, coverage(off))] mod tests { use super::*; @@ -772,4 +784,578 @@ mod tests { // Invalid UTF-8 assert!(SerializableIgnore::from_gitmodules_bytes(&[0xFF, 0xFE, 0xFD]).is_err()); } + + // ================================================================ + // SerializableIgnore: Display, OptionsChecks, TryFrom conversions + // ================================================================ + + #[test] + fn test_ignore_display() { + assert_eq!(format!("{}", SerializableIgnore::All), "all"); + assert_eq!(format!("{}", SerializableIgnore::Dirty), "dirty"); + assert_eq!(format!("{}", SerializableIgnore::Untracked), "untracked"); + assert_eq!(format!("{}", SerializableIgnore::None), "none"); + assert_eq!(format!("{}", SerializableIgnore::Unspecified), ""); + } + + #[test] + fn test_ignore_options_checks() { + assert!(SerializableIgnore::Unspecified.is_unspecified()); + assert!(!SerializableIgnore::All.is_unspecified()); + assert!(!SerializableIgnore::None.is_unspecified()); + + assert!(SerializableIgnore::None.is_default()); + assert!(!SerializableIgnore::All.is_default()); + assert!(!SerializableIgnore::Unspecified.is_default()); + } + + #[test] + fn test_ignore_gitmodules_key_path() { + assert_eq!( + SerializableIgnore::All.gitmodules_key_path("mymod"), + "submodule.mymod.ignore" + ); + } + + #[test] + fn test_ignore_tryfrom_git2_roundtrip() { + use git2::SubmoduleIgnore as G2; + // Our type → git2 + let git2_all: G2 = SerializableIgnore::All.try_into().unwrap(); + assert_eq!(git2_all, G2::All); + let git2_dirty: G2 = SerializableIgnore::Dirty.try_into().unwrap(); + assert_eq!(git2_dirty, G2::Dirty); + let git2_untracked: G2 = 
SerializableIgnore::Untracked.try_into().unwrap(); + assert_eq!(git2_untracked, G2::Untracked); + let git2_none: G2 = SerializableIgnore::None.try_into().unwrap(); + assert_eq!(git2_none, G2::None); + let git2_unspec: G2 = SerializableIgnore::Unspecified.try_into().unwrap(); + assert_eq!(git2_unspec, G2::Unspecified); + + // git2 → our type + let ours: SerializableIgnore = G2::All.try_into().unwrap(); + assert_eq!(ours, SerializableIgnore::All); + let ours: SerializableIgnore = G2::Dirty.try_into().unwrap(); + assert_eq!(ours, SerializableIgnore::Dirty); + let ours: SerializableIgnore = G2::None.try_into().unwrap(); + assert_eq!(ours, SerializableIgnore::None); + let ours: SerializableIgnore = G2::Unspecified.try_into().unwrap(); + assert_eq!(ours, SerializableIgnore::Unspecified); + } + + #[test] + fn test_ignore_tryfrom_gix_roundtrip() { + // Our type → gix + let gix_all: Ignore = SerializableIgnore::All.try_into().unwrap(); + assert_eq!(gix_all, Ignore::All); + let gix_dirty: Ignore = SerializableIgnore::Dirty.try_into().unwrap(); + assert_eq!(gix_dirty, Ignore::Dirty); + // Unspecified maps to None in gix + let gix_unspec: Ignore = SerializableIgnore::Unspecified.try_into().unwrap(); + assert_eq!(gix_unspec, Ignore::None); + + // gix → our type + let ours: SerializableIgnore = Ignore::All.try_into().unwrap(); + assert_eq!(ours, SerializableIgnore::All); + let ours: SerializableIgnore = Ignore::None.try_into().unwrap(); + assert_eq!(ours, SerializableIgnore::None); + } + + #[test] + fn test_ignore_gixgit2convert() { + let from_git2 = SerializableIgnore::from_git2(git2::SubmoduleIgnore::All).unwrap(); + assert_eq!(from_git2, SerializableIgnore::All); + let from_gix = SerializableIgnore::from_gix(Ignore::Dirty).unwrap(); + assert_eq!(from_gix, SerializableIgnore::Dirty); + } + + // ================================================================ + // SerializableFetchRecurse: full coverage + // ================================================================ + + 
#[test] + fn test_fetch_recurse_gitmodules_key() { + assert_eq!( + SerializableFetchRecurse::OnDemand.gitmodules_key(), + "fetchRecurseSubmodules" + ); + } + + #[test] + fn test_fetch_recurse_to_gitmodules() { + assert_eq!( + SerializableFetchRecurse::OnDemand.to_gitmodules(), + "on-demand" + ); + assert_eq!(SerializableFetchRecurse::Always.to_gitmodules(), "true"); + assert_eq!(SerializableFetchRecurse::Never.to_gitmodules(), "false"); + assert_eq!( + SerializableFetchRecurse::Unspecified.to_gitmodules(), + "on-demand" + ); + } + + #[test] + fn test_fetch_recurse_from_gitmodules() { + assert_eq!( + SerializableFetchRecurse::from_gitmodules("on-demand").unwrap(), + SerializableFetchRecurse::OnDemand + ); + assert_eq!( + SerializableFetchRecurse::from_gitmodules("true").unwrap(), + SerializableFetchRecurse::Always + ); + assert_eq!( + SerializableFetchRecurse::from_gitmodules("false").unwrap(), + SerializableFetchRecurse::Never + ); + assert_eq!( + SerializableFetchRecurse::from_gitmodules("").unwrap(), + SerializableFetchRecurse::Unspecified + ); + assert!(SerializableFetchRecurse::from_gitmodules("invalid").is_err()); + assert!(SerializableFetchRecurse::from_gitmodules("ON-DEMAND").is_err()); + } + + #[test] + fn test_fetch_recurse_from_gitmodules_bytes() { + assert_eq!( + SerializableFetchRecurse::from_gitmodules_bytes(b"on-demand").unwrap(), + SerializableFetchRecurse::OnDemand + ); + assert_eq!( + SerializableFetchRecurse::from_gitmodules_bytes(b"true").unwrap(), + SerializableFetchRecurse::Always + ); + assert!(SerializableFetchRecurse::from_gitmodules_bytes(&[0xFF]).is_err()); + } + + #[test] + fn test_fetch_recurse_options_checks() { + assert!(SerializableFetchRecurse::Unspecified.is_unspecified()); + assert!(!SerializableFetchRecurse::OnDemand.is_unspecified()); + + assert!(SerializableFetchRecurse::OnDemand.is_default()); + assert!(!SerializableFetchRecurse::Always.is_default()); + } + + #[test] + fn test_fetch_recurse_display() { + assert_eq!( + 
format!("{}", SerializableFetchRecurse::OnDemand), + "on-demand" + ); + assert_eq!(format!("{}", SerializableFetchRecurse::Always), "true"); + assert_eq!(format!("{}", SerializableFetchRecurse::Never), "false"); + } + + #[test] + fn test_fetch_recurse_tryfrom_gix_roundtrip() { + let gix_od: FetchRecurse = SerializableFetchRecurse::OnDemand.try_into().unwrap(); + assert_eq!(gix_od, FetchRecurse::OnDemand); + let gix_always: FetchRecurse = SerializableFetchRecurse::Always.try_into().unwrap(); + assert_eq!(gix_always, FetchRecurse::Always); + let gix_never: FetchRecurse = SerializableFetchRecurse::Never.try_into().unwrap(); + assert_eq!(gix_never, FetchRecurse::Never); + // Unspecified maps to OnDemand in gix + let gix_unspec: FetchRecurse = SerializableFetchRecurse::Unspecified.try_into().unwrap(); + assert_eq!(gix_unspec, FetchRecurse::OnDemand); + + let ours: SerializableFetchRecurse = FetchRecurse::OnDemand.try_into().unwrap(); + assert_eq!(ours, SerializableFetchRecurse::OnDemand); + let ours: SerializableFetchRecurse = FetchRecurse::Always.try_into().unwrap(); + assert_eq!(ours, SerializableFetchRecurse::Always); + let ours: SerializableFetchRecurse = FetchRecurse::Never.try_into().unwrap(); + assert_eq!(ours, SerializableFetchRecurse::Never); + } + + #[test] + fn test_fetch_recurse_gixgit2convert() { + let from_git2 = SerializableFetchRecurse::from_git2("on-demand".to_string()).unwrap(); + assert_eq!(from_git2, SerializableFetchRecurse::OnDemand); + let from_git2 = SerializableFetchRecurse::from_git2("true".to_string()).unwrap(); + assert_eq!(from_git2, SerializableFetchRecurse::Always); + assert!(SerializableFetchRecurse::from_git2("bogus".to_string()).is_err()); + + let from_gix = SerializableFetchRecurse::from_gix(FetchRecurse::Never).unwrap(); + assert_eq!(from_gix, SerializableFetchRecurse::Never); + } + + #[test] + fn test_fetch_recurse_gitmodules_key_path() { + assert_eq!( + SerializableFetchRecurse::Always.gitmodules_key_path("sub1"), + 
"submodule.sub1.fetchRecurseSubmodules" + ); + } + + // ================================================================ + // SerializableBranch: comprehensive coverage + // ================================================================ + + #[test] + fn test_branch_from_str_aliases() { + assert_eq!( + SerializableBranch::from_str(".").unwrap(), + SerializableBranch::CurrentInSuperproject + ); + assert_eq!( + SerializableBranch::from_str("current").unwrap(), + SerializableBranch::CurrentInSuperproject + ); + assert_eq!( + SerializableBranch::from_str("current-in-super-project").unwrap(), + SerializableBranch::CurrentInSuperproject + ); + assert_eq!( + SerializableBranch::from_str("superproject").unwrap(), + SerializableBranch::CurrentInSuperproject + ); + assert_eq!( + SerializableBranch::from_str("super").unwrap(), + SerializableBranch::CurrentInSuperproject + ); + } + + #[test] + fn test_branch_from_str_named() { + assert_eq!( + SerializableBranch::from_str("main").unwrap(), + SerializableBranch::Name("main".to_string()) + ); + assert_eq!( + SerializableBranch::from_str("develop").unwrap(), + SerializableBranch::Name("develop".to_string()) + ); + assert_eq!( + SerializableBranch::from_str("feature/my-feature").unwrap(), + SerializableBranch::Name("feature/my-feature".to_string()) + ); + } + + #[test] + fn test_branch_display() { + assert_eq!( + format!("{}", SerializableBranch::CurrentInSuperproject), + "." 
+ ); + assert_eq!( + format!("{}", SerializableBranch::Name("main".to_string())), + "main" + ); + } + + #[test] + fn test_branch_default() { + let default = SerializableBranch::default(); + // Default should be a Name variant (either from gix default or "main" fallback) + match &default { + SerializableBranch::Name(_) => {} // expected + SerializableBranch::CurrentInSuperproject => { + // also acceptable if gix default is CurrentInSuperproject + } + } + } + + #[test] + fn test_branch_to_gitmodules() { + assert_eq!( + SerializableBranch::CurrentInSuperproject.to_gitmodules(), + "." + ); + assert_eq!( + SerializableBranch::Name("develop".to_string()).to_gitmodules(), + "develop" + ); + } + + #[test] + fn test_branch_gitmodules_key() { + assert_eq!( + SerializableBranch::CurrentInSuperproject.gitmodules_key(), + "branch" + ); + } + + #[test] + fn test_branch_from_gitmodules_aliases() { + assert_eq!( + SerializableBranch::from_gitmodules(".").unwrap(), + SerializableBranch::CurrentInSuperproject + ); + assert_eq!( + SerializableBranch::from_gitmodules("current").unwrap(), + SerializableBranch::CurrentInSuperproject + ); + assert_eq!( + SerializableBranch::from_gitmodules("superproject").unwrap(), + SerializableBranch::CurrentInSuperproject + ); + assert_eq!( + SerializableBranch::from_gitmodules("super").unwrap(), + SerializableBranch::CurrentInSuperproject + ); + } + + #[test] + fn test_branch_from_gitmodules_named() { + assert_eq!( + SerializableBranch::from_gitmodules("main").unwrap(), + SerializableBranch::Name("main".to_string()) + ); + } + + #[test] + fn test_branch_from_gitmodules_empty_is_err() { + assert!(SerializableBranch::from_gitmodules("").is_err()); + assert!(SerializableBranch::from_gitmodules(" ").is_err()); + } + + #[test] + fn test_branch_from_gitmodules_bytes() { + assert_eq!( + SerializableBranch::from_gitmodules_bytes(b".").unwrap(), + SerializableBranch::CurrentInSuperproject + ); + assert_eq!( + 
SerializableBranch::from_gitmodules_bytes(b"main").unwrap(), + SerializableBranch::Name("main".to_string()) + ); + assert!(SerializableBranch::from_gitmodules_bytes(&[0xFF]).is_err()); + } + + #[test] + fn test_branch_tryfrom_gix_roundtrip() { + let gix_cur: Branch = SerializableBranch::CurrentInSuperproject + .try_into() + .unwrap(); + assert_eq!(gix_cur, Branch::CurrentInSuperproject); + + let gix_name: Branch = SerializableBranch::Name("main".to_string()) + .try_into() + .unwrap(); + match gix_name { + Branch::Name(n) => assert_eq!(n.to_string(), "main"), + _ => panic!("Expected Branch::Name"), + } + + let ours: SerializableBranch = Branch::CurrentInSuperproject.try_into().unwrap(); + assert_eq!(ours, SerializableBranch::CurrentInSuperproject); + } + + #[test] + fn test_branch_gixgit2convert() { + let from_git2 = SerializableBranch::from_git2("main".to_string()).unwrap(); + assert_eq!(from_git2, SerializableBranch::Name("main".to_string())); + + let from_git2 = SerializableBranch::from_git2(".".to_string()).unwrap(); + assert_eq!(from_git2, SerializableBranch::CurrentInSuperproject); + + let from_gix = SerializableBranch::from_gix(Branch::CurrentInSuperproject).unwrap(); + assert_eq!(from_gix, SerializableBranch::CurrentInSuperproject); + } + + #[test] + fn test_branch_set_branch_with_name() { + let result = SerializableBranch::set_branch(Some("develop".to_string())).unwrap(); + assert_eq!(result, SerializableBranch::Name("develop".to_string())); + } + + #[test] + fn test_branch_set_branch_with_alias() { + let result = SerializableBranch::set_branch(Some(".".to_string())).unwrap(); + assert_eq!(result, SerializableBranch::CurrentInSuperproject); + + let result = SerializableBranch::set_branch(Some("super".to_string())).unwrap(); + assert_eq!(result, SerializableBranch::CurrentInSuperproject); + } + + #[test] + fn test_branch_set_branch_empty_returns_default() { + let result = SerializableBranch::set_branch(Some("".to_string())).unwrap(); + // Empty string → 
default + assert_eq!(result, SerializableBranch::default()); + } + + #[test] + fn test_branch_set_branch_none_returns_default() { + let result = SerializableBranch::set_branch(None).unwrap(); + assert_eq!(result, SerializableBranch::default()); + } + + #[test] + fn test_branch_set_branch_trims_whitespace() { + let result = SerializableBranch::set_branch(Some(" main ".to_string())).unwrap(); + assert_eq!(result, SerializableBranch::Name("main".to_string())); + } + + #[test] + fn test_branch_deserialize_from_toml() { + #[derive(Deserialize)] + struct TestConfig { + branch: SerializableBranch, + } + let toml_str = r#"branch = "main""#; + let config: TestConfig = toml::from_str(toml_str).unwrap(); + assert_eq!(config.branch, SerializableBranch::Name("main".to_string())); + + let toml_str = r#"branch = ".""#; + let config: TestConfig = toml::from_str(toml_str).unwrap(); + assert_eq!(config.branch, SerializableBranch::CurrentInSuperproject); + + let toml_str = r#"branch = "super""#; + let config: TestConfig = toml::from_str(toml_str).unwrap(); + assert_eq!(config.branch, SerializableBranch::CurrentInSuperproject); + } + + #[test] + fn test_branch_gitmodules_key_path() { + assert_eq!( + SerializableBranch::Name("main".to_string()).gitmodules_key_path("mymod"), + "submodule.mymod.branch" + ); + } + + // ================================================================ + // SerializableUpdate: full coverage + // ================================================================ + + #[test] + fn test_update_gitmodules_key() { + assert_eq!(SerializableUpdate::Checkout.gitmodules_key(), "update"); + } + + #[test] + fn test_update_to_gitmodules() { + assert_eq!(SerializableUpdate::Checkout.to_gitmodules(), "checkout"); + assert_eq!(SerializableUpdate::Rebase.to_gitmodules(), "rebase"); + assert_eq!(SerializableUpdate::Merge.to_gitmodules(), "merge"); + assert_eq!(SerializableUpdate::None.to_gitmodules(), "none"); + assert_eq!(SerializableUpdate::Unspecified.to_gitmodules(), ""); + 
} + + #[test] + fn test_update_from_gitmodules() { + assert_eq!( + SerializableUpdate::from_gitmodules("checkout").unwrap(), + SerializableUpdate::Checkout + ); + assert_eq!( + SerializableUpdate::from_gitmodules("rebase").unwrap(), + SerializableUpdate::Rebase + ); + assert_eq!( + SerializableUpdate::from_gitmodules("merge").unwrap(), + SerializableUpdate::Merge + ); + assert_eq!( + SerializableUpdate::from_gitmodules("none").unwrap(), + SerializableUpdate::None + ); + assert_eq!( + SerializableUpdate::from_gitmodules("").unwrap(), + SerializableUpdate::Unspecified + ); + assert!(SerializableUpdate::from_gitmodules("invalid").is_err()); + } + + #[test] + fn test_update_from_gitmodules_bytes() { + assert_eq!( + SerializableUpdate::from_gitmodules_bytes(b"checkout").unwrap(), + SerializableUpdate::Checkout + ); + assert_eq!( + SerializableUpdate::from_gitmodules_bytes(b"rebase").unwrap(), + SerializableUpdate::Rebase + ); + assert!(SerializableUpdate::from_gitmodules_bytes(&[0xFF]).is_err()); + } + + #[test] + fn test_update_options_checks() { + assert!(SerializableUpdate::Unspecified.is_unspecified()); + assert!(!SerializableUpdate::Checkout.is_unspecified()); + + assert!(SerializableUpdate::Checkout.is_default()); + assert!(!SerializableUpdate::Rebase.is_default()); + } + + #[test] + fn test_update_display() { + assert_eq!(format!("{}", SerializableUpdate::Checkout), "checkout"); + assert_eq!(format!("{}", SerializableUpdate::Rebase), "rebase"); + assert_eq!(format!("{}", SerializableUpdate::Merge), "merge"); + assert_eq!(format!("{}", SerializableUpdate::None), "none"); + assert_eq!(format!("{}", SerializableUpdate::Unspecified), ""); + } + + #[test] + fn test_update_tryfrom_git2_roundtrip() { + use git2::SubmoduleUpdate as G2U; + let git2_co: G2U = SerializableUpdate::Checkout.try_into().unwrap(); + assert_eq!(git2_co, G2U::Checkout); + let git2_rebase: G2U = SerializableUpdate::Rebase.try_into().unwrap(); + assert_eq!(git2_rebase, G2U::Rebase); + let 
git2_merge: G2U = SerializableUpdate::Merge.try_into().unwrap(); + assert_eq!(git2_merge, G2U::Merge); + let git2_none: G2U = SerializableUpdate::None.try_into().unwrap(); + assert_eq!(git2_none, G2U::None); + let git2_unspec: G2U = SerializableUpdate::Unspecified.try_into().unwrap(); + assert_eq!(git2_unspec, G2U::Default); + + let ours: SerializableUpdate = G2U::Checkout.try_into().unwrap(); + assert_eq!(ours, SerializableUpdate::Checkout); + let ours: SerializableUpdate = G2U::Rebase.try_into().unwrap(); + assert_eq!(ours, SerializableUpdate::Rebase); + let ours: SerializableUpdate = G2U::Default.try_into().unwrap(); + assert_eq!(ours, SerializableUpdate::Unspecified); + } + + #[test] + fn test_update_tryfrom_gix_roundtrip() { + let gix_co: Update = SerializableUpdate::Checkout.try_into().unwrap(); + assert_eq!(gix_co, Update::Checkout); + let gix_rebase: Update = SerializableUpdate::Rebase.try_into().unwrap(); + assert_eq!(gix_rebase, Update::Rebase); + let gix_merge: Update = SerializableUpdate::Merge.try_into().unwrap(); + assert_eq!(gix_merge, Update::Merge); + let gix_none: Update = SerializableUpdate::None.try_into().unwrap(); + assert_eq!(gix_none, Update::None); + // Unspecified maps to Checkout in gix + let gix_unspec: Update = SerializableUpdate::Unspecified.try_into().unwrap(); + assert_eq!(gix_unspec, Update::Checkout); + + let ours: SerializableUpdate = Update::Checkout.try_into().unwrap(); + assert_eq!(ours, SerializableUpdate::Checkout); + let ours: SerializableUpdate = Update::Rebase.try_into().unwrap(); + assert_eq!(ours, SerializableUpdate::Rebase); + let ours: SerializableUpdate = Update::None.try_into().unwrap(); + assert_eq!(ours, SerializableUpdate::None); + } + + #[test] + fn test_update_gix_command_variant_maps_to_unspecified() { + // Update::Command is not serializable; it should map to Unspecified + let cmd = Update::Command("!echo hello".into()); + let ours: SerializableUpdate = cmd.try_into().unwrap(); + assert_eq!(ours, 
SerializableUpdate::Unspecified); + } + + #[test] + fn test_update_gixgit2convert() { + let from_git2 = SerializableUpdate::from_git2(git2::SubmoduleUpdate::Merge).unwrap(); + assert_eq!(from_git2, SerializableUpdate::Merge); + + let from_gix = SerializableUpdate::from_gix(Update::Rebase).unwrap(); + assert_eq!(from_gix, SerializableUpdate::Rebase); + } + + #[test] + fn test_update_gitmodules_key_path() { + assert_eq!( + SerializableUpdate::Checkout.gitmodules_key_path("sub1"), + "submodule.sub1.update" + ); + } } diff --git a/src/shells.rs b/src/shells.rs index b3dcb83..e3453f8 100644 --- a/src/shells.rs +++ b/src/shells.rs @@ -96,6 +96,7 @@ impl TryFrom for Shell { } #[cfg(test)] +#[cfg_attr(coverage_nightly, coverage(off))] mod tests { use super::*; @@ -124,7 +125,10 @@ mod tests { // or paths that parse as expected on Unix. assert_eq!(Shell::from_path("powershell.exe"), Some(Shell::PowerShell)); assert_eq!(Shell::from_path("/usr/bin/pwsh"), Some(Shell::PowerShell)); - assert_eq!(Shell::from_path("powershell_ise.exe"), Some(Shell::PowerShell)); + assert_eq!( + Shell::from_path("powershell_ise.exe"), + Some(Shell::PowerShell) + ); } #[test] @@ -158,6 +162,178 @@ mod tests { // .bash file stem is ".bash" which won't match "bash", so it should be None. 
assert_eq!(Shell::from_path(".bash"), None); } + + // ================================================================ + // Shell::from_str and Display + // ================================================================ + + #[test] + fn test_shell_from_str() { + assert_eq!( + ::from_str("bash").unwrap(), + Shell::Bash + ); + assert_eq!( + ::from_str("zsh").unwrap(), + Shell::Zsh + ); + assert_eq!( + ::from_str("fish").unwrap(), + Shell::Fish + ); + assert_eq!( + ::from_str("powershell").unwrap(), + Shell::PowerShell + ); + assert_eq!( + ::from_str("elvish").unwrap(), + Shell::Elvish + ); + assert_eq!( + ::from_str("nushell").unwrap(), + Shell::Nushell + ); + } + + #[test] + fn test_shell_from_str_alias() { + assert_eq!( + ::from_str("pwsh").unwrap(), + Shell::PowerShell + ); + } + + #[test] + fn test_shell_from_str_invalid() { + assert!(::from_str("cmd").is_err()); + assert!(::from_str("").is_err()); + assert!(::from_str("BASH").is_err()); // case-sensitive + } + + #[test] + fn test_shell_display() { + assert_eq!(format!("{}", Shell::Bash), "bash"); + assert_eq!(format!("{}", Shell::Zsh), "zsh"); + assert_eq!(format!("{}", Shell::Fish), "fish"); + assert_eq!(format!("{}", Shell::PowerShell), "powershell"); + assert_eq!(format!("{}", Shell::Elvish), "elvish"); + assert_eq!(format!("{}", Shell::Nushell), "nushell"); + } + + // ================================================================ + // TryFrom conversions: AotShell ↔ Shell ↔ NushellShell + // ================================================================ + + #[test] + fn test_shell_to_aotshell() { + let aot: AotShell = Shell::Bash.try_into().unwrap(); + assert_eq!(aot, AotShell::Bash); + let aot: AotShell = Shell::Zsh.try_into().unwrap(); + assert_eq!(aot, AotShell::Zsh); + let aot: AotShell = Shell::Fish.try_into().unwrap(); + assert_eq!(aot, AotShell::Fish); + let aot: AotShell = Shell::PowerShell.try_into().unwrap(); + assert_eq!(aot, AotShell::PowerShell); + let aot: AotShell = 
Shell::Elvish.try_into().unwrap(); + assert_eq!(aot, AotShell::Elvish); + } + + #[test] + fn test_nushell_to_aotshell_fails() { + let result: Result = Shell::Nushell.try_into(); + assert!(result.is_err()); + } + + #[test] + fn test_aotshell_to_shell() { + let shell: Shell = AotShell::Bash.try_into().unwrap(); + assert_eq!(shell, Shell::Bash); + let shell: Shell = AotShell::Zsh.try_into().unwrap(); + assert_eq!(shell, Shell::Zsh); + let shell: Shell = AotShell::Fish.try_into().unwrap(); + assert_eq!(shell, Shell::Fish); + let shell: Shell = AotShell::PowerShell.try_into().unwrap(); + assert_eq!(shell, Shell::PowerShell); + let shell: Shell = AotShell::Elvish.try_into().unwrap(); + assert_eq!(shell, Shell::Elvish); + } + + #[test] + fn test_shell_to_nushell_shell() { + // NushellShell doesn't implement PartialEq/Debug, so just check it doesn't panic + let _nu: NushellShell = Shell::Nushell.try_into().unwrap(); + } + + #[test] + fn test_non_nushell_to_nushell_shell_fails() { + let result: Result = Shell::Bash.try_into(); + assert!(result.is_err()); + let result: Result = Shell::Zsh.try_into(); + assert!(result.is_err()); + } + + #[test] + fn test_nushell_shell_to_shell() { + let shell: Shell = NushellShell.try_into().unwrap(); + assert_eq!(shell, Shell::Nushell); + } + + // ================================================================ + // try_to_clap_complete + // ================================================================ + + #[test] + fn test_try_to_clap_complete_all_variants() { + // All shell variants should successfully convert + assert!(Shell::Bash.try_to_clap_complete().is_ok()); + assert!(Shell::Zsh.try_to_clap_complete().is_ok()); + assert!(Shell::Fish.try_to_clap_complete().is_ok()); + assert!(Shell::PowerShell.try_to_clap_complete().is_ok()); + assert!(Shell::Elvish.try_to_clap_complete().is_ok()); + assert!(Shell::Nushell.try_to_clap_complete().is_ok()); + } + + // ================================================================ + // ValueEnum + 
// ================================================================ + + #[test] + fn test_value_variants_count() { + assert_eq!(Shell::value_variants().len(), 6); + } + + #[test] + fn test_to_possible_value_all_some() { + for variant in Shell::value_variants() { + assert!(variant.to_possible_value().is_some()); + } + } + + // ================================================================ + // Generator: file_name and try_generate + // ================================================================ + + #[test] + fn test_generator_file_name() { + let bash_name = ::file_name(&Shell::Bash, "submod"); + assert!(!bash_name.is_empty()); + + let nu_name = ::file_name(&Shell::Nushell, "submod"); + assert!(nu_name.contains("submod")); + } + + #[test] + fn test_generator_try_generate_produces_output() { + use clap::Command; + + for shell in Shell::value_variants() { + let mut cmd = Command::new("test-cmd").subcommand(Command::new("sub1")); + let mut buf = Vec::new(); + // clap_complete::generate sets bin_name internally + clap_complete::generate(*shell, &mut cmd, "test-cmd", &mut buf); + assert!(!buf.is_empty(), "Empty output for {:?}", shell); + } + } } impl TryFrom for AotShell { diff --git a/src/utilities.rs b/src/utilities.rs index 458ba0f..e440afa 100644 --- a/src/utilities.rs +++ b/src/utilities.rs @@ -228,6 +228,7 @@ pub(crate) fn get_name( } #[cfg(test)] +#[cfg_attr(coverage_nightly, coverage(off))] mod tests { use super::*; @@ -238,12 +239,7 @@ mod tests { "my-repo" ); assert_eq!( - get_name( - Some(" spaced-repo ".to_string()), - None, - None - ) - .unwrap(), + get_name(Some(" spaced-repo ".to_string()), None, None).unwrap(), "spaced-repo" ); } @@ -282,12 +278,7 @@ mod tests { #[test] fn test_get_name_no_name_valid_path() { assert_eq!( - get_name( - None, - None, - Some(std::ffi::OsString::from("another-path")) - ) - .unwrap(), + get_name(None, None, Some(std::ffi::OsString::from("another-path"))).unwrap(), "another-path" ); } @@ -309,4 +300,157 @@ mod tests { fn 
test_get_name_all_none() { assert!(get_name(None, None, None).is_err()); } + + // ================================================================ + // name_from_url edge cases + // ================================================================ + + #[test] + fn test_name_from_url_standard() { + assert_eq!( + name_from_url("https://github.com/user/repo.git").unwrap(), + "repo" + ); + assert_eq!( + name_from_url("https://github.com/user/repo").unwrap(), + "repo" + ); + } + + #[test] + fn test_name_from_url_trailing_slashes() { + assert_eq!( + name_from_url("https://github.com/user/repo/").unwrap(), + "repo" + ); + assert_eq!( + name_from_url("https://github.com/user/repo///").unwrap(), + "repo" + ); + } + + #[test] + fn test_name_from_url_ssh_format() { + assert_eq!( + name_from_url("git@github.com:user/mylib.git").unwrap(), + "mylib" + ); + } + + #[test] + fn test_name_from_url_file_url() { + assert_eq!(name_from_url("file:///path/to/repo.git").unwrap(), "repo"); + } + + #[test] + fn test_name_from_url_simple_name() { + assert_eq!(name_from_url("repo").unwrap(), "repo"); + assert_eq!(name_from_url("my-lib.git").unwrap(), "my-lib"); + } + + #[test] + fn test_name_from_url_empty() { + assert!(name_from_url("").is_err()); + } + + // ================================================================ + // name_from_osstring + // ================================================================ + + #[test] + fn test_name_from_osstring_simple() { + assert_eq!( + name_from_osstring(std::ffi::OsString::from("my-repo")).unwrap(), + "my-repo" + ); + } + + #[test] + fn test_name_from_osstring_path() { + let path = PathBuf::from_iter(["path", "to", "module"]); + assert_eq!(name_from_osstring(path.into_os_string()).unwrap(), "module"); + } + + #[test] + fn test_name_from_osstring_empty() { + assert!(name_from_osstring(std::ffi::OsString::from("")).is_err()); + assert!(name_from_osstring(std::ffi::OsString::from(" ")).is_err()); + } + + #[test] + fn 
test_name_from_osstring_null_byte() { + assert!(name_from_osstring(std::ffi::OsString::from("foo\0bar")).is_err()); + } + + // ================================================================ + // osstring_to_string + // ================================================================ + + #[test] + fn test_osstring_to_string_valid() { + assert_eq!( + osstring_to_string(std::ffi::OsString::from("hello")).unwrap(), + "hello" + ); + } + + // ================================================================ + // path_to_string + // ================================================================ + + #[test] + fn test_path_to_string_valid() { + let path = std::path::Path::new("/home/user/repo"); + assert_eq!(path_to_string(path).unwrap(), "/home/user/repo"); + } + + // ================================================================ + // path_to_os_string + // ================================================================ + + #[test] + fn test_path_to_os_string_roundtrip() { + let path = std::path::Path::new("/some/path"); + let os = path_to_os_string(path); + assert_eq!(os, std::ffi::OsString::from("/some/path")); + } + + // ================================================================ + // set_path + // ================================================================ + + #[test] + fn test_set_path_valid_utf8() { + let os = std::ffi::OsString::from("/valid/path"); + assert_eq!(set_path(os).unwrap(), "/valid/path"); + } + + // ================================================================ + // get_sparse_paths + // ================================================================ + + #[test] + fn test_get_sparse_paths_valid() { + let paths = Some(vec!["src/".to_string(), "docs/".to_string()]); + let result = get_sparse_paths(paths).unwrap(); + assert_eq!(result, Some(vec!["src/".to_string(), "docs/".to_string()])); + } + + #[test] + fn test_get_sparse_paths_none() { + let result = get_sparse_paths(None).unwrap(); + assert!(result.is_none()); + } + + #[test] + fn 
test_get_sparse_paths_null_byte() { + let paths = Some(vec!["src/\0bad".to_string()]); + assert!(get_sparse_paths(paths).is_err()); + } + + #[test] + fn test_get_sparse_paths_empty_vec() { + let result = get_sparse_paths(Some(vec![])).unwrap(); + assert_eq!(result, Some(vec![])); + } } diff --git a/tests/command_contract_tests.rs b/tests/command_contract_tests.rs index 5f2ec56..7aa4137 100644 --- a/tests/command_contract_tests.rs +++ b/tests/command_contract_tests.rs @@ -358,10 +358,12 @@ mod tests { // Sparse checkout should be reconfigured let sparse_file = harness.get_sparse_checkout_file_path("lib/sparse-reinit"); - assert!(sparse_file.exists(), "Sparse checkout file should exist after reinit"); + assert!( + sparse_file.exists(), + "Sparse checkout file should exist after reinit" + ); - let sparse_content = - fs::read_to_string(&sparse_file).expect("Failed to read sparse file"); + let sparse_content = fs::read_to_string(&sparse_file).expect("Failed to read sparse file"); assert!( sparse_content.contains("src"), "Sparse checkout should contain 'src' after reinit" @@ -417,8 +419,7 @@ mod tests { assert!(output_path.exists(), "Output config file should exist"); - let content = - fs::read_to_string(&output_path).expect("Failed to read generated config"); + let content = fs::read_to_string(&output_path).expect("Failed to read generated config"); // The generated config should reference the submodule's URL (the most reliable identifier) assert!( content.contains(remote.to_str().unwrap_or("")), @@ -432,8 +433,7 @@ mod tests { harness.init_git_repo().expect("Failed to init git repo"); let output_path = harness.work_dir.join("force_gen.toml"); - fs::write(&output_path, "# existing content\n") - .expect("Failed to write existing file"); + fs::write(&output_path, "# existing content\n").expect("Failed to write existing file"); // Without --force should fail let no_force = harness @@ -467,8 +467,7 @@ mod tests { "Expected success message after --force; got: {stdout}" ); - 
let content = - fs::read_to_string(&output_path).expect("Failed to read generated config"); + let content = fs::read_to_string(&output_path).expect("Failed to read generated config"); // The existing content must be gone and replaced with generated defaults. assert!( !content.contains("# existing content") && content.contains("[defaults]"), @@ -491,18 +490,20 @@ mod tests { let url = format!("file://{}", remote.display()); harness - .run_submod_success(&["add", &url, "--name", "movable-lib", "--path", "lib/original"]) + .run_submod_success(&[ + "add", + &url, + "--name", + "movable-lib", + "--path", + "lib/original", + ]) .expect("Failed to add submodule"); assert!(harness.file_exists("lib/original/.git")); let stdout = harness - .run_submod_success(&[ - "change", - "movable-lib", - "--path", - "lib/moved", - ]) + .run_submod_success(&["change", "movable-lib", "--path", "lib/moved"]) .expect("Failed to change submodule path"); // Should confirm the update @@ -608,9 +609,8 @@ mod tests { let url = format!("file://{}", remote.display()); // Start disabled - let config_content = format!( - "[reenable-lib]\npath = \"lib/reenable\"\nurl = \"{url}\"\nactive = false\n" - ); + let config_content = + format!("[reenable-lib]\npath = \"lib/reenable\"\nurl = \"{url}\"\nactive = false\n"); harness .create_config(&config_content) .expect("Failed to create config"); @@ -655,12 +655,7 @@ mod tests { // Replace sparse paths (no --append) harness - .run_submod_success(&[ - "change", - "sparse-replace", - "--sparse-paths", - "tests", - ]) + .run_submod_success(&["change", "sparse-replace", "--sparse-paths", "tests"]) .expect("Failed to change sparse paths"); let config = harness.read_config().expect("Failed to read config"); @@ -738,9 +733,8 @@ mod tests { .expect("Failed to create remote"); let url = format!("file://{}", remote.display()); - let config_content = format!( - "[multi-lib]\npath = \"lib/multi\"\nurl = \"{url}\"\nactive = true\n" - ); + let config_content = + 
format!("[multi-lib]\npath = \"lib/multi\"\nurl = \"{url}\"\nactive = true\n"); harness .create_config(&config_content) .expect("Failed to create config"); @@ -777,13 +771,7 @@ mod tests { .expect("Failed to create config"); harness - .run_submod_success(&[ - "change-global", - "--update", - "rebase", - "--fetch", - "always", - ]) + .run_submod_success(&["change-global", "--update", "rebase", "--fetch", "always"]) .expect("Failed to run change-global with update and fetch"); let config = harness.read_config().expect("Failed to read config"); @@ -939,7 +927,14 @@ mod tests { let url = format!("file://{}", remote.display()); let stdout = harness - .run_submod_success(&["add", &url, "--name", "add-contract", "--path", "lib/addcnt"]) + .run_submod_success(&[ + "add", + &url, + "--name", + "add-contract", + "--path", + "lib/addcnt", + ]) .expect("Failed to add submodule"); assert!( @@ -1094,7 +1089,10 @@ mod tests { .run_submod_success(&["list"]) .expect("Failed to run list"); - assert!(stdout.contains("list-full"), "list should show submodule name"); + assert!( + stdout.contains("list-full"), + "list should show submodule name" + ); assert!(stdout.contains("lib/lf"), "list should show submodule path"); assert!(stdout.contains(&url), "list should show submodule URL"); assert!(stdout.contains("active"), "list should show active status"); @@ -1142,7 +1140,14 @@ mod tests { let url = format!("file://{}", remote.display()); harness - .run_submod_success(&["add", &url, "--name", "del-contract", "--path", "lib/delcon"]) + .run_submod_success(&[ + "add", + &url, + "--name", + "del-contract", + "--path", + "lib/delcon", + ]) .expect("Failed to add submodule"); let stdout = harness diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 4a5c349..724809f 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -18,6 +18,8 @@ pub struct TestHarness { pub submod_bin: PathBuf, /// Working directory within `temp_dir` pub work_dir: PathBuf, + /// Per-test git global config 
file (isolates tests from each other and from the user's ~/.gitconfig) + git_config_global: PathBuf, } impl TestHarness { @@ -34,26 +36,49 @@ impl TestHarness { // causing all subprocess-based tests to report zero coverage. let submod_bin = PathBuf::from(env!("CARGO_BIN_EXE_submod")); + // Create a per-test global gitconfig so parallel tests don't race on ~/.gitconfig. + // Pre-populate with protocol.file.allow=always (required for file:// submodule URLs). + let git_config_global = temp_dir.path().join("gitconfig"); + fs::write( + &git_config_global, + "[protocol \"file\"]\n\tallow = always\n", + )?; + Ok(Self { temp_dir, submod_bin, work_dir, + git_config_global, }) } + /// Return a `Command` for git with per-test config isolation. + /// + /// Sets `GIT_CONFIG_GLOBAL` to a test-local file and `GIT_CONFIG_SYSTEM` to + /// `/dev/null` so that tests never read or write the real user/system config. + fn git_cmd(&self) -> Command { + let mut cmd = Command::new("git"); + cmd.env("GIT_CONFIG_GLOBAL", &self.git_config_global); + cmd.env("GIT_CONFIG_SYSTEM", "/dev/null"); + cmd + } + /// Initialize a git repository in the working directory pub fn init_git_repo(&self) -> Result<(), Box> { // Use git commands for cleanup instead of direct filesystem operations - let _ = Command::new("git") + let _ = self + .git_cmd() .args(["submodule", "deinit", "--all", "-f"]) .current_dir(&self.work_dir) .output(); - let _ = Command::new("git") + let _ = self + .git_cmd() .args(["clean", "-fdx"]) .current_dir(&self.work_dir) .output(); - let output = Command::new("git") + let output = self + .git_cmd() .args(["init"]) .current_dir(&self.work_dir) .output()?; @@ -64,42 +89,38 @@ impl TestHarness { } // Ensure we're on the main branch - Command::new("git") + self.git_cmd() .args(["checkout", "-b", "main"]) .current_dir(&self.work_dir) .output()?; // Configure git user for tests - Command::new("git") + self.git_cmd() .args(["config", "user.name", "Test User"]) .current_dir(&self.work_dir) 
.output()?; - Command::new("git") + self.git_cmd() .args(["config", "user.email", "test@example.com"]) .current_dir(&self.work_dir) .output()?; - // Configure git to allow file protocol for tests (both local and global) - Command::new("git") + // protocol.file.allow=always is set in the per-test global gitconfig + // (created in TestHarness::new), so no --global write needed here. + self.git_cmd() .args(["config", "protocol.file.allow", "always"]) .current_dir(&self.work_dir) .output()?; - Command::new("git") - .args(["config", "--global", "protocol.file.allow", "always"]) - .current_dir(&self.work_dir) - .output()?; - // Create initial commit fs::write(self.work_dir.join("README.md"), "# Test Repository\n")?; - Command::new("git") + self.git_cmd() .args(["add", "README.md"]) .current_dir(&self.work_dir) .output()?; - Command::new("git") + self.git_cmd() .args(["commit", "-m", "Initial commit"]) .current_dir(&self.work_dir) .output()?; @@ -112,32 +133,32 @@ impl TestHarness { let remote_dir = self.temp_dir.path().join(format!("{name}.git")); // Initialize bare repository - Command::new("git") + self.git_cmd() .args(["init", "--bare"]) .arg(&remote_dir) .output()?; // Set the default branch to main for the bare repository - Command::new("git") + self.git_cmd() .args(["symbolic-ref", "HEAD", "refs/heads/main"]) .current_dir(&remote_dir) .output()?; // Create a working copy to add content let work_copy = self.temp_dir.path().join(format!("{name}_work")); - Command::new("git") + self.git_cmd() .args(["init"]) .arg(&work_copy) .output()?; // Set the default branch to main for the working copy - Command::new("git") + self.git_cmd() .args(["checkout", "-b", "main"]) .current_dir(&work_copy) .output()?; // Set up remote - Command::new("git") + self.git_cmd() .args(["remote", "add", "origin", remote_dir.to_str().unwrap()]) .current_dir(&work_copy) .output()?; @@ -162,27 +183,28 @@ impl TestHarness { fs::write(work_copy.join("LICENSE"), "MIT License\n")?; // Configure git 
and commit - Command::new("git") + self.git_cmd() .args(["config", "user.name", "Test User"]) .current_dir(&work_copy) .output()?; - Command::new("git") + self.git_cmd() .args(["config", "user.email", "test@example.com"]) .current_dir(&work_copy) .output()?; - Command::new("git") + self.git_cmd() .args(["add", "."]) .current_dir(&work_copy) .output()?; - Command::new("git") + self.git_cmd() .args(["commit", "-m", "Add test content"]) .current_dir(&work_copy) .output()?; - let push_output = Command::new("git") + let push_output = self + .git_cmd() .args(["push", "--no-verify", "origin", "main"]) .current_dir(&work_copy) .output()?; @@ -216,6 +238,8 @@ impl TestHarness { let output = Command::new(&self.submod_bin) .args(args) .current_dir(&self.work_dir) + .env("GIT_CONFIG_GLOBAL", &self.git_config_global) + .env("GIT_CONFIG_SYSTEM", "/dev/null") .output()?; Ok(output) @@ -344,113 +368,114 @@ impl TestHarness { let remote_dir = self.temp_dir.path().join(format!("{name}.git")); // Initialize bare repository - std::process::Command::new("git") + self.git_cmd() .args(["init", "--bare"]) .arg(&remote_dir) .output()?; // Set the default branch to main for the bare repository - std::process::Command::new("git") + self.git_cmd() .args(["symbolic-ref", "HEAD", "refs/heads/main"]) .current_dir(&remote_dir) .output()?; // Create a working copy to add content let work_copy = self.temp_dir.path().join(format!("{name}_work")); - std::process::Command::new("git") + self.git_cmd() .args(["init"]) .arg(&work_copy) .output()?; // Set up the main branch and remote - std::process::Command::new("git") + self.git_cmd() .args(["checkout", "-b", "main"]) .current_dir(&work_copy) .output()?; - std::process::Command::new("git") + self.git_cmd() .args(["remote", "add", "origin", remote_dir.to_str().unwrap()]) .current_dir(&work_copy) .output()?; // Configure git - std::process::Command::new("git") + self.git_cmd() .args(["config", "user.name", "Test User"]) .current_dir(&work_copy) 
.output()?; - std::process::Command::new("git") + self.git_cmd() .args(["config", "user.email", "test@example.com"]) .current_dir(&work_copy) .output()?; // Create main branch content - std::fs::create_dir_all(work_copy.join("src"))?; - std::fs::create_dir_all(work_copy.join("docs"))?; - std::fs::create_dir_all(work_copy.join("tests"))?; - std::fs::create_dir_all(work_copy.join("examples"))?; + fs::create_dir_all(work_copy.join("src"))?; + fs::create_dir_all(work_copy.join("docs"))?; + fs::create_dir_all(work_copy.join("tests"))?; + fs::create_dir_all(work_copy.join("examples"))?; - std::fs::write( + fs::write( work_copy.join("src").join("lib.rs"), "// Main library code\npub fn hello() { println!(\"Hello!\"); }\n", )?; - std::fs::write( + fs::write( work_copy.join("docs").join("API.md"), "# API Documentation\n", )?; - std::fs::write( + fs::write( work_copy.join("tests").join("test.rs"), "// Tests\n#[test]\nfn test_basic() { assert!(true); }\n", )?; - std::fs::write( + fs::write( work_copy.join("examples").join("basic.rs"), "// Example\nfn main() { println!(\"Example\"); }\n", )?; - std::fs::write( + fs::write( work_copy.join("Cargo.toml"), "[package]\nname = \"test-lib\"\nversion = \"0.1.0\"\n", )?; - std::fs::write(work_copy.join("README.md"), "# Test Library\n")?; + fs::write(work_copy.join("README.md"), "# Test Library\n")?; - std::process::Command::new("git") + self.git_cmd() .args(["add", "."]) .current_dir(&work_copy) .output()?; - std::process::Command::new("git") + self.git_cmd() .args(["commit", "-m", "Initial commit"]) .current_dir(&work_copy) .output()?; // Create a development branch - std::process::Command::new("git") + self.git_cmd() .args(["checkout", "-b", "develop"]) .current_dir(&work_copy) .output()?; - std::fs::write( + fs::write( work_copy.join("src").join("dev.rs"), "// Development code\npub fn dev_feature() {}\n", )?; - std::process::Command::new("git") + self.git_cmd() .args(["add", "."]) .current_dir(&work_copy) .output()?; - 
std::process::Command::new("git") + self.git_cmd() .args(["commit", "-m", "Add dev features"]) .current_dir(&work_copy) .output()?; // Create a tag - std::process::Command::new("git") + self.git_cmd() .args(["tag", "v0.1.0"]) .current_dir(&work_copy) .output()?; // Push everything with error checking - let push_main = std::process::Command::new("git") + let push_main = self + .git_cmd() .args(["push", "origin", "main"]) .current_dir(&work_copy) .output()?; @@ -460,7 +485,8 @@ impl TestHarness { return Err(format!("Failed to push main branch: {stderr}").into()); } - let push_develop = std::process::Command::new("git") + let push_develop = self + .git_cmd() .args(["push", "origin", "develop"]) .current_dir(&work_copy) .output()?; @@ -470,7 +496,8 @@ impl TestHarness { return Err(format!("Failed to push develop branch: {stderr}").into()); } - let push_tags = std::process::Command::new("git") + let push_tags = self + .git_cmd() .args(["push", "origin", "--tags"]) .current_dir(&work_copy) .output()?; diff --git a/tests/error_handling_tests.rs b/tests/error_handling_tests.rs index d0c5257..82e1b4c 100644 --- a/tests/error_handling_tests.rs +++ b/tests/error_handling_tests.rs @@ -48,7 +48,14 @@ mod tests { for invalid_url in invalid_urls { let output = harness - .run_submod(&["add", invalid_url, "--name", "invalid-test", "--path", "lib/invalid"]) + .run_submod(&[ + "add", + invalid_url, + "--name", + "invalid-test", + "--path", + "lib/invalid", + ]) .expect("Failed to run submod"); assert!(!output.status.success()); @@ -104,7 +111,14 @@ mod tests { // Try to add submodule to read-only directory let output = harness - .run_submod(&["add", &remote_url, "--name", "perm-test", "--path", "readonly/submodule"]) + .run_submod(&[ + "add", + &remote_url, + "--name", + "perm-test", + "--path", + "readonly/submodule", + ]) .expect("Failed to run submod"); assert!(!output.status.success()); @@ -233,7 +247,14 @@ mod tests { // Add a submodule harness - .run_submod_success(&["add", 
&remote_url, "--name", "concurrent-test", "--path", "lib/concurrent"]) + .run_submod_success(&[ + "add", + &remote_url, + "--name", + "concurrent-test", + "--path", + "lib/concurrent", + ]) .expect("Failed to add submodule"); // Simulate concurrent access by modifying config externally @@ -275,7 +296,14 @@ active = true // Add submodule - should handle any space issues gracefully let output = harness - .run_submod(&["add", &remote_url, "--name", "large-repo", "--path", "lib/large"]) + .run_submod(&[ + "add", + &remote_url, + "--name", + "large-repo", + "--path", + "lib/large", + ]) .expect("Failed to run submod"); // Should either succeed or fail with a meaningful error @@ -294,7 +322,7 @@ active = true vec!["--invalid-flag"], vec!["add"], // Missing required URL argument vec!["add", "--name", "x", "--path", "y"], // Missing required URL argument - vec!["reset"], // Missing submodule name when not using --all + vec!["reset"], // Missing submodule name when not using --all vec!["nonexistent-command"], ]; @@ -320,7 +348,14 @@ active = true let timeout_url = "http://nonexistent.invalid.domain.test/repo.git"; let output = harness - .run_submod(&["add", timeout_url, "--name", "timeout-test", "--path", "lib/timeout"]) + .run_submod(&[ + "add", + timeout_url, + "--name", + "timeout-test", + "--path", + "lib/timeout", + ]) .expect("Failed to run submod"); assert!(!output.status.success()); @@ -351,7 +386,14 @@ active = true let fake_url = format!("file://{}", fake_remote.display()); let output = harness - .run_submod(&["add", &fake_url, "--name", "fake-repo", "--path", "lib/fake"]) + .run_submod(&[ + "add", + &fake_url, + "--name", + "fake-repo", + "--path", + "lib/fake", + ]) .expect("Failed to run submod"); assert!(!output.status.success()); @@ -398,7 +440,14 @@ active = true // Try to add submodule (which requires writing to config) let output = harness - .run_submod(&["add", &remote_url, "--name", "locked-test", "--path", "lib/locked"]) + .run_submod(&[ + "add", + 
&remote_url, + "--name", + "locked-test", + "--path", + "lib/locked", + ]) .expect("Failed to run submod"); assert!(!output.status.success()); @@ -429,7 +478,14 @@ active = true // Try to add submodule to existing directory let output = harness - .run_submod(&["add", &remote_url, "--name", "partial-test", "--path", "lib/partial"]) + .run_submod(&[ + "add", + &remote_url, + "--name", + "partial-test", + "--path", + "lib/partial", + ]) .expect("Failed to run submod"); // Should handle existing directory appropriately diff --git a/tests/integration_tests.rs b/tests/integration_tests.rs index f8536c6..d3d7f5d 100644 --- a/tests/integration_tests.rs +++ b/tests/integration_tests.rs @@ -61,7 +61,14 @@ mod tests { // Add a submodule let stdout = harness - .run_submod_success(&["add", &remote_url, "--name", "test-lib", "--path", "lib/test"]) + .run_submod_success(&[ + "add", + &remote_url, + "--name", + "test-lib", + "--path", + "lib/test", + ]) .expect("Failed to add submodule"); assert!(stdout.contains("Added submodule")); @@ -170,7 +177,14 @@ sparse_paths = ["src"] // Add and initialize submodule first harness - .run_submod_success(&["add", &remote_url, "--name", "update-lib", "--path", "lib/update"]) + .run_submod_success(&[ + "add", + &remote_url, + "--name", + "update-lib", + "--path", + "lib/update", + ]) .expect("Failed to add submodule"); // Run update command @@ -193,7 +207,14 @@ sparse_paths = ["src"] // Add and initialize submodule harness - .run_submod_success(&["add", &remote_url, "--name", "reset-lib", "--path", "lib/reset"]) + .run_submod_success(&[ + "add", + &remote_url, + "--name", + "reset-lib", + "--path", + "lib/reset", + ]) .expect("Failed to add submodule"); // Make some changes in the submodule @@ -231,11 +252,25 @@ sparse_paths = ["src"] // Add two submodules harness - .run_submod_success(&["add", &remote_url1, "--name", "reset-lib1", "--path", "lib/reset1"]) + .run_submod_success(&[ + "add", + &remote_url1, + "--name", + "reset-lib1", + "--path", 
+ "lib/reset1", + ]) .expect("Failed to add submodule 1"); harness - .run_submod_success(&["add", &remote_url2, "--name", "reset-lib2", "--path", "lib/reset2"]) + .run_submod_success(&[ + "add", + &remote_url2, + "--name", + "reset-lib2", + "--path", + "lib/reset2", + ]) .expect("Failed to add submodule 2"); // Make changes in both submodules @@ -371,7 +406,14 @@ active = true // Try to add submodule with invalid URL let output = harness - .run_submod(&["add", "not-a-valid-url", "--name", "invalid-lib", "--path", "lib/invalid"]) + .run_submod(&[ + "add", + "not-a-valid-url", + "--name", + "invalid-lib", + "--path", + "lib/invalid", + ]) .expect("Failed to run submod"); assert!(!output.status.success()); @@ -441,7 +483,14 @@ active = true let remote_url = format!("file://{}", remote_repo.display()); harness - .run_submod_success(&["add", &remote_url, "--name", "list-lib", "--path", "lib/list"]) + .run_submod_success(&[ + "add", + &remote_url, + "--name", + "list-lib", + "--path", + "lib/list", + ]) .expect("Failed to add submodule"); let stdout = harness @@ -489,7 +538,14 @@ active = true let remote_url = format!("file://{}", remote_repo.display()); harness - .run_submod_success(&["add", &remote_url, "--name", "disable-lib", "--path", "lib/disable"]) + .run_submod_success(&[ + "add", + &remote_url, + "--name", + "disable-lib", + "--path", + "lib/disable", + ]) .expect("Failed to add submodule"); let stdout = harness @@ -532,9 +588,18 @@ active = true let config = harness.read_config().expect("Failed to read config"); // Comments must be preserved - assert!(config.contains("# My project submodules"), "top-level comment lost"); - assert!(config.contains("# This is my main library"), "submodule comment lost"); - assert!(config.contains("# default settings"), "defaults comment lost"); + assert!( + config.contains("# My project submodules"), + "top-level comment lost" + ); + assert!( + config.contains("# This is my main library"), + "submodule comment lost" + ); + 
assert!( + config.contains("# default settings"), + "defaults comment lost" + ); // active must be updated assert!(config.contains("active = false"), "active not updated"); } @@ -550,7 +615,14 @@ active = true let remote_url = format!("file://{}", remote_repo.display()); harness - .run_submod_success(&["add", &remote_url, "--name", "delete-lib", "--path", "lib/delete"]) + .run_submod_success(&[ + "add", + &remote_url, + "--name", + "delete-lib", + "--path", + "lib/delete", + ]) .expect("Failed to add submodule"); // Verify it was added @@ -601,7 +673,10 @@ active = true assert!(config.contains("[keep-me]"), "kept section was removed"); assert!(config.contains("# Project submodules"), "top comment lost"); // delete-me must be gone - assert!(!config.contains("[delete-me]"), "deleted section still present"); + assert!( + !config.contains("[delete-me]"), + "deleted section still present" + ); } #[test] @@ -677,7 +752,9 @@ active = true let content = fs::read_to_string(&output_path).expect("Failed to read generated config"); // Template should contain sample config content (at minimum a section or defaults) assert!( - content.contains("[defaults]") || content.contains("vendor-utils") || content.contains("sparse_paths"), + content.contains("[defaults]") + || content.contains("vendor-utils") + || content.contains("sparse_paths"), "Template config should contain sample content; got: {content}" ); } @@ -699,8 +776,15 @@ active = true ]) .expect("Failed to generate empty config"); - assert!(stdout.contains("Generated empty config"), "Expected 'Generated empty config' in stdout, got: {stdout}"); - assert!(output_path.exists(), "Output file should exist at {}", output_path.display()); + assert!( + stdout.contains("Generated empty config"), + "Expected 'Generated empty config' in stdout, got: {stdout}" + ); + assert!( + output_path.exists(), + "Output file should exist at {}", + output_path.display() + ); } #[test] @@ -737,7 +821,14 @@ active = true let remote_url = 
format!("file://{}", remote_repo.display()); harness - .run_submod_success(&["add", &remote_url, "--name", "nuke-lib", "--path", "lib/nuke"]) + .run_submod_success(&[ + "add", + &remote_url, + "--name", + "nuke-lib", + "--path", + "lib/nuke", + ]) .expect("Failed to add submodule"); // Nuke with --kill (does not reinit) diff --git a/tests/performance_tests.rs b/tests/performance_tests.rs index ba274f2..c3669d3 100644 --- a/tests/performance_tests.rs +++ b/tests/performance_tests.rs @@ -161,7 +161,14 @@ ignore = "all" let start_time = Instant::now(); for (i, deep_path) in deep_paths.iter().enumerate() { harness - .run_submod_success(&["add", &remote_url, "--name", &format!("deep-{i}"), "--path", deep_path]) + .run_submod_success(&[ + "add", + &remote_url, + "--name", + &format!("deep-{i}"), + "--path", + deep_path, + ]) .expect("Failed to add deep submodule"); } diff --git a/tests/sparse_checkout_tests.rs b/tests/sparse_checkout_tests.rs index 418d70a..e8b0df3 100644 --- a/tests/sparse_checkout_tests.rs +++ b/tests/sparse_checkout_tests.rs @@ -153,7 +153,14 @@ mod tests { // Add submodule normally first harness - .run_submod_success(&["add", &remote_url, "--name", "sparse-disabled", "--path", "lib/sparse-disabled"]) + .run_submod_success(&[ + "add", + &remote_url, + "--name", + "sparse-disabled", + "--path", + "lib/sparse-disabled", + ]) .expect("Failed to add submodule"); // Update config to include sparse paths @@ -271,7 +278,14 @@ sparse_paths = ["src", "docs", "*.md"] // Add submodule without sparse checkout harness - .run_submod_success(&["add", &remote_url, "--name", "no-sparse", "--path", "lib/no-sparse"]) + .run_submod_success(&[ + "add", + &remote_url, + "--name", + "no-sparse", + "--path", + "lib/no-sparse", + ]) .expect("Failed to add submodule"); // Add submodule with sparse checkout