diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index bc12832b8..3bf1ad189 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -1,5 +1,7 @@ name: VFS for Git +run-name: ${{ inputs.run_name || 'VFS for Git' }} + on: pull_request: branches: [ master, releases/shipped ] @@ -11,12 +13,17 @@ on: description: 'Microsoft Git version tag to include in the build (leave empty for default)' required: false type: string + run_name: + description: 'Optional display name for this run (used for cross-repo automation)' + required: false + type: string permissions: contents: read + actions: read env: - GIT_VERSION: ${{ github.event.inputs.git_version || 'v2.50.1.vfs.0.1' }} + GIT_VERSION: ${{ github.event.inputs.git_version || 'v2.53.0.vfs.0.6' }} jobs: validate: @@ -93,7 +100,10 @@ jobs: } } - if (run.status === 'completed' && run.conclusion === 'success') return run.html_url + if (run.status === 'completed' && run.conclusion === 'success') { + core.notice(`Skipping: There already is a successful run: ${run.html_url}`) + return run.html_url + } } return '' } catch (e) { @@ -116,6 +126,48 @@ jobs: -Tag $env:GIT_VERSION && ` Write-Host ::notice title=Validation::Using microsoft/git version $env:GIT_VERSION + - name: Download microsoft/git installers + if: steps.check.outputs.result == '' + shell: cmd + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + gh release download %GIT_VERSION% --repo microsoft/git --pattern "Git*.exe" --dir MicrosoftGit + + - name: Create Git install script + if: steps.check.outputs.result == '' + shell: cmd + run: | + >MicrosoftGit\install.bat ( + echo @ECHO OFF + echo SETLOCAL + echo. + echo IF "%%PROCESSOR_ARCHITECTURE%%"=="AMD64" ^( + echo SET GIT_ARCH=64-bit + echo ^) ELSE IF "%%PROCESSOR_ARCHITECTURE%%"=="ARM64" ^( + echo SET GIT_ARCH=arm64 + echo ^) ELSE ^( + echo ECHO Unknown architecture: %%PROCESSOR_ARCHITECTURE%% + echo exit 1 + echo ^) + echo. 
+ echo FOR /F "tokens=* USEBACKQ" %%%%F IN ^( `where /R %%~dp0 Git*-%%GIT_ARCH%%.exe` ^) DO SET GIT_INSTALLER=%%%%F + echo. + echo SET LOGDIR=%%~dp0\logs + echo IF EXIST %%LOGDIR%% ^( rmdir /S /Q %%LOGDIR%% ^) + echo mkdir %%LOGDIR%% + echo. + echo ECHO Installing Git ^(%%GIT_ARCH%%^)... + echo %%GIT_INSTALLER%% /LOG="%%LOGDIR%%\git.log" /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /ALLOWDOWNGRADE=1 + ) + + - name: Upload microsoft/git installers + if: steps.check.outputs.result == '' + uses: actions/upload-artifact@v7 + with: + name: MicrosoftGit + path: MicrosoftGit + build: runs-on: windows-2025 name: Build and Unit Test @@ -150,7 +202,7 @@ jobs: - name: Add MSBuild to PATH if: steps.skip.outputs.result != 'true' - uses: microsoft/setup-msbuild@v2.0.0 + uses: microsoft/setup-msbuild@v3.0.0 - name: Build VFS for Git if: steps.skip.outputs.result != 'true' @@ -167,139 +219,39 @@ jobs: shell: cmd run: src\scripts\CreateBuildArtifacts.bat ${{ matrix.configuration }} artifacts - - name: Download microsoft/git installers - if: steps.skip.outputs.result != 'true' - shell: cmd - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - gh release download %GIT_VERSION% --repo microsoft/git --pattern "Git*.exe" --dir artifacts\GVFS.Installers - - name: Upload functional tests drop if: steps.skip.outputs.result != 'true' - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: FunctionalTests_${{ matrix.configuration }} path: artifacts\GVFS.FunctionalTests - name: Upload FastFetch drop if: steps.skip.outputs.result != 'true' - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: FastFetch_${{ matrix.configuration }} path: artifacts\FastFetch - - name: Upload installers + - name: Upload GVFS installer if: steps.skip.outputs.result != 'true' - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: - name: Installers_${{ matrix.configuration }} + name: GVFS_${{ matrix.configuration }} path: 
artifacts\GVFS.Installers - functional_test: - runs-on: ${{ matrix.architecture == 'arm64' && 'windows-11-arm' || 'windows-2025' }} + functional_tests: name: Functional Tests needs: [validate, build] - - strategy: - matrix: - configuration: [ Debug, Release ] - architecture: [ x86_64, arm64 ] - nr: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] # 10 parallel jobs to speed up the tests - fail-fast: false # most failures are flaky tests, no need to stop the other jobs from succeeding - - steps: - - name: Skip this job if there is a previous successful run - if: needs.validate.outputs.skip != '' - id: skip - uses: actions/github-script@v8 - with: - script: | - core.info(`Skipping: There already is a successful run: ${{ needs.validate.outputs.skip }}`) - return true - - - name: Download installers - if: steps.skip.outputs.result != 'true' - uses: actions/download-artifact@v7 - with: - name: Installers_${{ matrix.configuration }} - path: install - - - name: Download functional tests drop - if: steps.skip.outputs.result != 'true' - uses: actions/download-artifact@v7 - with: - name: FunctionalTests_${{ matrix.configuration }} - path: ft - - - name: ProjFS details (pre-install) - if: steps.skip.outputs.result != 'true' - shell: cmd - run: install\info.bat - - - name: Install product - if: steps.skip.outputs.result != 'true' - shell: cmd - run: install\install.bat - - - name: ProjFS details (post-install) - if: steps.skip.outputs.result != 'true' - shell: cmd - run: install\info.bat - - - name: Upload installation logs - if: always() && steps.skip.outputs.result != 'true' - uses: actions/upload-artifact@v6 - with: - name: InstallationLogs_${{ matrix.configuration }}_${{ matrix.architecture }}-${{ matrix.nr }} - path: install\logs - - - name: Run functional tests - if: steps.skip.outputs.result != 'true' - shell: cmd - run: | - SET PATH=C:\Program Files\VFS for Git;%PATH% - SET GIT_TRACE2_PERF=C:\temp\git-trace2.log - ft\GVFS.FunctionalTests.exe /result:TestResult.xml --ci --slice=${{ 
matrix.nr }},10 - - - name: Upload functional test results - if: always() && steps.skip.outputs.result != 'true' - uses: actions/upload-artifact@v6 - with: - name: FunctionalTests_Results_${{ matrix.configuration }}_${{ matrix.architecture }}-${{ matrix.nr }} - path: TestResult.xml - - - name: Upload Git trace2 output - if: always() && steps.skip.outputs.result != 'true' - uses: actions/upload-artifact@v6 - with: - name: GitTrace2_${{ matrix.configuration }}_${{ matrix.architecture }}-${{ matrix.nr }} - path: C:\temp\git-trace2.log - - - name: ProjFS details (post-test) - if: always() && steps.skip.outputs.result != 'true' - shell: cmd - run: install\info.bat - - ft_results: - runs-on: ubuntu-latest # quickest runners - name: Functional Tests - needs: [functional_test] - - strategy: - matrix: - configuration: [ Debug, Release ] - architecture: [ x86_64, arm64 ] - - steps: - - name: Success! # for easier identification of successful runs in the Checks Required for Pull Requests - run: echo "All functional test jobs successful for ${{ matrix.configuration }} / ${{ matrix.architecture }}!" + uses: ./.github/workflows/functional-tests.yaml + with: + skip: ${{ needs.validate.outputs.skip }} result: runs-on: ubuntu-latest name: Build, Unit and Functional Tests Successful - needs: [functional_test] + needs: [functional_tests] steps: - name: Success! # for easier identification of successful runs in the Checks Required for Pull Requests - run: echo "Workflow run is successful!" \ No newline at end of file + run: echo "Workflow run is successful!" 
diff --git a/.github/workflows/functional-tests.yaml b/.github/workflows/functional-tests.yaml new file mode 100644 index 000000000..16f0988cf --- /dev/null +++ b/.github/workflows/functional-tests.yaml @@ -0,0 +1,167 @@ +name: Functional Tests + +on: + workflow_call: + inputs: + vfs_repository: + description: 'Repository to download the VFSForGit artifacts from (defaults to the calling repository)' + required: false + type: string + default: '' + vfs_run_id: + description: 'Workflow run ID to download FT executables and GVFS installer from (defaults to the calling run)' + required: false + type: string + default: '' + git_repository: + description: 'Repository to download the Git installer artifact from (defaults to the calling repository)' + required: false + type: string + default: '' + git_run_id: + description: 'Workflow run ID to download the Git installer artifact from (defaults to the calling run)' + required: false + type: string + default: '' + git_artifact_name: + description: 'Name of the artifact containing the Git installer (must include an install.bat script)' + required: false + type: string + default: 'MicrosoftGit' + skip: + description: 'URL of a previous successful run; if non-empty, all steps are skipped (job still succeeds for required checks)' + required: false + type: string + default: '' + output_prefix: + description: 'Prefix for uploaded artifact names (e.g. 
"VFSForGit" to namespace artifacts in cross-repo runs)' + required: false + type: string + default: '' + secrets: + vfs_token: + description: 'Token for downloading VFSForGit artifacts (required for cross-repository downloads; defaults to GITHUB_TOKEN)' + required: false + git_token: + description: 'Token for downloading the Git installer artifact (required for cross-repository downloads; defaults to GITHUB_TOKEN)' + required: false + +permissions: + contents: read + actions: read + +jobs: + functional_test: + runs-on: ${{ matrix.architecture == 'arm64' && 'windows-11-arm' || 'windows-2025' }} + name: Test + + env: + ARTIFACT_PREFIX: ${{ inputs.output_prefix && format('{0}_', inputs.output_prefix) || '' }} + FT_MATRIX_NAME: ${{ format('{0}_{1}-{2}', matrix.configuration, matrix.architecture, matrix.nr) }} + + strategy: + matrix: + configuration: [ Debug, Release ] + architecture: [ x86_64, arm64 ] + nr: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] # 10 parallel jobs to speed up the tests + fail-fast: false # most failures are flaky tests, no need to stop the other jobs from succeeding + + steps: + - name: Skip this job if there is a previous successful run + if: inputs.skip != '' + id: skip + uses: actions/github-script@v8 + with: + script: | + core.info(`Skipping: There already is a successful run: ${{ inputs.skip }}`) + return true + + - name: Download Git installer + if: steps.skip.outputs.result != 'true' + uses: actions/download-artifact@v8 + with: + name: ${{ inputs.git_artifact_name }} + path: git + repository: ${{ inputs.git_repository || github.repository }} + run-id: ${{ inputs.git_run_id || github.run_id }} + github-token: ${{ secrets.git_token || github.token }} + + - name: Download GVFS installer + if: steps.skip.outputs.result != 'true' + uses: actions/download-artifact@v8 + with: + name: GVFS_${{ matrix.configuration }} + path: gvfs + repository: ${{ inputs.vfs_repository || github.repository }} + run-id: ${{ inputs.vfs_run_id || github.run_id }} + github-token: 
${{ secrets.vfs_token || github.token }} + + - name: Download functional tests drop + if: steps.skip.outputs.result != 'true' + uses: actions/download-artifact@v8 + with: + name: FunctionalTests_${{ matrix.configuration }} + path: ft + repository: ${{ inputs.vfs_repository || github.repository }} + run-id: ${{ inputs.vfs_run_id || github.run_id }} + github-token: ${{ secrets.vfs_token || github.token }} + + - name: ProjFS details (pre-install) + if: steps.skip.outputs.result != 'true' + shell: cmd + continue-on-error: true + run: gvfs\info.bat + + - name: Install Git + if: steps.skip.outputs.result != 'true' + shell: cmd + run: git\install.bat + + - name: Install VFS for Git + if: steps.skip.outputs.result != 'true' + shell: cmd + run: gvfs\install.bat + + - name: ProjFS details (post-install) + if: steps.skip.outputs.result != 'true' + shell: cmd + continue-on-error: true + run: gvfs\info.bat + + - name: Upload installation logs + if: always() && steps.skip.outputs.result != 'true' + uses: actions/upload-artifact@v7 + continue-on-error: true + with: + name: ${{ env.ARTIFACT_PREFIX }}InstallationLogs_${{ env.FT_MATRIX_NAME }} + path: | + git\logs + gvfs\logs + + - name: Run functional tests + if: steps.skip.outputs.result != 'true' + shell: cmd + run: | + SET PATH=C:\Program Files\VFS for Git;%PATH% + SET GIT_TRACE2_PERF=C:\temp\git-trace2.log + ft\GVFS.FunctionalTests.exe /result:TestResult.xml --ci --slice=${{ matrix.nr }},10 + + - name: Upload functional test results + if: always() && steps.skip.outputs.result != 'true' + uses: actions/upload-artifact@v7 + with: + name: ${{ env.ARTIFACT_PREFIX }}FunctionalTests_Results_${{ env.FT_MATRIX_NAME }} + path: TestResult.xml + + - name: Upload Git trace2 output + if: always() && steps.skip.outputs.result != 'true' + uses: actions/upload-artifact@v7 + with: + name: ${{ env.ARTIFACT_PREFIX }}GitTrace2_${{ env.FT_MATRIX_NAME }} + path: C:\temp\git-trace2.log + + - name: ProjFS details (post-test) + if: always() && 
steps.skip.outputs.result != 'true' + shell: cmd + continue-on-error: true + run: gvfs\info.bat diff --git a/GVFS/FastFetch/FastFetchLibGit2Repo.cs b/GVFS/FastFetch/FastFetchLibGit2Repo.cs index 8c7716415..d278baa5a 100644 --- a/GVFS/FastFetch/FastFetchLibGit2Repo.cs +++ b/GVFS/FastFetch/FastFetchLibGit2Repo.cs @@ -18,7 +18,7 @@ public FastFetchLibGit2Repo(ITracer tracer, string repoPath) public virtual bool TryCopyBlobToFile(string sha, IEnumerable destinations, out long bytesWritten) { IntPtr objHandle; - if (Native.RevParseSingle(out objHandle, this.RepoHandle, sha) != Native.SuccessCode) + if (Native.RevParseSingle(out objHandle, this.RepoHandle, sha) != Native.ResultCode.Success) { bytesWritten = 0; EventMetadata metadata = new EventMetadata(); diff --git a/GVFS/GVFS.Common/Enlistment.cs b/GVFS/GVFS.Common/Enlistment.cs index 9e40b309c..5dcfd9e54 100644 --- a/GVFS/GVFS.Common/Enlistment.cs +++ b/GVFS/GVFS.Common/Enlistment.cs @@ -62,10 +62,18 @@ protected Enlistment( public string WorkingDirectoryRoot { get; } public string WorkingDirectoryBackingRoot { get; } - public string DotGitRoot { get; private set; } + public string DotGitRoot { get; protected set; } public abstract string GitObjectsRoot { get; protected set; } public abstract string LocalObjectsRoot { get; protected set; } public abstract string GitPackRoot { get; protected set; } + + /// + /// Path to the git index file. Override for worktree-specific paths. 
+ /// + public virtual string GitIndexPath + { + get { return Path.Combine(this.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Index); } + } public string RepoUrl { get; } public bool FlushFileBuffersForPacks { get; } @@ -109,31 +117,5 @@ public virtual GitProcess CreateGitProcess() { return new GitProcess(this); } - - public bool GetTrustPackIndexesConfig() - { - var gitProcess = this.CreateGitProcess(); - bool trustPackIndexes = true; - if (gitProcess.TryGetFromConfig(GVFSConstants.GitConfig.TrustPackIndexes, forceOutsideEnlistment: false, out var valueString) - && bool.TryParse(valueString, out var trustPackIndexesConfig)) - { - trustPackIndexes = trustPackIndexesConfig; - } - - return trustPackIndexes; - } - - public bool GetStatusHydrationConfig() - { - var gitProcess = this.CreateGitProcess(); - - if (gitProcess.TryGetFromConfig(GVFSConstants.GitConfig.ShowHydrationStatus, forceOutsideEnlistment: false, out var valueString) - && bool.TryParse(valueString, out var statusHydrationConfig)) - { - return statusHydrationConfig; - } - - return GVFSConstants.GitConfig.ShowHydrationStatusDefault; - } } } diff --git a/GVFS/GVFS.Common/FileBasedLock.cs b/GVFS/GVFS.Common/FileBasedLock.cs index 9f709f2cf..e87fc7eb6 100644 --- a/GVFS/GVFS.Common/FileBasedLock.cs +++ b/GVFS/GVFS.Common/FileBasedLock.cs @@ -20,7 +20,23 @@ public FileBasedLock( protected string LockPath { get; } protected ITracer Tracer { get; } - public abstract bool TryAcquireLock(); + public bool TryAcquireLock() + { + return this.TryAcquireLock(out _); + } + + /// + /// Attempts to acquire the lock, providing the exception that prevented acquisition. + /// + /// + /// When the method returns false, contains the exception that prevented lock acquisition. + /// Callers can pattern-match on the exception type to distinguish lock contention + /// (e.g. with a sharing violation HResult) from + /// permission errors () or other failures. + /// Null when the method returns true. 
+ /// + /// True if the lock was acquired, false otherwise. + public abstract bool TryAcquireLock(out Exception lockException); public abstract void Dispose(); } diff --git a/GVFS/GVFS.Common/FileSystem/PhysicalFileSystem.cs b/GVFS/GVFS.Common/FileSystem/PhysicalFileSystem.cs index 90bfd8913..3b1ebe267 100644 --- a/GVFS/GVFS.Common/FileSystem/PhysicalFileSystem.cs +++ b/GVFS/GVFS.Common/FileSystem/PhysicalFileSystem.cs @@ -49,6 +49,11 @@ public virtual void DeleteDirectory(string path, bool recursive = true, bool ign } } + public virtual void MoveDirectory(string sourceDirName, string destDirName) + { + Directory.Move(sourceDirName, destDirName); + } + public virtual void CopyDirectoryRecursive( string srcDirectoryPath, string dstDirectoryPath, diff --git a/GVFS/GVFS.Common/GVFSConstants.cs b/GVFS/GVFS.Common/GVFSConstants.cs index 3f3abecc4..24374b26a 100644 --- a/GVFS/GVFS.Common/GVFSConstants.cs +++ b/GVFS/GVFS.Common/GVFSConstants.cs @@ -42,9 +42,12 @@ public static class GitConfig /* Intended to be a temporary config to allow testing of distrusting pack indexes from cache server * before it is enabled by default. 
*/ public const string TrustPackIndexes = GVFSPrefix + "trust-pack-indexes"; + public const bool TrustPackIndexesDefault = true; public const string ShowHydrationStatus = GVFSPrefix + "show-hydration-status"; public const bool ShowHydrationStatusDefault = false; + + public const string MaxHttpConnectionsConfig = GVFSPrefix + "max-http-connections"; } public static class LocalGVFSConfig @@ -98,6 +101,7 @@ public static class LogFileTypes public const string Clone = "clone"; public const string Dehydrate = "dehydrate"; + public const string Health = "health"; public const string MountVerb = MountPrefix + "_verb"; public const string MountProcess = MountPrefix + "_process"; public const string MountUpgrade = MountPrefix + "_repoupgrade"; @@ -115,6 +119,7 @@ public static class DotGVFS { public const string CorruptObjectsName = "CorruptObjects"; public const string LogName = "logs"; + public const string MountLock = "mount.lock"; public static class Databases { @@ -131,7 +136,11 @@ public static class GitStatusCache { public const string Name = "gitStatusCache"; public static readonly string CachePath = Path.Combine(Name, "GitStatusCache.dat"); - public static readonly string TreeCount = Path.Combine(Name, "TreeCountCache.dat"); + } + + public static class HydrationStatus + { + public static readonly string DisabledMarkerFile = Path.Combine("gitStatusCache", "HydrationStatusDisabled.dat"); } } @@ -139,6 +148,9 @@ public static class DotGit { public const string Root = ".git"; public const string HeadName = "HEAD"; + public const string GitDirPrefix = "gitdir: "; + public const string CommonDirName = "commondir"; + public const string SkipCleanCheckName = "skip-clean-check"; public const string IndexName = "index"; public const string PackedRefsName = "packed-refs"; public const string LockExtension = ".lock"; @@ -157,10 +169,14 @@ public static class DotGit public static class Logs { + public const string RootName = "logs"; public static readonly string HeadName = 
"HEAD"; - public static readonly string Root = Path.Combine(DotGit.Root, "logs"); + public static readonly string Root = Path.Combine(DotGit.Root, RootName); public static readonly string Head = Path.Combine(Logs.Root, Logs.HeadName); + + /// Path relative to the git directory (e.g., "logs/HEAD"). + public static readonly string HeadRelativePath = Path.Combine(RootName, HeadName); } public static class Hooks @@ -171,7 +187,8 @@ public static class Hooks public const string ReadObjectName = "read-object"; public const string VirtualFileSystemName = "virtual-filesystem"; public const string PostIndexChangedName = "post-index-change"; - public static readonly string Root = Path.Combine(DotGit.Root, "hooks"); + public const string RootName = "hooks"; + public static readonly string Root = Path.Combine(DotGit.Root, RootName); public static readonly string PreCommandPath = Path.Combine(Hooks.Root, PreCommandHookName); public static readonly string PostCommandPath = Path.Combine(Hooks.Root, PostCommandHookName); public static readonly string ReadObjectPath = Path.Combine(Hooks.Root, ReadObjectName); @@ -200,6 +217,9 @@ public static class Info { public static readonly string Root = Path.Combine(Objects.Root, "info"); public static readonly string Alternates = Path.Combine(Info.Root, "alternates"); + + /// Path relative to the git directory (e.g., "objects/info/alternates"). 
+ public static readonly string AlternatesRelativePath = Path.Combine("objects", "info", "alternates"); } public static class Pack diff --git a/GVFS/GVFS.Common/GVFSEnlistment.Shared.cs b/GVFS/GVFS.Common/GVFSEnlistment.Shared.cs index a7e84ba33..26e2de306 100644 --- a/GVFS/GVFS.Common/GVFSEnlistment.Shared.cs +++ b/GVFS/GVFS.Common/GVFSEnlistment.Shared.cs @@ -1,5 +1,7 @@ using GVFS.Common.Tracing; using System; +using System.Collections.Generic; +using System.IO; using System.Security; namespace GVFS.Common @@ -25,5 +27,230 @@ public static bool IsUnattended(ITracer tracer) return false; } } + + /// + /// Returns true if is equal to or a subdirectory of + /// (case-insensitive). Both paths are + /// canonicalized with to resolve + /// relative segments (e.g. "/../") before comparison. + /// + public static bool IsPathInsideDirectory(string path, string directory) + { + string normalizedPath = Path.GetFullPath(path) + .TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + string normalizedDirectory = Path.GetFullPath(directory) + .TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + + return normalizedPath.StartsWith(normalizedDirectory + Path.DirectorySeparatorChar, StringComparison.OrdinalIgnoreCase) || + normalizedPath.Equals(normalizedDirectory, StringComparison.OrdinalIgnoreCase); + } + + /// + /// Detects if the given directory is a git worktree by checking for + /// a .git file (not directory) containing "gitdir: path/.git/worktrees/name". + /// Returns a pipe name suffix like "_WT_NAME" if so, or null if not a worktree. + /// + public static string GetWorktreePipeSuffix(string directory) + { + WorktreeInfo info = TryGetWorktreeInfo(directory); + return info?.PipeSuffix; + } + + /// + /// Detects if the given directory (or any ancestor) is a git worktree. + /// Walks up from looking for a .git + /// file (not directory) containing a gitdir: pointer. Returns + /// null if not inside a worktree. 
+ /// + public static WorktreeInfo TryGetWorktreeInfo(string directory) + { + return TryGetWorktreeInfo(directory, out _); + } + + /// + /// Detects if the given directory (or any ancestor) is a git worktree. + /// Walks up from looking for a .git + /// file (not directory) containing a gitdir: pointer. Returns + /// null if not inside a worktree, with an error message if an I/O + /// error prevented detection. + /// + public static WorktreeInfo TryGetWorktreeInfo(string directory, out string error) + { + error = null; + + if (string.IsNullOrEmpty(directory)) + { + return null; + } + + // Canonicalize to an absolute path so walk-up and Path.Combine + // behave consistently regardless of the caller's CWD. + string current = Path.GetFullPath(directory); + while (current != null) + { + string dotGitPath = Path.Combine(current, ".git"); + + if (Directory.Exists(dotGitPath)) + { + // Found a real .git directory — this is a primary worktree, not a linked worktree + return null; + } + + if (File.Exists(dotGitPath)) + { + return TryParseWorktreeGitFile(current, dotGitPath, out error); + } + + string parent = Path.GetDirectoryName(current); + if (parent == current) + { + break; + } + + current = parent; + } + + return null; + } + + private static WorktreeInfo TryParseWorktreeGitFile(string worktreeRoot, string dotGitPath, out string error) + { + error = null; + + try + { + string gitdirLine = File.ReadAllText(dotGitPath).Trim(); + if (!gitdirLine.StartsWith(GVFSConstants.DotGit.GitDirPrefix)) + { + return null; + } + + string gitdirPath = gitdirLine.Substring(GVFSConstants.DotGit.GitDirPrefix.Length).Trim(); + gitdirPath = gitdirPath.Replace('/', Path.DirectorySeparatorChar); + + // Resolve relative paths against the worktree directory + if (!Path.IsPathRooted(gitdirPath)) + { + gitdirPath = Path.GetFullPath(Path.Combine(worktreeRoot, gitdirPath)); + } + + string worktreeName = Path.GetFileName(gitdirPath); + if (string.IsNullOrEmpty(worktreeName)) + { + return null; + } + 
+ // Read commondir to find the shared .git/ directory. + // All valid worktrees must have a commondir file. + string commondirFile = Path.Combine(gitdirPath, GVFSConstants.DotGit.CommonDirName); + if (!File.Exists(commondirFile)) + { + return null; + } + + string commondirContent = File.ReadAllText(commondirFile).Trim(); + string sharedGitDir = Path.GetFullPath(Path.Combine(gitdirPath, commondirContent)); + + return new WorktreeInfo + { + Name = worktreeName, + WorktreePath = worktreeRoot, + WorktreeGitDir = gitdirPath, + SharedGitDir = sharedGitDir, + PipeSuffix = "_WT_" + worktreeName.ToUpper(), + }; + } + catch (IOException e) + { + error = e.Message; + return null; + } + catch (UnauthorizedAccessException e) + { + error = e.Message; + return null; + } + } + + /// + /// Returns the working directory paths of all worktrees registered + /// under /worktrees by reading each entry's + /// gitdir file. The primary worktree is not included. + /// + public static string[] GetKnownWorktreePaths(string gitDir) + { + string worktreesDir = Path.Combine(gitDir, "worktrees"); + if (!Directory.Exists(worktreesDir)) + { + return new string[0]; + } + + List paths = new List(); + foreach (string entry in Directory.GetDirectories(worktreesDir)) + { + string gitdirFile = Path.Combine(entry, "gitdir"); + if (!File.Exists(gitdirFile)) + { + continue; + } + + try + { + string gitdirContent = File.ReadAllText(gitdirFile).Trim(); + gitdirContent = gitdirContent.Replace('/', Path.DirectorySeparatorChar); + string worktreeDir = Path.GetDirectoryName(gitdirContent); + if (!string.IsNullOrEmpty(worktreeDir)) + { + paths.Add(Path.GetFullPath(worktreeDir)); + } + } + catch + { + } + } + + return paths.ToArray(); + } + + public class WorktreeInfo + { + public const string EnlistmentRootFileName = "gvfs-enlistment-root"; + + public string Name { get; set; } + public string WorktreePath { get; set; } + public string WorktreeGitDir { get; set; } + public string SharedGitDir { get; set; } + 
public string PipeSuffix { get; set; } + + /// + /// Returns the primary enlistment root, either from a stored + /// marker file or by deriving it from SharedGitDir. + /// + public string GetEnlistmentRoot() + { + // Prefer the explicit marker written during worktree creation + string markerPath = Path.Combine(this.WorktreeGitDir, EnlistmentRootFileName); + if (File.Exists(markerPath)) + { + string root = File.ReadAllText(markerPath).Trim(); + if (!string.IsNullOrEmpty(root)) + { + return root; + } + } + + // Fallback: derive from SharedGitDir (assumes /src/.git) + if (this.SharedGitDir != null) + { + string srcDir = Path.GetDirectoryName(this.SharedGitDir); + if (srcDir != null) + { + return Path.GetDirectoryName(srcDir); + } + } + + return null; + } + } } } diff --git a/GVFS/GVFS.Common/GVFSEnlistment.cs b/GVFS/GVFS.Common/GVFSEnlistment.cs index 731f1b355..eb407c175 100644 --- a/GVFS/GVFS.Common/GVFSEnlistment.cs +++ b/GVFS/GVFS.Common/GVFSEnlistment.cs @@ -48,12 +48,59 @@ private GVFSEnlistment(string enlistmentRoot, string gitBinPath, GitAuthenticati { } + // Worktree enlistment — overrides working directory, pipe name, and metadata paths + private GVFSEnlistment(string enlistmentRoot, string gitBinPath, GitAuthentication authentication, WorktreeInfo worktreeInfo, string repoUrl = null) + : base( + enlistmentRoot, + worktreeInfo.WorktreePath, + worktreeInfo.WorktreePath, + repoUrl, + gitBinPath, + flushFileBuffersForPacks: true, + authentication: authentication) + { + this.Worktree = worktreeInfo; + + // Override DotGitRoot to point to the shared .git directory. + // The base constructor sets it to WorkingDirectoryBackingRoot/.git + // which is a file (not directory) in worktrees. 
+ this.DotGitRoot = worktreeInfo.SharedGitDir; + + this.DotGVFSRoot = Path.Combine(worktreeInfo.WorktreeGitDir, GVFSPlatform.Instance.Constants.DotGVFSRoot); + this.NamedPipeName = GVFSPlatform.Instance.GetNamedPipeName(enlistmentRoot) + worktreeInfo.PipeSuffix; + this.GitStatusCacheFolder = Path.Combine(this.DotGVFSRoot, GVFSConstants.DotGVFS.GitStatusCache.Name); + this.GitStatusCachePath = Path.Combine(this.DotGVFSRoot, GVFSConstants.DotGVFS.GitStatusCache.CachePath); + this.GVFSLogsRoot = Path.Combine(this.DotGVFSRoot, GVFSConstants.DotGVFS.LogName); + this.LocalObjectsRoot = Path.Combine(worktreeInfo.SharedGitDir, "objects"); + } + public string NamedPipeName { get; } public string DotGVFSRoot { get; } public string GVFSLogsRoot { get; } + public WorktreeInfo Worktree { get; } + + public bool IsWorktree => this.Worktree != null; + + /// + /// Path to the git index file. For worktrees this is in the + /// per-worktree git dir, not in the working directory. + /// + public override string GitIndexPath + { + get + { + if (this.IsWorktree) + { + return Path.Combine(this.Worktree.WorktreeGitDir, GVFSConstants.DotGit.IndexName); + } + + return base.GitIndexPath; + } + } + public string LocalCacheRoot { get; private set; } public string BlobSizesRoot { get; private set; } @@ -88,6 +135,37 @@ public static GVFSEnlistment CreateFromDirectory( { if (Directory.Exists(directory)) { + // Always check for worktree first. A worktree directory may + // be under the enlistment tree, so TryGetGVFSEnlistmentRoot + // can succeed by walking up — but we need a worktree enlistment. 
+ string worktreeError; + WorktreeInfo wtInfo = TryGetWorktreeInfo(directory, out worktreeError); + if (worktreeError != null) + { + throw new InvalidRepoException($"Failed to check worktree status for '{directory}': {worktreeError}"); + } + + if (wtInfo?.SharedGitDir != null) + { + string primaryRoot = wtInfo.GetEnlistmentRoot(); + if (primaryRoot != null) + { + // Read origin URL via the shared .git dir (not the worktree's + // .git file) because the base Enlistment constructor runs + // git config before we can override DotGitRoot. + string srcDir = Path.GetDirectoryName(wtInfo.SharedGitDir); + string repoUrl = null; + if (srcDir != null) + { + GitProcess git = new GitProcess(gitBinRoot, srcDir); + GitProcess.ConfigResult urlResult = git.GetOriginUrl(); + urlResult.TryParseAsString(out repoUrl, out _); + } + + return CreateForWorktree(primaryRoot, gitBinRoot, authentication, wtInfo, repoUrl?.Trim()); + } + } + string errorMessage; string enlistmentRoot; if (!GVFSPlatform.Instance.TryGetGVFSEnlistmentRoot(directory, out enlistmentRoot, out errorMessage)) @@ -106,6 +184,21 @@ public static GVFSEnlistment CreateFromDirectory( throw new InvalidRepoException($"Directory '{directory}' does not exist"); } + /// + /// Creates a GVFSEnlistment for a git worktree. Uses the primary + /// enlistment root for shared config but maps working directory, + /// metadata, and pipe name to the worktree. 
+ /// + public static GVFSEnlistment CreateForWorktree( + string primaryEnlistmentRoot, + string gitBinRoot, + GitAuthentication authentication, + WorktreeInfo worktreeInfo, + string repoUrl = null) + { + return new GVFSEnlistment(primaryEnlistmentRoot, gitBinRoot, authentication, worktreeInfo, repoUrl); + } + public static string GetNewGVFSLogFileName( string logsRoot, string logFileType, @@ -122,6 +215,11 @@ public static string GetNewGVFSLogFileName( public static bool WaitUntilMounted(ITracer tracer, string enlistmentRoot, bool unattended, out string errorMessage) { string pipeName = GVFSPlatform.Instance.GetNamedPipeName(enlistmentRoot); + return WaitUntilMounted(tracer, pipeName, enlistmentRoot, unattended, out errorMessage); + } + + public static bool WaitUntilMounted(ITracer tracer, string pipeName, string enlistmentRoot, bool unattended, out string errorMessage) + { tracer.RelatedInfo($"{nameof(WaitUntilMounted)}: Creating NamedPipeClient for pipe '{pipeName}'"); errorMessage = null; diff --git a/GVFS/GVFS.Common/Git/GVFSGitObjects.cs b/GVFS/GVFS.Common/Git/GVFSGitObjects.cs index df660dfa0..b9044b2be 100644 --- a/GVFS/GVFS.Common/Git/GVFSGitObjects.cs +++ b/GVFS/GVFS.Common/Git/GVFSGitObjects.cs @@ -14,12 +14,14 @@ public class GVFSGitObjects : GitObjects private static readonly TimeSpan NegativeCacheTTL = TimeSpan.FromSeconds(30); private ConcurrentDictionary objectNegativeCache; + internal ConcurrentDictionary> inflightDownloads; public GVFSGitObjects(GVFSContext context, GitObjectsHttpRequestor objectRequestor) : base(context.Tracer, context.Enlistment, objectRequestor, context.FileSystem) { this.Context = context; this.objectNegativeCache = new ConcurrentDictionary(StringComparer.OrdinalIgnoreCase); + this.inflightDownloads = new ConcurrentDictionary>(StringComparer.OrdinalIgnoreCase); } public enum RequestSource @@ -127,6 +129,54 @@ private DownloadAndSaveObjectResult TryDownloadAndSaveObject( this.objectNegativeCache.TryRemove(objectId, out 
negativeCacheRequestTime); } + // Coalesce concurrent requests for the same objectId so that only one HTTP + // download runs per SHA at a time. All concurrent callers share the result. + // Note: the first caller's cancellationToken and retryOnFailure settings are + // captured by the Lazy factory. Subsequent coalesced callers inherit those + // settings. In practice this is fine because the primary concurrent path + // (NamedPipeMessage from git.exe) always uses CancellationToken.None. + Lazy newLazy = new Lazy( + () => this.DoDownloadAndSaveObject(objectId, cancellationToken, requestSource, retryOnFailure)); + Lazy lazy = this.inflightDownloads.GetOrAdd(objectId, newLazy); + + if (!ReferenceEquals(lazy, newLazy)) + { + EventMetadata metadata = new EventMetadata(); + metadata.Add("objectId", objectId); + metadata.Add("requestSource", requestSource.ToString()); + this.Context.Tracer.RelatedEvent(EventLevel.Informational, "TryDownloadAndSaveObject_CoalescedRequest", metadata); + } + + try + { + return lazy.Value; + } + finally + { + this.TryRemoveInflightDownload(objectId, lazy); + } + } + + /// + /// Removes the inflight download entry only if the current value matches the + /// expected Lazy instance. This prevents an ABA race where a straggling thread's + /// finally block could remove a newer Lazy created by a later wave of requests. + /// Uses ICollection<KVP>.Remove which is the value-aware atomic removal on + /// .NET Framework 4.7.1. When we upgrade to .NET 10 (backlog), this can be + /// replaced with ConcurrentDictionary.TryRemove(KeyValuePair). 
+ /// + private bool TryRemoveInflightDownload(string objectId, Lazy lazy) + { + return ((ICollection>>)this.inflightDownloads) + .Remove(new KeyValuePair>(objectId, lazy)); + } + + private DownloadAndSaveObjectResult DoDownloadAndSaveObject( + string objectId, + CancellationToken cancellationToken, + RequestSource requestSource, + bool retryOnFailure) + { // To reduce allocations, reuse the same buffer when writing objects in this batch byte[] bufToCopyWith = new byte[StreamUtil.DefaultCopyBufferSize]; diff --git a/GVFS/GVFS.Common/Git/GitAuthentication.cs b/GVFS/GVFS.Common/Git/GitAuthentication.cs index 27796f0e8..bb81a86c1 100644 --- a/GVFS/GVFS.Common/Git/GitAuthentication.cs +++ b/GVFS/GVFS.Common/Git/GitAuthentication.cs @@ -183,34 +183,98 @@ public bool TryGetCredentials(ITracer tracer, out string credentialString, out s return true; } + /// + /// Initialize authentication by probing the server. Determines whether + /// anonymous access is supported and, if not, fetches credentials. + /// Callers that also need the GVFS config should use + /// instead to avoid a + /// redundant HTTP round-trip. + /// public bool TryInitialize(ITracer tracer, Enlistment enlistment, out string errorMessage) + { + // Delegate to the combined method, discarding the config result. + // This avoids duplicating the anonymous-probe + credential-fetch logic. + return this.TryInitializeAndQueryGVFSConfig( + tracer, + enlistment, + new RetryConfig(), + out _, + out errorMessage); + } + + /// + /// Combines authentication initialization with the GVFS config query, + /// eliminating a redundant HTTP round-trip. The anonymous probe and + /// config query use the same request to /gvfs/config: + /// 1. Config query → /gvfs/config → 200 (anonymous) or 401 + /// 2. If 401: credential fetch, then retry → 200 + /// This saves one HTTP request compared to probing auth separately + /// and then querying config, and reuses the same TCP/TLS connection. 
+ /// + public bool TryInitializeAndQueryGVFSConfig( + ITracer tracer, + Enlistment enlistment, + RetryConfig retryConfig, + out ServerGVFSConfig serverGVFSConfig, + out string errorMessage) { if (this.isInitialized) { throw new InvalidOperationException("Already initialized"); } + serverGVFSConfig = null; errorMessage = null; - bool isAnonymous; - if (!this.TryAnonymousQuery(tracer, enlistment, out isAnonymous)) + using (ConfigHttpRequestor configRequestor = new ConfigHttpRequestor(tracer, enlistment, retryConfig)) { - errorMessage = $"Unable to determine if authentication is required"; - return false; - } + HttpStatusCode? httpStatus; - if (!isAnonymous && - !this.TryCallGitCredential(tracer, out errorMessage)) - { + // First attempt without credentials. If anonymous access works, + // we get the config in a single request. + if (configRequestor.TryQueryGVFSConfig(false, out serverGVFSConfig, out httpStatus, out _)) + { + this.IsAnonymous = true; + this.isInitialized = true; + tracer.RelatedInfo("{0}: Anonymous access succeeded, config obtained in one request", nameof(this.TryInitializeAndQueryGVFSConfig)); + return true; + } + + if (httpStatus != HttpStatusCode.Unauthorized) + { + errorMessage = "Unable to query /gvfs/config"; + tracer.RelatedWarning("{0}: Config query failed with status {1}", nameof(this.TryInitializeAndQueryGVFSConfig), httpStatus?.ToString() ?? 
"None"); + return false; + } + + // Server requires authentication — fetch credentials + this.IsAnonymous = false; + + if (!this.TryCallGitCredential(tracer, out errorMessage)) + { + tracer.RelatedWarning("{0}: Credential fetch failed: {1}", nameof(this.TryInitializeAndQueryGVFSConfig), errorMessage); + return false; + } + + this.isInitialized = true; + + // Retry with credentials using the same ConfigHttpRequestor (reuses HttpClient/connection) + if (configRequestor.TryQueryGVFSConfig(true, out serverGVFSConfig, out _, out errorMessage)) + { + tracer.RelatedInfo("{0}: Config obtained with credentials", nameof(this.TryInitializeAndQueryGVFSConfig)); + return true; + } + + tracer.RelatedWarning("{0}: Config query failed with credentials: {1}", nameof(this.TryInitializeAndQueryGVFSConfig), errorMessage); return false; } - - this.IsAnonymous = isAnonymous; - this.isInitialized = true; - return true; } - public bool TryInitializeAndRequireAuth(ITracer tracer, out string errorMessage) + /// + /// Test-only initialization that skips the network probe and goes + /// straight to credential fetch. Not for production use. + /// + internal bool TryInitializeAndRequireAuth(ITracer tracer, out string errorMessage) { if (this.isInitialized) { @@ -267,45 +331,6 @@ private static bool TryParseCredentialString(string credentialString, out string return false; } - private bool TryAnonymousQuery(ITracer tracer, Enlistment enlistment, out bool isAnonymous) - { - bool querySucceeded; - using (ITracer anonymousTracer = tracer.StartActivity("AttemptAnonymousAuth", EventLevel.Informational)) - { - HttpStatusCode? 
httpStatus; - - using (ConfigHttpRequestor configRequestor = new ConfigHttpRequestor(anonymousTracer, enlistment, new RetryConfig())) - { - ServerGVFSConfig gvfsConfig; - const bool LogErrors = false; - if (configRequestor.TryQueryGVFSConfig(LogErrors, out gvfsConfig, out httpStatus, out _)) - { - querySucceeded = true; - isAnonymous = true; - } - else if (httpStatus == HttpStatusCode.Unauthorized) - { - querySucceeded = true; - isAnonymous = false; - } - else - { - querySucceeded = false; - isAnonymous = false; - } - } - - anonymousTracer.Stop(new EventMetadata - { - { "HttpStatus", httpStatus.HasValue ? ((int)httpStatus).ToString() : "None" }, - { "QuerySucceeded", querySucceeded }, - { "IsAnonymous", isAnonymous }, - }); - } - - return querySucceeded; - } - private DateTime GetNextAuthAttemptTime() { if (this.numberOfAttempts <= 1) diff --git a/GVFS/GVFS.Common/Git/GitCoreGVFSFlags.cs b/GVFS/GVFS.Common/Git/GitCoreGVFSFlags.cs new file mode 100644 index 000000000..551be80b2 --- /dev/null +++ b/GVFS/GVFS.Common/Git/GitCoreGVFSFlags.cs @@ -0,0 +1,63 @@ +using System; + +namespace GVFS.Common.Git +{ + [Flags] + public enum GitCoreGVFSFlags + { + // GVFS_SKIP_SHA_ON_INDEX + // Disables the calculation of the sha when writing the index + SkipShaOnIndex = 1 << 0, + + // GVFS_BLOCK_COMMANDS + // Blocks git commands that are not allowed in a GVFS/Scalar repo + BlockCommands = 1 << 1, + + // GVFS_MISSING_OK + // Normally git write-tree ensures that the objects referenced by the + // directory exist in the object database.This option disables this check. + MissingOk = 1 << 2, + + // GVFS_NO_DELETE_OUTSIDE_SPARSECHECKOUT + // When marking entries to remove from the index and the working + // directory this option will take into account what the + // skip-worktree bit was set to so that if the entry has the + // skip-worktree bit set it will not be removed from the working + // directory. 
This will allow virtualized working directories to + // detect the change to HEAD and use the new commit tree to show + // the files that are in the working directory. + NoDeleteOutsideSparseCheckout = 1 << 3, + + // GVFS_FETCH_SKIP_REACHABILITY_AND_UPLOADPACK + // While performing a fetch with a virtual file system we know + // that there will be missing objects and we don't want to download + // them just because of the reachability of the commits. We also + // don't want to download a pack file with commits, trees, and blobs + // since these will be downloaded on demand. This flag will skip the + // checks on the reachability of objects during a fetch as well as + // the upload pack so that extraneous objects don't get downloaded. + FetchSkipReachabilityAndUploadPack = 1 << 4, + + // 1 << 5 has been deprecated + + // GVFS_BLOCK_FILTERS_AND_EOL_CONVERSIONS + // With a virtual file system we only know the file size before any + // CRLF or smudge/clean filters processing is done on the client. + // To prevent file corruption due to truncation or expansion with + // garbage at the end, these filters must not run when the file + // is first accessed and brought down to the client. Git.exe can't + // currently tell the first access vs subsequent accesses so this + // flag just blocks them from occurring at all. + BlockFiltersAndEolConversions = 1 << 6, + + // GVFS_PREFETCH_DURING_FETCH + // While performing a `git fetch` command, use the gvfs-helper to + // perform a "prefetch" of commits and trees. + PrefetchDuringFetch = 1 << 7, + + // GVFS_SUPPORTS_WORKTREES + // Signals that this GVFS version supports git worktrees, + // allowing `git worktree add/remove` on VFS-enabled repos. 
+ SupportsWorktrees = 1 << 8, + } +} diff --git a/GVFS/GVFS.Common/Git/GitObjects.cs b/GVFS/GVFS.Common/Git/GitObjects.cs index 6807494df..a9b0f2851 100644 --- a/GVFS/GVFS.Common/Git/GitObjects.cs +++ b/GVFS/GVFS.Common/Git/GitObjects.cs @@ -153,7 +153,7 @@ public virtual void DeleteTemporaryFiles() } } - public virtual bool TryDownloadPrefetchPacks(GitProcess gitProcess, long latestTimestamp, out List packIndexes) + public virtual bool TryDownloadPrefetchPacks(GitProcess gitProcess, long latestTimestamp, bool trustPackIndexes, out List packIndexes) { EventMetadata metadata = CreateEventMetadata(); metadata.Add("latestTimestamp", latestTimestamp); @@ -166,7 +166,6 @@ public virtual bool TryDownloadPrefetchPacks(GitProcess gitProcess, long latestT * pack file and an index file that do not match. * Eventually we will make this the default, but it has a high performance cost for the first prefetch after * cloning a large repository, so it must be explicitly enabled for now. */ - bool trustPackIndexes = this.Enlistment.GetTrustPackIndexesConfig(); metadata.Add("trustPackIndexes", trustPackIndexes); long requestId = HttpRequestor.GetNewRequestId(); @@ -211,6 +210,7 @@ public virtual bool TryDownloadPrefetchPacks(GitProcess gitProcess, long latestT { "Success", result.Succeeded }, { "Attempts", result.Attempts }, { "BytesDownloaded", bytesDownloaded }, + { "LatestPrefetchPackTimestamp", latestTimestamp }, }); return result.Succeeded; diff --git a/GVFS/GVFS.Common/Git/GitProcess.cs b/GVFS/GVFS.Common/Git/GitProcess.cs index a86b6131a..a6ac3c748 100644 --- a/GVFS/GVFS.Common/Git/GitProcess.cs +++ b/GVFS/GVFS.Common/Git/GitProcess.cs @@ -509,6 +509,138 @@ public Result StatusPorcelain() return this.InvokeGitInWorkingDirectoryRoot(command, useReadObjectHook: false); } + /// + /// Returns staged file changes (index vs HEAD) as null-separated pairs of + /// status and path: "A\0path1\0M\0path2\0D\0path3\0". 
+ /// Status codes: A=added, M=modified, D=deleted, R=renamed, C=copied. + /// + /// Inline pathspecs to scope the diff, or null for all. + /// + /// Path to a file containing additional pathspecs (one per line), forwarded + /// as --pathspec-from-file to git. Null if not used. + /// + /// + /// When true and pathspecFromFile is set, pathspec entries in the file are + /// separated by NUL instead of newline (--pathspec-file-nul). + /// + public Result DiffCachedNameStatus(string[] pathspecs = null, string pathspecFromFile = null, bool pathspecFileNul = false) + { + string command = "diff --cached --name-status -z --no-renames"; + + if (pathspecFromFile != null) + { + command += " --pathspec-from-file=" + QuoteGitPath(pathspecFromFile); + if (pathspecFileNul) + { + command += " --pathspec-file-nul"; + } + } + + if (pathspecs != null && pathspecs.Length > 0) + { + command += " -- " + string.Join(" ", pathspecs.Select(p => QuoteGitPath(p))); + } + + return this.InvokeGitInWorkingDirectoryRoot(command, useReadObjectHook: false); + } + + /// + /// Writes the staged (index) version of the specified files to the working + /// tree with correct line endings and attributes. Batches multiple paths into + /// a single git process invocation where possible, respecting the Windows + /// command line length limit. + /// + public List CheckoutIndexForFiles(IEnumerable paths) + { + // Windows command line limit is 32,767 characters. Leave headroom for + // the base command and other arguments. 
+ const int MaxCommandLength = 30000; + const string BaseCommand = "-c core.hookspath= checkout-index --force --"; + + List results = new List(); + StringBuilder command = new StringBuilder(BaseCommand); + foreach (string path in paths) + { + string quotedPath = " " + QuoteGitPath(path); + + if (command.Length + quotedPath.Length > MaxCommandLength && command.Length > BaseCommand.Length) + { + // Flush current batch + results.Add(this.InvokeGitInWorkingDirectoryRoot(command.ToString(), useReadObjectHook: false)); + command.Clear(); + command.Append(BaseCommand); + } + + command.Append(quotedPath); + } + + // Flush remaining paths + if (command.Length > BaseCommand.Length) + { + results.Add(this.InvokeGitInWorkingDirectoryRoot(command.ToString(), useReadObjectHook: false)); + } + + return results; + } + + /// + /// Wraps a path in double quotes for use as a git command argument, + /// escaping any embedded double quotes and any backslashes that + /// immediately precede a double quote (to prevent them from being + /// interpreted as escape characters by the Windows C runtime argument + /// parser). Lone backslashes used as path separators are left as-is. 
+ /// + public static string QuoteGitPath(string path) + { + StringBuilder sb = new StringBuilder(path.Length + 4); + sb.Append('"'); + + for (int i = 0; i < path.Length; i++) + { + if (path[i] == '"') + { + sb.Append('\\'); + sb.Append('"'); + } + else if (path[i] == '\\') + { + // Count consecutive backslashes + int backslashCount = 0; + while (i < path.Length && path[i] == '\\') + { + backslashCount++; + i++; + } + + if (i < path.Length && path[i] == '"') + { + // Backslashes before a quote: double them all, then escape the quote + sb.Append('\\', backslashCount * 2); + sb.Append('\\'); + sb.Append('"'); + } + else if (i == path.Length) + { + // Backslashes at end of string (before closing quote): double them + sb.Append('\\', backslashCount * 2); + } + else + { + // Backslashes not before a quote: keep as-is (path separators) + sb.Append('\\', backslashCount); + i--; // Re-process current non-backslash char + } + } + else + { + sb.Append(path[i]); + } + } + + sb.Append('"'); + return sb.ToString(); + } + public Result SerializeStatus(bool allowObjectDownloads, string serializePath) { // specify ignored=matching and --untracked-files=complete @@ -682,11 +814,6 @@ public Result MultiPackIndexRepack(string gitObjectDirectory, string batchSize) return this.InvokeGitAgainstDotGitFolder($"-c pack.threads=1 -c repack.packKeptObjects=true multi-pack-index repack --object-dir=\"{gitObjectDirectory}\" --batch-size={batchSize} --no-progress"); } - public Result GetHeadTreeId() - { - return this.InvokeGitAgainstDotGitFolder("rev-parse \"HEAD^{tree}\"", usePreCommandHook: false); - } - public Process GetGitProcess(string command, string workingDirectory, string dotGitDirectory, bool useReadObjectHook, bool redirectStandardError, string gitObjectsDirectory, bool usePreCommandHook) { ProcessStartInfo processInfo = new ProcessStartInfo(this.gitBinPath); diff --git a/GVFS/GVFS.Common/Git/GitRepo.cs b/GVFS/GVFS.Common/Git/GitRepo.cs index cd11436d8..b2b3ad7b3 100644 --- 
a/GVFS/GVFS.Common/Git/GitRepo.cs +++ b/GVFS/GVFS.Common/Git/GitRepo.cs @@ -51,6 +51,11 @@ public GVFSLock GVFSLock private set; } + internal LibGit2RepoInvoker LibGit2RepoInvoker + { + get { return this.libgit2RepoInvoker; } + } + public void CloseActiveRepo() { this.libgit2RepoInvoker?.DisposeSharedRepo(); diff --git a/GVFS/GVFS.Common/Git/LibGit2Exception.cs b/GVFS/GVFS.Common/Git/LibGit2Exception.cs new file mode 100644 index 000000000..27034e513 --- /dev/null +++ b/GVFS/GVFS.Common/Git/LibGit2Exception.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace GVFS.Common.Git +{ + public class LibGit2Exception : Exception + { + public LibGit2Exception(string message) : base(message) + { + } + + public LibGit2Exception(string message, Exception innerException) : base(message, innerException) + { + } + } +} diff --git a/GVFS/GVFS.Common/Git/LibGit2Repo.cs b/GVFS/GVFS.Common/Git/LibGit2Repo.cs index f9edcce64..60e69450d 100644 --- a/GVFS/GVFS.Common/Git/LibGit2Repo.cs +++ b/GVFS/GVFS.Common/Git/LibGit2Repo.cs @@ -10,21 +10,27 @@ public class LibGit2Repo : IDisposable { private bool disposedValue = false; + public delegate void MultiVarConfigCallback(string value); + public LibGit2Repo(ITracer tracer, string repoPath) { this.Tracer = tracer; - Native.Init(); + InitNative(); IntPtr repoHandle; - if (Native.Repo.Open(out repoHandle, repoPath) != Native.SuccessCode) + if (TryOpenRepo(repoPath, out repoHandle) != Native.ResultCode.Success) { - string reason = Native.GetLastError(); + string reason = GetLastNativeError(); string message = "Couldn't open repo at " + repoPath + ": " + reason; tracer.RelatedWarning(message); - Native.Shutdown(); - throw new InvalidDataException(message); + if (!reason.EndsWith(" is not owned by current user") + || !CheckSafeDirectoryConfigForCaseSensitivityIssue(tracer, repoPath, out repoHandle)) + { + ShutdownNative(); + throw new 
InvalidDataException(message); + } } this.RepoHandle = repoHandle; @@ -32,6 +38,7 @@ public LibGit2Repo(ITracer tracer, string repoPath) protected LibGit2Repo() { + this.Tracer = NullTracer.Instance; } ~LibGit2Repo() @@ -45,7 +52,7 @@ protected LibGit2Repo() public Native.ObjectTypes? GetObjectType(string sha) { IntPtr objHandle; - if (Native.RevParseSingle(out objHandle, this.RepoHandle, sha) != Native.SuccessCode) + if (Native.RevParseSingle(out objHandle, this.RepoHandle, sha) != Native.ResultCode.Success) { return null; } @@ -63,7 +70,7 @@ protected LibGit2Repo() public virtual string GetTreeSha(string commitish) { IntPtr objHandle; - if (Native.RevParseSingle(out objHandle, this.RepoHandle, commitish) != Native.SuccessCode) + if (Native.RevParseSingle(out objHandle, this.RepoHandle, commitish) != Native.ResultCode.Success) { return null; } @@ -99,7 +106,7 @@ public virtual bool CommitAndRootTreeExists(string commitish, out string treeSha public virtual bool ObjectExists(string sha) { IntPtr objHandle; - if (Native.RevParseSingle(out objHandle, this.RepoHandle, sha) != Native.SuccessCode) + if (Native.RevParseSingle(out objHandle, this.RepoHandle, sha) != Native.ResultCode.Success) { return false; } @@ -111,7 +118,7 @@ public virtual bool ObjectExists(string sha) public virtual bool TryCopyBlob(string sha, Action writeAction) { IntPtr objHandle; - if (Native.RevParseSingle(out objHandle, this.RepoHandle, sha) != Native.SuccessCode) + if (Native.RevParseSingle(out objHandle, this.RepoHandle, sha) != Native.ResultCode.Success) { return false; } @@ -157,7 +164,7 @@ public virtual string[] GetMissingSubTrees(string treeSha) { List missingSubtreesList = new List(); IntPtr treeHandle; - if (Native.RevParseSingle(out treeHandle, this.RepoHandle, treeSha) != Native.SuccessCode + if (Native.RevParseSingle(out treeHandle, this.RepoHandle, treeSha) != Native.ResultCode.Success || treeHandle == IntPtr.Zero) { return Array.Empty(); @@ -187,6 +194,125 @@ public virtual 
string[] GetMissingSubTrees(string treeSha) return missingSubtreesList.ToArray(); } + /// + /// Get a config value from the repo's git config. + /// + /// Name of the config entry + /// The config value, or null if not found. + public virtual string GetConfigString(string name) + { + IntPtr configHandle; + if (Native.Config.GetConfig(out configHandle, this.RepoHandle) != Native.ResultCode.Success) + { + throw new LibGit2Exception($"Failed to get config handle: {Native.GetLastError()}"); + } + try + { + string value; + Native.ResultCode resultCode = Native.Config.GetString(out value, configHandle, name); + if (resultCode == Native.ResultCode.NotFound) + { + return null; + } + else if (resultCode != Native.ResultCode.Success) + { + throw new LibGit2Exception($"Failed to get config value for '{name}': {Native.GetLastError()}"); + } + + return value; + } + finally + { + Native.Config.Free(configHandle); + } + } + + public virtual bool? GetConfigBool(string name) + { + IntPtr configHandle; + if (Native.Config.GetConfig(out configHandle, this.RepoHandle) != Native.ResultCode.Success) + { + throw new LibGit2Exception($"Failed to get config handle: {Native.GetLastError()}"); + } + try + { + bool value; + Native.ResultCode resultCode = Native.Config.GetBool(out value, configHandle, name); + if (resultCode == Native.ResultCode.NotFound) + { + return null; + } + else if (resultCode != Native.ResultCode.Success) + { + throw new LibGit2Exception($"Failed to get config value for '{name}': {Native.GetLastError()}"); + } + + return value; + } + finally + { + Native.Config.Free(configHandle); + } + } + + public void ForEachMultiVarConfig(string key, MultiVarConfigCallback callback) + { + if (Native.Config.GetConfig(out IntPtr configHandle, this.RepoHandle) != Native.ResultCode.Success) + { + throw new LibGit2Exception($"Failed to get config handle: {Native.GetLastError()}"); + } + try + { + ForEachMultiVarConfig(configHandle, key, callback); + } + finally + { + 
Native.Config.Free(configHandle); + } + } + + public static void ForEachMultiVarConfigInGlobalAndSystemConfig(string key, MultiVarConfigCallback callback) + { + if (Native.Config.GetGlobalAndSystemConfig(out IntPtr configHandle) != Native.ResultCode.Success) + { + throw new LibGit2Exception($"Failed to get global and system config handle: {Native.GetLastError()}"); + } + try + { + ForEachMultiVarConfig(configHandle, key, callback); + } + finally + { + Native.Config.Free(configHandle); + } + } + + private static void ForEachMultiVarConfig(IntPtr configHandle, string key, MultiVarConfigCallback callback) + { + Native.Config.GitConfigMultivarCallback nativeCallback = (entryPtr, payload) => + { + try + { + var entry = Marshal.PtrToStructure(entryPtr); + callback(entry.GetValue()); + } + catch (Exception) + { + return Native.ResultCode.Failure; + } + return 0; + }; + if (Native.Config.GetMultivarForeach( + configHandle, + key, + regex:"", + nativeCallback, + IntPtr.Zero) != Native.ResultCode.Success) + { + throw new LibGit2Exception($"Failed to get multivar config for '{key}': {Native.GetLastError()}"); + } + } + /// /// Determine if the given index of a tree is a subtree and if it is missing. /// If it is a missing subtree, return the SHA of the subtree. @@ -240,9 +366,88 @@ protected virtual void Dispose(bool disposing) } } + /// + /// Normalize a path for case-insensitive safe.directory comparison: + /// replace backslashes with forward slashes, convert to upper-case, + /// and trim trailing slashes. + /// + internal static string NormalizePathForSafeDirectoryComparison(string path) + { + if (string.IsNullOrEmpty(path)) + { + return path; + } + + string normalized = path.Replace('\\', '/').ToUpperInvariant(); + return normalized.TrimEnd('/'); + } + + /// + /// Retrieve all configured safe.directory values from global and system git config. + /// Virtual so tests can provide fake entries without touching real config. 
+ /// + protected virtual void GetSafeDirectoryConfigEntries(MultiVarConfigCallback callback) + { + ForEachMultiVarConfigInGlobalAndSystemConfig("safe.directory", callback); + } + + /// + /// Try to open a repository at the given path. Virtual so tests can + /// avoid the native P/Invoke call. + /// + protected virtual Native.ResultCode TryOpenRepo(string path, out IntPtr repoHandle) + { + return Native.Repo.Open(out repoHandle, path); + } + + protected virtual void InitNative() + { + Native.Init(); + } + + protected virtual void ShutdownNative() + { + Native.Shutdown(); + } + + protected virtual string GetLastNativeError() + { + return Native.GetLastError(); + } + + protected bool CheckSafeDirectoryConfigForCaseSensitivityIssue(ITracer tracer, string repoPath, out IntPtr repoHandle) + { + /* Libgit2 has a bug where it is case sensitive for safe.directory (especially the + * drive letter) when git.exe isn't. Until a fix can be made and propagated, work + * around it by matching the repo path we request to the configured safe directory. 
+ * + * See https://github.com/libgit2/libgit2/issues/7037 + */ + repoHandle = IntPtr.Zero; + + string normalizedRequestedPath = NormalizePathForSafeDirectoryComparison(repoPath); + + string configuredMatchingDirectory = null; + GetSafeDirectoryConfigEntries((string value) => + { + string normalizedConfiguredPath = NormalizePathForSafeDirectoryComparison(value); + if (normalizedConfiguredPath == normalizedRequestedPath) + { + configuredMatchingDirectory = value; + } + }); + + return configuredMatchingDirectory != null && TryOpenRepo(configuredMatchingDirectory, out repoHandle) == Native.ResultCode.Success; + } + public static class Native { - public const uint SuccessCode = 0; + public enum ResultCode : int + { + Success = 0, + Failure = -1, + NotFound = -3, + } public const string Git2NativeLibName = GVFSConstants.LibGit2LibraryName; @@ -265,7 +470,7 @@ public static GitOid IntPtrToGitOid(IntPtr oidPtr) public static extern int Shutdown(); [DllImport(Git2NativeLibName, EntryPoint = "git_revparse_single")] - public static extern uint RevParseSingle(out IntPtr objectHandle, IntPtr repoHandle, string oid); + public static extern ResultCode RevParseSingle(out IntPtr objectHandle, IntPtr repoHandle, string oid); public static string GetLastError() { @@ -293,12 +498,82 @@ private struct GitError public static class Repo { [DllImport(Git2NativeLibName, EntryPoint = "git_repository_open")] - public static extern uint Open(out IntPtr repoHandle, string path); + public static extern ResultCode Open(out IntPtr repoHandle, string path); [DllImport(Git2NativeLibName, EntryPoint = "git_repository_free")] public static extern void Free(IntPtr repoHandle); } + public static class Config + { + [DllImport(Git2NativeLibName, EntryPoint = "git_repository_config")] + public static extern ResultCode GetConfig(out IntPtr configHandle, IntPtr repoHandle); + + [DllImport(Git2NativeLibName, EntryPoint = "git_config_open_default")] + public static extern ResultCode 
GetGlobalAndSystemConfig(out IntPtr configHandle); + + [DllImport(Git2NativeLibName, EntryPoint = "git_config_get_string")] + public static extern ResultCode GetString(out string value, IntPtr configHandle, string name); + + [DllImport(Git2NativeLibName, EntryPoint = "git_config_get_multivar_foreach")] + public static extern ResultCode GetMultivarForeach( + IntPtr configHandle, + string name, + string regex, + GitConfigMultivarCallback callback, + IntPtr payload); + + [UnmanagedFunctionPointer(CallingConvention.Cdecl)] + public delegate ResultCode GitConfigMultivarCallback( + IntPtr entryPtr, + IntPtr payload); + + [StructLayout(LayoutKind.Sequential)] + public struct GitConfigEntry + { + public IntPtr Name; + public IntPtr Value; + public IntPtr BackendType; + public IntPtr OriginPath; + public uint IncludeDepth; + public int Level; + + public string GetValue() + { + return Value != IntPtr.Zero ? MarshalUtf8String(Value) : null; + } + + public string GetName() + { + return Name != IntPtr.Zero ? 
MarshalUtf8String(Name) : null; + } + + private static string MarshalUtf8String(IntPtr ptr) + { + if (ptr == IntPtr.Zero) + { + return null; + } + + int length = 0; + while (Marshal.ReadByte(ptr, length) != 0) + { + length++; + } + + byte[] buffer = new byte[length]; + Marshal.Copy(ptr, buffer, 0, length); + return System.Text.Encoding.UTF8.GetString(buffer); + } + } + + [DllImport(Git2NativeLibName, EntryPoint = "git_config_get_bool")] + public static extern ResultCode GetBool(out bool value, IntPtr configHandle, string name); + + [DllImport(Git2NativeLibName, EntryPoint = "git_config_free")] + public static extern void Free(IntPtr configHandle); + } + public static class Object { [DllImport(Git2NativeLibName, EntryPoint = "git_object_type")] diff --git a/GVFS/GVFS.Common/Git/LibGit2RepoInvoker.cs b/GVFS/GVFS.Common/Git/LibGit2RepoInvoker.cs index 8d3ec2e06..44b084049 100644 --- a/GVFS/GVFS.Common/Git/LibGit2RepoInvoker.cs +++ b/GVFS/GVFS.Common/Git/LibGit2RepoInvoker.cs @@ -13,6 +13,11 @@ public class LibGit2RepoInvoker : IDisposable private volatile int activeCallers; private LibGit2Repo sharedRepo; + public LibGit2RepoInvoker(ITracer tracer, string repoPath) + : this(tracer, () => new LibGit2Repo(tracer, repoPath)) + { + } + public LibGit2RepoInvoker(ITracer tracer, Func createRepo) { this.tracer = tracer; @@ -82,6 +87,17 @@ public void InitializeSharedRepo() this.GetSharedRepo()?.ObjectExists("30380be3963a75e4a34e10726795d644659e1129"); } + public bool GetConfigBoolOrDefault(string key, bool defaultValue) + { + bool? value = defaultValue; + if (this.TryInvoke(repo => repo.GetConfigBool(key), out value)) + { + return value ?? 
defaultValue; + } + + return defaultValue; + } + private LibGit2Repo GetSharedRepo() { lock (this.sharedRepoLock) diff --git a/GVFS/GVFS.Common/Git/RequiredGitConfig.cs b/GVFS/GVFS.Common/Git/RequiredGitConfig.cs new file mode 100644 index 000000000..2220a795b --- /dev/null +++ b/GVFS/GVFS.Common/Git/RequiredGitConfig.cs @@ -0,0 +1,184 @@ +using System; +using System.Collections.Generic; +using System.IO; + +namespace GVFS.Common.Git +{ + /// + /// Single source of truth for the git config settings required by GVFS. + /// These settings are enforced during clone, mount, and repair. + /// + public static class RequiredGitConfig + { + /// + /// Returns the dictionary of required git config settings for a GVFS enlistment. + /// These settings override any existing local configuration values. + /// + public static Dictionary GetRequiredSettings(Enlistment enlistment) + { + string expectedHooksPath = Path.Combine(enlistment.DotGitRoot, GVFSConstants.DotGit.Hooks.RootName); + expectedHooksPath = Paths.ConvertPathToGitFormat(expectedHooksPath); + + // Single-quote the path: git executes core.virtualfilesystem via the + // shell (use_shell=1 in virtualfilesystem.c), so spaces in an absolute + // path would split the command. Git's config parser strips double quotes + // but preserves single quotes, and bash treats single-quoted strings as + // a single token. 
+ string virtualFileSystemPath = "'" + Paths.ConvertPathToGitFormat( + Path.Combine(enlistment.DotGitRoot, GVFSConstants.DotGit.Hooks.RootName, GVFSConstants.DotGit.Hooks.VirtualFileSystemName)) + "'"; + + string gitStatusCachePath = null; + if (!GVFSEnlistment.IsUnattended(tracer: null) && GVFSPlatform.Instance.IsGitStatusCacheSupported()) + { + gitStatusCachePath = Path.Combine( + enlistment.EnlistmentRoot, + GVFSPlatform.Instance.Constants.DotGVFSRoot, + GVFSConstants.DotGVFS.GitStatusCache.CachePath); + + gitStatusCachePath = Paths.ConvertPathToGitFormat(gitStatusCachePath); + } + + string coreGVFSFlags = Convert.ToInt32( + GitCoreGVFSFlags.SkipShaOnIndex | + GitCoreGVFSFlags.BlockCommands | + GitCoreGVFSFlags.MissingOk | + GitCoreGVFSFlags.NoDeleteOutsideSparseCheckout | + GitCoreGVFSFlags.FetchSkipReachabilityAndUploadPack | + GitCoreGVFSFlags.BlockFiltersAndEolConversions | + GitCoreGVFSFlags.SupportsWorktrees) + .ToString(); + + return new Dictionary + { + // When running 'git am' it will remove the CRs from the patch file by default. This causes the patch to fail to apply because the + // file that is getting the patch applied will still have the CRs. There is a --keep-cr option that you can pass the 'git am' command + // but since we always want to keep CRs it is better to just set the config setting to always keep them so the user doesn't have to + // remember to pass the flag. + { "am.keepcr", "true" }, + + // Update git settings to enable optimizations in git 2.20 + // Set 'checkout.optimizeNewBranch=true' to enable optimized 'checkout -b' + { "checkout.optimizenewbranch", "true" }, + + // Enable parallel checkout by auto-detecting the number of workers based on CPU count. + { "checkout.workers", "0" }, + + // We don't support line ending conversions - automatic conversion of LF to Crlf by git would cause un-necessary hydration. Disabling it. + { "core.autocrlf", "false" }, + + // Enable commit graph. 
https://devblogs.microsoft.com/devops/supercharging-the-git-commit-graph/ + { "core.commitGraph", "true" }, + + // Perf - Git for Windows uses this to bulk-read and cache lstat data of entire directories (instead of doing lstat file by file). + { "core.fscache", "true" }, + + // Turns on all special gvfs logic. https://github.com/microsoft/git/blob/be5e0bb969495c428e219091e6976b52fb33b301/gvfs.h + { "core.gvfs", coreGVFSFlags }, + + // Use 'multi-pack-index' builtin instead of 'midx' to match upstream implementation + { "core.multiPackIndex", "true" }, + + // Perf - Enable parallel index preload for operations like git diff + { "core.preloadIndex", "true" }, + + // VFS4G never wants git to adjust line endings (causes un-necessary hydration of files)- explicitly setting core.safecrlf to false. + { "core.safecrlf", "false" }, + + // Possibly cause hydration while creating untrackedCache. + { "core.untrackedCache", "false" }, + + // This is to match what git init does. + { "core.repositoryformatversion", "0" }, + + // Turn on support for file modes on Mac & Linux. + { "core.filemode", GVFSPlatform.Instance.FileSystem.SupportsFileMode ? "true" : "false" }, + + // For consistency with git init. + { "core.bare", "false" }, + + // For consistency with git init. + { "core.logallrefupdates", "true" }, + + // Git to download objects on demand. + { GitConfigSetting.CoreVirtualizeObjectsName, "true" }, + + // Configure hook that git calls to get the paths git needs to consider for changes or untracked files + { GitConfigSetting.CoreVirtualFileSystemName, virtualFileSystemPath }, + + // Ensure hooks path is configured correctly. + { "core.hookspath", expectedHooksPath }, + + // Hostname is no longer sufficent for VSTS authentication. VSTS now requires dev.azure.com/account to determine the tenant. + // By setting useHttpPath, credential managers will get the path which contains the account as the first parameter. They can then use this information for auth appropriately. 
+ { GitConfigSetting.CredentialUseHttpPath, "true" }, + + // Turn off credential validation(https://github.com/microsoft/Git-Credential-Manager-for-Windows/blob/master/Docs/Configuration.md#validate). + // We already have logic to call git credential if we get back a 401, so there's no need to validate the PAT each time we ask for it. + { "credential.validate", "false" }, + + // This setting is not needed anymore, because current version of gvfs does not use index.lock. + // (This change was introduced initially to prevent `git diff` from acquiring index.lock file.) + // Explicitly setting this to true (which also is the default value) because the repo could have been + // cloned in the past when autoRefreshIndex used to be set to false. + { "diff.autoRefreshIndex", "true" }, + + // In Git 2.24.0, some new config settings were created. Disable them locally in VFS for Git repos in case a user has set them globally. + // https://github.com/microsoft/VFSForGit/pull/1594 + // This applies to feature.manyFiles, feature.experimental and fetch.writeCommitGraph settings. + { "feature.manyFiles", "false" }, + { "feature.experimental", "false" }, + { "fetch.writeCommitGraph", "false" }, + + // Turn off of git garbage collection. Git garbage collection does not work with virtualized object. + // We do run maintenance jobs now that do the packing of loose objects so in theory we shouldn't need + // this - but it is not hurting anything and it will prevent a gc from getting kicked off if for some + // reason the maintenance jobs have not been running and there are too many loose objects + { "gc.auto", "0" }, + + // Prevent git GUI from displaying GC warnings. + { "gui.gcwarning", "false" }, + + // Update git settings to enable optimizations in git 2.20 + // Set 'index.threads=true' to enable multi-threaded index reads + { "index.threads", "true" }, + + // index parsing code in VFSForGit currently only supports version 4. 
+ { "index.version", "4" }, + + // Perf - avoid un-necessary blob downloads during a merge. + { "merge.stat", "false" }, + + // Perf - avoid un-necessary blob downloads while git tries to search and find renamed files. + { "merge.renames", "false" }, + + // Don't use bitmaps to determine pack file contents, because we use MIDX for this. + { "pack.useBitmaps", "false" }, + + // Update Git to include sparse push algorithm + { "pack.useSparse", "true" }, + + // Stop automatic git GC + { "receive.autogc", "false" }, + + // Update git settings to enable optimizations in git 2.20 + // Set 'reset.quiet=true' to speed up 'git reset " + { "reset.quiet", "true" }, + + // Configure git to use our serialize status file - make git use the serialized status file rather than compute the status by + // parsing the index file and going through the files to determine changes. + { "status.deserializePath", gitStatusCachePath }, + + // The GVFS Protocol forbids submodules, so prevent a user's + // global config of "status.submoduleSummary=true" from causing + // extreme slowness in "git status" + { "status.submoduleSummary", "false" }, + + // Generation number v2 isn't ready for full use. Wait for v3. + { "commitGraph.generationVersion", "1" }, + + // Disable the builtin FS Monitor in case it was enabled globally. 
+ { "core.useBuiltinFSMonitor", "false" }, + }; + } + } +} diff --git a/GVFS/GVFS.Common/GitStatusCache.cs b/GVFS/GVFS.Common/GitStatusCache.cs index fd4ef90b9..efe13a49d 100644 --- a/GVFS/GVFS.Common/GitStatusCache.cs +++ b/GVFS/GVFS.Common/GitStatusCache.cs @@ -47,6 +47,12 @@ public class GitStatusCache : IDisposable private bool isStopping; private bool isInitialized; private StatusStatistics statistics; + private CancellationTokenSource shutdownTokenSource; + private Task activeHydrationTask; + + private volatile EnlistmentHydrationSummary cachedHydrationSummary; + + private Func projectedFolderCountProvider; private volatile CacheState cacheState = CacheState.Dirty; @@ -65,10 +71,28 @@ public GitStatusCache(GVFSContext context, TimeSpan backoffTime) this.backoffTime = backoffTime; this.serializedGitStatusFilePath = this.context.Enlistment.GitStatusCachePath; this.statistics = new StatusStatistics(); + this.shutdownTokenSource = new CancellationTokenSource(); this.wakeUpThread = new AutoResetEvent(false); } + /// + /// Sets the provider used to get the total projected folder count for hydration + /// summary computation. Must be called before for + /// hydration summary to function. + /// + /// + /// This is set post-construction because of a circular dependency: + /// InProcessMount creates GitStatusCache before FileSystemCallbacks, + /// but the provider requires GitIndexProjection, which is created + /// inside FileSystemCallbacks. FileSystemCallbacks calls this method + /// after GitIndexProjection is available. 
+ /// + public void SetProjectedFolderCountProvider(Func provider) + { + this.projectedFolderCountProvider = provider; + } + public virtual void Initialize() { this.isInitialized = true; @@ -79,6 +103,7 @@ public virtual void Initialize() public virtual void Shutdown() { this.isStopping = true; + this.shutdownTokenSource.Cancel(); if (this.isInitialized && this.updateStatusCacheThread != null) { @@ -113,6 +138,15 @@ public void RefreshAndWait() this.RebuildStatusCacheIfNeeded(ignoreBackoff: true); } + /// + /// Returns the cached hydration summary if one has been computed, + /// or null if no valid summary is available yet. + /// + public EnlistmentHydrationSummary GetCachedHydrationSummary() + { + return this.cachedHydrationSummary; + } + /// /// The GitStatusCache gets a chance to approve / deny requests for a /// command to take the GVFS lock. The GitStatusCache will only block @@ -177,6 +211,26 @@ public virtual void Dispose() { this.Shutdown(); + // Wait for the hydration task to complete before disposing the + // token source it may still be using. + Task hydrationTask = Interlocked.Exchange(ref this.activeHydrationTask, null); + if (hydrationTask != null) + { + try + { + hydrationTask.Wait(TimeSpan.FromSeconds(5)); + } + catch (AggregateException) + { + } + } + + if (this.shutdownTokenSource != null) + { + this.shutdownTokenSource.Dispose(); + this.shutdownTokenSource = null; + } + if (this.wakeUpThread != null) { this.wakeUpThread.Dispose(); @@ -317,10 +371,30 @@ private void RebuildStatusCacheIfNeeded(bool ignoreBackoff) if (needToRebuild) { this.statistics.RecordBackgroundStatusScanRun(); - this.UpdateHydrationSummary(); + + // Run hydration summary in parallel with git status — they are independent + // operations and neither should delay the other. 
+ Task hydrationTask = Task.Run(() => this.UpdateHydrationSummary()); + Interlocked.Exchange(ref this.activeHydrationTask, hydrationTask); bool rebuildStatusCacheSucceeded = this.TryRebuildStatusCache(); + // Wait for hydration to complete before logging final stats. + // Exceptions are observed here to avoid unobserved task exceptions. + try + { + hydrationTask.Wait(); + } + catch (AggregateException ex) + { + EventMetadata errorMetadata = new EventMetadata(); + errorMetadata.Add("Area", EtwArea); + errorMetadata.Add("Exception", ex.InnerException?.ToString()); + this.context.Tracer.RelatedError( + errorMetadata, + $"{nameof(GitStatusCache)}.{nameof(RebuildStatusCacheIfNeeded)}: Unhandled exception in hydration summary task."); + } + TimeSpan delayedTime = startTime - this.initialDelayTime; TimeSpan statusRunTime = DateTime.UtcNow - startTime; @@ -341,12 +415,27 @@ private void RebuildStatusCacheIfNeeded(bool ignoreBackoff) private void UpdateHydrationSummary() { - bool enabled = TEST_EnableHydrationSummaryOverride ?? this.context.Enlistment.GetStatusHydrationConfig(); + if (this.projectedFolderCountProvider == null) + { + return; + } + + bool enabled = TEST_EnableHydrationSummaryOverride + ?? this.context.Repository.LibGit2RepoInvoker.GetConfigBoolOrDefault(GVFSConstants.GitConfig.ShowHydrationStatus, GVFSConstants.GitConfig.ShowHydrationStatusDefault); if (!enabled) { return; } + HydrationStatusCircuitBreaker circuitBreaker = new HydrationStatusCircuitBreaker( + this.context.Enlistment.DotGVFSRoot, + this.context.Tracer); + + if (circuitBreaker.IsDisabled()) + { + return; + } + try { /* While not strictly part of git status, enlistment hydration summary is used @@ -355,11 +444,13 @@ private void UpdateHydrationSummary() * and this is also a convenient place to log telemetry for it. 
*/ EnlistmentHydrationSummary hydrationSummary = - EnlistmentHydrationSummary.CreateSummary(this.context.Enlistment, this.context.FileSystem); + EnlistmentHydrationSummary.CreateSummary(this.context.Enlistment, this.context.FileSystem, this.context.Tracer, this.projectedFolderCountProvider, this.shutdownTokenSource.Token); EventMetadata metadata = new EventMetadata(); metadata.Add("Area", EtwArea); if (hydrationSummary.IsValid) { + this.cachedHydrationSummary = hydrationSummary; + metadata[nameof(hydrationSummary.TotalFolderCount)] = hydrationSummary.TotalFolderCount; metadata[nameof(hydrationSummary.TotalFileCount)] = hydrationSummary.TotalFileCount; metadata[nameof(hydrationSummary.HydratedFolderCount)] = hydrationSummary.HydratedFolderCount; @@ -371,17 +462,28 @@ private void UpdateHydrationSummary() metadata, Keywords.Telemetry); } - else + else if (hydrationSummary.Error != null) { - metadata["Exception"] = hydrationSummary.Error?.ToString(); + this.cachedHydrationSummary = null; + circuitBreaker.RecordFailure(); + metadata["Exception"] = hydrationSummary.Error.ToString(); this.context.Tracer.RelatedWarning( metadata, $"{nameof(GitStatusCache)}{nameof(RebuildStatusCacheIfNeeded)}: hydration summary could not be calculated.", Keywords.Telemetry); } + else + { + // Invalid summary with no error — likely cancelled during shutdown + this.cachedHydrationSummary = null; + this.context.Tracer.RelatedInfo( + $"{nameof(GitStatusCache)}{nameof(RebuildStatusCacheIfNeeded)}: hydration summary was cancelled."); + } } catch (Exception ex) { + this.cachedHydrationSummary = null; + circuitBreaker.RecordFailure(); EventMetadata metadata = new EventMetadata(); metadata.Add("Area", EtwArea); metadata.Add("Exception", ex.ToString()); diff --git a/GVFS/GVFS.Common/HealthCalculator/EnlistmentHydrationSummary.cs b/GVFS/GVFS.Common/HealthCalculator/EnlistmentHydrationSummary.cs index 600ba91c5..a2f83afd4 100644 --- a/GVFS/GVFS.Common/HealthCalculator/EnlistmentHydrationSummary.cs +++ 
b/GVFS/GVFS.Common/HealthCalculator/EnlistmentHydrationSummary.cs @@ -1,26 +1,34 @@ using GVFS.Common.FileSystem; -using GVFS.Common.Git; +using GVFS.Common.Tracing; using System; +using System.Diagnostics; using System.IO; -using System.Linq; +using System.Threading; namespace GVFS.Common { public class EnlistmentHydrationSummary { - public int HydratedFileCount { get; private set; } + public int PlaceholderFileCount { get; private set; } + public int PlaceholderFolderCount { get; private set; } + public int ModifiedFileCount { get; private set; } + public int ModifiedFolderCount { get; private set; } public int TotalFileCount { get; private set; } - public int HydratedFolderCount { get; private set; } public int TotalFolderCount { get; private set; } public Exception Error { get; private set; } = null; + public int HydratedFileCount => PlaceholderFileCount + ModifiedFileCount; + public int HydratedFolderCount => PlaceholderFolderCount + ModifiedFolderCount; + public bool IsValid { get { - return HydratedFileCount >= 0 - && HydratedFolderCount >= 0 + return PlaceholderFileCount >= 0 + && PlaceholderFolderCount >= 0 + && ModifiedFileCount >= 0 + && ModifiedFolderCount >= 0 && TotalFileCount >= HydratedFileCount && TotalFolderCount >= HydratedFolderCount; } @@ -33,48 +41,115 @@ public string ToMessage() return "Error calculating hydration summary. Run 'gvfs health' at the repository root for hydration status details."; } - int fileHydrationPercent = TotalFileCount == 0 ? 0 : (100 * HydratedFileCount) / TotalFileCount; - int folderHydrationPercent = TotalFolderCount == 0 ? 0 : ((100 * HydratedFolderCount) / TotalFolderCount); + int fileHydrationPercent = TotalFileCount == 0 ? 0 : (int)((100L * HydratedFileCount) / TotalFileCount); + int folderHydrationPercent = TotalFolderCount == 0 ? 0 : (int)((100L * HydratedFolderCount) / TotalFolderCount); return $"{fileHydrationPercent}% of files and {folderHydrationPercent}% of folders hydrated. 
Run 'gvfs health' at the repository root for details."; } public static EnlistmentHydrationSummary CreateSummary( GVFSEnlistment enlistment, - PhysicalFileSystem fileSystem) + PhysicalFileSystem fileSystem, + ITracer tracer, + Func projectedFolderCountProvider, + CancellationToken cancellationToken = default) { + Stopwatch totalStopwatch = Stopwatch.StartNew(); + Stopwatch phaseStopwatch = new Stopwatch(); + try { /* Getting all the file paths from git index is slow and we only need the total count, * so we read the index file header instead of calling GetPathsFromGitIndex */ + phaseStopwatch.Restart(); int totalFileCount = GetIndexFileCount(enlistment, fileSystem); - - /* Getting all the directories is also slow, but not as slow as reading the entire index, - * GetTotalPathCount caches the count so this is only slow occasionally, - * and the GitStatusCache manager also calls this to ensure it is updated frequently. */ - int totalFolderCount = GetHeadTreeCount(enlistment, fileSystem); + long indexReadMs = phaseStopwatch.ElapsedMilliseconds; + cancellationToken.ThrowIfCancellationRequested(); EnlistmentPathData pathData = new EnlistmentPathData(); /* FUTURE: These could be optimized to only deal with counts instead of full path lists */ + phaseStopwatch.Restart(); pathData.LoadPlaceholdersFromDatabase(enlistment); - pathData.LoadModifiedPaths(enlistment); + long placeholderLoadMs = phaseStopwatch.ElapsedMilliseconds; + cancellationToken.ThrowIfCancellationRequested(); + + phaseStopwatch.Restart(); + pathData.LoadModifiedPaths(enlistment, tracer); + long modifiedPathsLoadMs = phaseStopwatch.ElapsedMilliseconds; + cancellationToken.ThrowIfCancellationRequested(); + + int placeholderFileCount = pathData.PlaceholderFilePaths.Count; + int placeholderFolderCount = pathData.PlaceholderFolderPaths.Count; + int modifiedFileCount = pathData.ModifiedFilePaths.Count; + int modifiedFolderCount = pathData.ModifiedFolderPaths.Count; + + /* Getting the head tree count (used for 
TotalFolderCount) is potentially slower than the other parts + * of the operation, so we do it last and check that the other parts would succeed before running it. + */ + var soFar = new EnlistmentHydrationSummary() + { + PlaceholderFileCount = placeholderFileCount, + PlaceholderFolderCount = placeholderFolderCount, + ModifiedFileCount = modifiedFileCount, + ModifiedFolderCount = modifiedFolderCount, + TotalFileCount = totalFileCount, + TotalFolderCount = placeholderFolderCount + modifiedFolderCount + 1, // Not calculated yet, use a dummy valid value. + }; + + if (!soFar.IsValid) + { + soFar.TotalFolderCount = 0; // Set to default invalid value to avoid confusion with the dummy value above. + tracer.RelatedWarning( + $"Hydration summary early exit: data invalid before tree count. " + + $"TotalFileCount={totalFileCount}, PlaceholderFileCount={placeholderFileCount}, " + + $"ModifiedFileCount={modifiedFileCount}, PlaceholderFolderCount={placeholderFolderCount}, " + + $"ModifiedFolderCount={modifiedFolderCount}"); + EmitDurationTelemetry(tracer, totalStopwatch.ElapsedMilliseconds, indexReadMs, placeholderLoadMs, modifiedPathsLoadMs, treeCountMs: 0, earlyExit: true); + return soFar; + } + + /* Get the total folder count from the caller-provided function. + * In the mount process, this comes from the in-memory projection (essentially free). + * In gvfs health --status fallback, this parses the git index via GitIndexProjection. 
*/ + cancellationToken.ThrowIfCancellationRequested(); + phaseStopwatch.Restart(); + int totalFolderCount = projectedFolderCountProvider(); + long treeCountMs = phaseStopwatch.ElapsedMilliseconds; + + EmitDurationTelemetry(tracer, totalStopwatch.ElapsedMilliseconds, indexReadMs, placeholderLoadMs, modifiedPathsLoadMs, treeCountMs, earlyExit: false); - int hydratedFileCount = pathData.ModifiedFilePaths.Count + pathData.PlaceholderFilePaths.Count; - int hydratedFolderCount = pathData.ModifiedFolderPaths.Count + pathData.PlaceholderFolderPaths.Count; return new EnlistmentHydrationSummary() { - HydratedFileCount = hydratedFileCount, - HydratedFolderCount = hydratedFolderCount, + PlaceholderFileCount = placeholderFileCount, + PlaceholderFolderCount = placeholderFolderCount, + ModifiedFileCount = modifiedFileCount, + ModifiedFolderCount = modifiedFolderCount, TotalFileCount = totalFileCount, TotalFolderCount = totalFolderCount, }; } + catch (OperationCanceledException) + { + tracer.RelatedInfo($"Hydration summary cancelled after {totalStopwatch.ElapsedMilliseconds}ms"); + return new EnlistmentHydrationSummary() + { + PlaceholderFileCount = -1, + PlaceholderFolderCount = -1, + ModifiedFileCount = -1, + ModifiedFolderCount = -1, + TotalFileCount = -1, + TotalFolderCount = -1, + }; + } catch (Exception e) { + tracer.RelatedError($"Hydration summary failed with exception after {totalStopwatch.ElapsedMilliseconds}ms: {e.Message}"); return new EnlistmentHydrationSummary() { - HydratedFileCount = -1, - HydratedFolderCount = -1, + PlaceholderFileCount = -1, + PlaceholderFolderCount = -1, + ModifiedFileCount = -1, + ModifiedFolderCount = -1, TotalFileCount = -1, TotalFolderCount = -1, Error = e, @@ -82,12 +157,35 @@ public static EnlistmentHydrationSummary CreateSummary( } } + private static void EmitDurationTelemetry( + ITracer tracer, + long totalMs, + long indexReadMs, + long placeholderLoadMs, + long modifiedPathsLoadMs, + long treeCountMs, + bool earlyExit) + { + 
EventMetadata metadata = new EventMetadata(); + metadata["TotalMs"] = totalMs; + metadata["IndexReadMs"] = indexReadMs; + metadata["PlaceholderLoadMs"] = placeholderLoadMs; + metadata["ModifiedPathsLoadMs"] = modifiedPathsLoadMs; + metadata["TreeCountMs"] = treeCountMs; + metadata["EarlyExit"] = earlyExit; + tracer.RelatedEvent( + EventLevel.Informational, + "HydrationSummaryDuration", + metadata, + Keywords.Telemetry); + } + /// /// Get the total number of files in the index. /// internal static int GetIndexFileCount(GVFSEnlistment enlistment, PhysicalFileSystem fileSystem) { - string indexPath = Path.Combine(enlistment.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Index); + string indexPath = enlistment.GitIndexPath; using (var indexFile = fileSystem.OpenFileStream(indexPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite, callFlushFileBuffers: false)) { if (indexFile.Length < 12) @@ -111,67 +209,5 @@ internal static int GetIndexFileCount(GVFSEnlistment enlistment, PhysicalFileSys } } - /// - /// Get the total number of trees in the repo at HEAD. - /// - /// - /// This is used as the denominator in displaying percentage of hydrated - /// directories as part of git status pre-command hook. - /// It can take several seconds to calculate, so we cache it near the git status cache. - /// - /// - /// The number of subtrees at HEAD, which may be 0. - /// Will return 0 if unsuccessful. - /// - internal static int GetHeadTreeCount(GVFSEnlistment enlistment, PhysicalFileSystem fileSystem) - { - var gitProcess = enlistment.CreateGitProcess(); - var headResult = gitProcess.GetHeadTreeId(); - if (headResult.ExitCodeIsFailure) - { - return 0; - } - var headSha = headResult.Output.Trim(); - var cacheFile = Path.Combine( - enlistment.DotGVFSRoot, - GVFSConstants.DotGVFS.GitStatusCache.TreeCount); - - // Load from cache if cache matches current HEAD. 
- if (fileSystem.FileExists(cacheFile)) - { - try - { - var lines = fileSystem.ReadLines(cacheFile).ToArray(); - if (lines.Length == 2 - && lines[0] == headSha - && int.TryParse(lines[1], out int cachedCount)) - { - return cachedCount; - } - } - catch - { - // Ignore errors reading the cache - } - } - - int totalPathCount = 0; - GitProcess.Result folderResult = gitProcess.LsTree( - GVFSConstants.DotGit.HeadName, - line => totalPathCount++, - recursive: true, - showDirectories: true); - try - { - fileSystem.CreateDirectory(Path.GetDirectoryName(cacheFile)); - fileSystem.WriteAllText(cacheFile, $"{headSha}\n{totalPathCount}"); - } - catch - { - // Ignore errors writing the cache - } - - return totalPathCount; - } } } diff --git a/GVFS/GVFS.Common/HealthCalculator/EnlistmentPathData.cs b/GVFS/GVFS.Common/HealthCalculator/EnlistmentPathData.cs index 911b3f805..8a6df6818 100644 --- a/GVFS/GVFS.Common/HealthCalculator/EnlistmentPathData.cs +++ b/GVFS/GVFS.Common/HealthCalculator/EnlistmentPathData.cs @@ -2,6 +2,7 @@ using GVFS.Common.FileSystem; using GVFS.Common.Git; using GVFS.Common.NamedPipes; +using GVFS.Common.Tracing; using System; using System.Collections.Generic; using System.IO; @@ -94,30 +95,39 @@ public void LoadPathsFromGitIndex(GVFSEnlistment enlistment) this.GitTrackingPaths.AddRange(skipWorktreeFiles); } - public void LoadModifiedPaths(GVFSEnlistment enlistment) + public void LoadModifiedPaths(GVFSEnlistment enlistment, ITracer tracer) { - if (TryLoadModifiedPathsFromPipe(enlistment)) + if (TryLoadModifiedPathsFromPipe(enlistment, tracer)) { return; } + + // Most likely GVFS is not mounted. Give a basic effort to read the modified paths database. + string filePath = Path.Combine(enlistment.DotGVFSRoot, GVFSConstants.DotGVFS.Databases.ModifiedPaths); try { - /* Most likely GVFS is not mounted. 
Give a basic effort to read the modified paths database */ - var filePath = Path.Combine(enlistment.DotGVFSRoot, GVFSConstants.DotGVFS.Databases.ModifiedPaths); - using (var file = File.Open(filePath, FileMode.OpenOrCreate, FileAccess.Read, FileShare.Read)) - using (var reader = new StreamReader(file)) + using (FileStream file = File.Open(filePath, FileMode.OpenOrCreate, FileAccess.Read, FileShare.Read)) + using (StreamReader reader = new StreamReader(file)) { AddModifiedPaths(ReadModifiedPathDatabaseLines(reader)); } } - catch { } + catch (Exception ex) + { + tracer.RelatedWarning($"Failed to read modified paths file at {filePath}: {ex.Message}"); + } } private IEnumerable ReadModifiedPathDatabaseLines(StreamReader r) { while (!r.EndOfStream) { - var line = r.ReadLine(); + string line = r.ReadLine(); + if (line == null) + { + continue; + } + const string LinePrefix = "A "; if (line.StartsWith(LinePrefix)) { @@ -133,7 +143,7 @@ private IEnumerable ReadModifiedPathDatabaseLines(StreamReader r) /// If/when modified paths are moved to SQLite go there instead /// The enlistment being operated on /// An array containing all of the modified paths in string format - private bool TryLoadModifiedPathsFromPipe(GVFSEnlistment enlistment) + private bool TryLoadModifiedPathsFromPipe(GVFSEnlistment enlistment, ITracer tracer) { using (NamedPipeClient pipeClient = new NamedPipeClient(enlistment.NamedPipeName)) { @@ -157,8 +167,9 @@ private bool TryLoadModifiedPathsFromPipe(GVFSEnlistment enlistment) modifiedPathsList = modifiedPathsResponse.Body.Split(new char[] { '\0' }, StringSplitOptions.RemoveEmptyEntries); } - catch (BrokenPipeException e) + catch (Exception ex) { + tracer.RelatedWarning($"Failed to load modified paths via named pipe: {ex.Message}"); return false; } diff --git a/GVFS/GVFS.Common/HealthCalculator/HydrationStatusCircuitBreaker.cs b/GVFS/GVFS.Common/HealthCalculator/HydrationStatusCircuitBreaker.cs new file mode 100644 index 000000000..315b4d606 --- /dev/null 
+++ b/GVFS/GVFS.Common/HealthCalculator/HydrationStatusCircuitBreaker.cs @@ -0,0 +1,164 @@ +using GVFS.Common.Tracing; +using System; +using System.IO; + +namespace GVFS.Common +{ + /// + /// Tracks hydration status computation failures and auto-disables the feature + /// after repeated failures to protect users from persistent performance issues. + /// + /// The circuit breaker resets when: + /// - A new calendar day begins (UTC) + /// - The GVFS version changes (indicating an update that may fix the issue) + /// + /// This class intentionally avoids dependencies on PhysicalFileSystem so it can + /// be file-linked into lightweight projects like GVFS.Hooks. + /// + public class HydrationStatusCircuitBreaker + { + public const int MaxFailuresPerDay = 3; + + private readonly string markerFilePath; + private readonly ITracer tracer; + + public HydrationStatusCircuitBreaker( + string dotGVFSRoot, + ITracer tracer) + { + this.markerFilePath = Path.Combine( + dotGVFSRoot, + GVFSConstants.DotGVFS.HydrationStatus.DisabledMarkerFile); + this.tracer = tracer; + } + + /// + /// Returns true if the hydration status feature should be skipped due to + /// too many recent failures. + /// + public bool IsDisabled() + { + try + { + if (!File.Exists(this.markerFilePath)) + { + return false; + } + + string content = File.ReadAllText(this.markerFilePath); + if (!TryParseMarkerFile(content, out string markerDate, out string markerVersion, out int failureCount)) + { + return false; + } + + string today = DateTime.UtcNow.ToString("yyyy-MM-dd"); + string currentVersion = ProcessHelper.GetCurrentProcessVersion(); + + // Stale marker from a previous day or version — not disabled. + // RecordFailure will reset the count when it next runs. 
+ if (markerDate != today || markerVersion != currentVersion) + { + return false; + } + + return failureCount >= MaxFailuresPerDay; + } + catch (Exception ex) + { + this.tracer.RelatedWarning($"Error reading hydration status circuit breaker: {ex.Message}"); + return false; + } + } + + /// + /// Records a failure. After failures in a day, + /// the circuit breaker trips and returns true. + /// Uses exclusive file access to prevent concurrent processes from losing counts. + /// + public void RecordFailure() + { + try + { + int failureCount = 1; + string today = DateTime.UtcNow.ToString("yyyy-MM-dd"); + string currentVersion = ProcessHelper.GetCurrentProcessVersion(); + + Directory.CreateDirectory(Path.GetDirectoryName(this.markerFilePath)); + + // Use exclusive file access to prevent concurrent read-modify-write races. + // If another process holds the file, we skip this failure rather than block. + try + { + using (FileStream fs = new FileStream( + this.markerFilePath, + FileMode.OpenOrCreate, + FileAccess.ReadWrite, + FileShare.None)) + { + string existingContent; + using (StreamReader reader = new StreamReader(fs, System.Text.Encoding.UTF8, detectEncodingFromByteOrderMarks: true, bufferSize: 256, leaveOpen: true)) + { + existingContent = reader.ReadToEnd(); + } + + if (TryParseMarkerFile(existingContent, out string markerDate, out string markerVersion, out int existingCount) + && markerDate == today + && markerVersion == currentVersion) + { + failureCount = existingCount + 1; + } + + // Reset to beginning and write new content + fs.Position = 0; + fs.SetLength(0); + using (StreamWriter writer = new StreamWriter(fs)) + { + writer.Write($"{today}\n{currentVersion}\n{failureCount}"); + } + } + } + catch (IOException) + { + // Another process holds the file — skip this failure count + return; + } + + if (failureCount >= MaxFailuresPerDay) + { + this.tracer.RelatedWarning( + $"Hydration status circuit breaker tripped after {failureCount} failures today. 
" + + $"Feature will be disabled until tomorrow or a GVFS update."); + } + } + catch (Exception ex) + { + this.tracer.RelatedWarning($"Error writing hydration status circuit breaker: {ex.Message}"); + } + } + + /// + /// Parses the marker file format: date\nversion\ncount + /// + internal static bool TryParseMarkerFile(string content, out string date, out string version, out int failureCount) + { + date = null; + version = null; + failureCount = 0; + + if (string.IsNullOrEmpty(content)) + { + return false; + } + + string[] lines = content.Split(new[] { '\n', '\r' }, StringSplitOptions.RemoveEmptyEntries); + if (lines.Length < 3) + { + return false; + } + + date = lines[0]; + version = lines[1]; + return int.TryParse(lines[2], out failureCount); + } + } +} diff --git a/GVFS/GVFS.Common/Http/HttpRequestor.cs b/GVFS/GVFS.Common/Http/HttpRequestor.cs index e3c163d42..1f05d6aab 100644 --- a/GVFS/GVFS.Common/Http/HttpRequestor.cs +++ b/GVFS/GVFS.Common/Http/HttpRequestor.cs @@ -17,8 +17,12 @@ namespace GVFS.Common.Http { public abstract class HttpRequestor : IDisposable { + private const int ConnectionPoolWaitTimeoutMs = 30_000; + private const int ConnectionPoolContentionThresholdMs = 100; + private static long requestCount = 0; private static SemaphoreSlim availableConnections; + private static int connectionLimitConfigured = 0; private readonly ProductInfoHeaderValue userAgentHeader; @@ -34,8 +38,12 @@ static HttpRequestor() using (var machineConfigLock = GetMachineConfigLock()) { ServicePointManager.SecurityProtocol = ServicePointManager.SecurityProtocol | SecurityProtocolType.Tls12; - ServicePointManager.DefaultConnectionLimit = Environment.ProcessorCount; - availableConnections = new SemaphoreSlim(ServicePointManager.DefaultConnectionLimit); + + // HTTP downloads are I/O-bound, not CPU-bound, so we default to + // 2x ProcessorCount. Can be overridden via gvfs.max-http-connections. 
+ int connectionLimit = 2 * Environment.ProcessorCount; + ServicePointManager.DefaultConnectionLimit = connectionLimit; + availableConnections = new SemaphoreSlim(connectionLimit); } } @@ -47,6 +55,13 @@ protected HttpRequestor(ITracer tracer, RetryConfig retryConfig, Enlistment enli this.Tracer = tracer; + // On first instantiation, check git config for a custom connection limit. + // This runs before any requests are made (during mount initialization). + if (Interlocked.CompareExchange(ref connectionLimitConfigured, 1, 0) == 0) + { + TryApplyConnectionLimitFromConfig(tracer, enlistment); + } + HttpClientHandler httpClientHandler = new HttpClientHandler() { UseDefaultCredentials = true }; this.authentication.ConfigureHttpClientHandlerSslIfNeeded(this.Tracer, httpClientHandler, enlistment.CreateGitProcess()); @@ -126,8 +141,30 @@ protected GitEndPointResponseData SendRequest( responseMetadata.Add("availableConnections", availableConnections.CurrentCount); Stopwatch requestStopwatch = Stopwatch.StartNew(); - availableConnections.Wait(cancellationToken); - TimeSpan connectionWaitTime = requestStopwatch.Elapsed; + + if (!availableConnections.Wait(ConnectionPoolWaitTimeoutMs, cancellationToken)) + { + TimeSpan connectionWaitTime = requestStopwatch.Elapsed; + responseMetadata.Add("connectionWaitTimeMS", $"{connectionWaitTime.TotalMilliseconds:F4}"); + this.Tracer.RelatedWarning(responseMetadata, "SendRequest: Connection pool exhausted, all connections busy"); + + return new GitEndPointResponseData( + HttpStatusCode.ServiceUnavailable, + new GitObjectsHttpException(HttpStatusCode.ServiceUnavailable, "Connection pool exhausted - all connections busy"), + shouldRetry: true, + message: null, + onResponseDisposed: null); + } + + TimeSpan connectionWaitTimeElapsed = requestStopwatch.Elapsed; + if (connectionWaitTimeElapsed.TotalMilliseconds > ConnectionPoolContentionThresholdMs) + { + EventMetadata contentionMetadata = new EventMetadata(); + 
contentionMetadata.Add("RequestId", requestId); + contentionMetadata.Add("availableConnections", availableConnections.CurrentCount); + contentionMetadata.Add("connectionWaitTimeMS", $"{connectionWaitTimeElapsed.TotalMilliseconds:F4}"); + this.Tracer.RelatedWarning(contentionMetadata, "SendRequest: Connection pool contention detected"); + } TimeSpan responseWaitTime = default(TimeSpan); GitEndPointResponseData gitEndPointResponseData = null; @@ -248,7 +285,7 @@ protected GitEndPointResponseData SendRequest( } finally { - responseMetadata.Add("connectionWaitTimeMS", $"{connectionWaitTime.TotalMilliseconds:F4}"); + responseMetadata.Add("connectionWaitTimeMS", $"{connectionWaitTimeElapsed.TotalMilliseconds:F4}"); responseMetadata.Add("responseWaitTimeMS", $"{responseWaitTime.TotalMilliseconds:F4}"); this.Tracer.RelatedEvent(EventLevel.Informational, "NetworkResponse", responseMetadata); @@ -337,6 +374,58 @@ private static bool TryGetResponseMessageFromHttpRequestException(HttpRequestExc } + private static void TryApplyConnectionLimitFromConfig(ITracer tracer, Enlistment enlistment) + { + try + { + GitProcess.ConfigResult result = enlistment.CreateGitProcess().GetFromConfig(GVFSConstants.GitConfig.MaxHttpConnectionsConfig); + string error; + int configuredLimit; + if (!result.TryParseAsInt(0, 1, out configuredLimit, out error)) + { + EventMetadata metadata = new EventMetadata(); + metadata.Add("error", error); + tracer.RelatedWarning(metadata, "HttpRequestor: Invalid gvfs.max-http-connections config value, using default"); + return; + } + + if (configuredLimit > 0) + { + int currentLimit = ServicePointManager.DefaultConnectionLimit; + ServicePointManager.DefaultConnectionLimit = configuredLimit; + + // Adjust the existing semaphore rather than replacing it, so any + // in-flight waiters release permits to the correct instance. 
+ int delta = configuredLimit - currentLimit; + if (delta > 0) + { + for (int i = 0; i < delta; i++) + { + availableConnections.Release(); + } + } + else if (delta < 0) + { + for (int i = 0; i < -delta; i++) + { + availableConnections.Wait(); + } + } + + EventMetadata metadata = new EventMetadata(); + metadata.Add("configuredLimit", configuredLimit); + metadata.Add("previousLimit", currentLimit); + tracer.RelatedEvent(EventLevel.Informational, "HttpRequestor_ConnectionLimitConfigured", metadata); + } + } + catch (Exception e) + { + EventMetadata metadata = new EventMetadata(); + metadata.Add("Exception", e.ToString()); + tracer.RelatedWarning(metadata, "HttpRequestor: Failed to read gvfs.max-http-connections config, using default"); + } + } + private static FileStream GetMachineConfigLock() { var machineConfigLocation = RuntimeEnvironment.SystemConfigurationFile; diff --git a/GVFS/GVFS.Common/Maintenance/PrefetchStep.cs b/GVFS/GVFS.Common/Maintenance/PrefetchStep.cs index a494ac6cc..163089afb 100644 --- a/GVFS/GVFS.Common/Maintenance/PrefetchStep.cs +++ b/GVFS/GVFS.Common/Maintenance/PrefetchStep.cs @@ -56,7 +56,9 @@ public bool TryPrefetchCommitsAndTrees(out string error, GitProcess gitProcess = return false; } - if (!this.GitObjects.TryDownloadPrefetchPacks(gitProcess, maxGoodTimeStamp, out packIndexes)) + var trustPackIndexes = this.Context.Repository.LibGit2RepoInvoker.GetConfigBoolOrDefault(GVFSConstants.GitConfig.TrustPackIndexes, GVFSConstants.GitConfig.TrustPackIndexesDefault); + + if (!this.GitObjects.TryDownloadPrefetchPacks(gitProcess, maxGoodTimeStamp, trustPackIndexes, out packIndexes)) { error = "Failed to download prefetch packs"; return false; diff --git a/GVFS/GVFS.Common/MissingTreeTracker.cs b/GVFS/GVFS.Common/MissingTreeTracker.cs new file mode 100644 index 000000000..3d5ca78a1 --- /dev/null +++ b/GVFS/GVFS.Common/MissingTreeTracker.cs @@ -0,0 +1,318 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using 
GVFS.Common.Tracing; + +namespace GVFS.Common +{ + /// + /// Tracks missing trees per commit to support batching tree downloads. + /// Maintains LRU eviction based on commits (not individual trees). + /// A single tree SHA may be shared across multiple commits. + /// + public class MissingTreeTracker + { + private const string EtwArea = nameof(MissingTreeTracker); + + private readonly int treeCapacity; + private readonly ITracer tracer; + private readonly object syncLock = new object(); + + // Primary storage: commit -> set of missing trees + private readonly Dictionary> missingTreesByCommit; + + // Reverse lookup: tree -> set of commits (for fast lookups) + private readonly Dictionary> commitsByTree; + + // LRU ordering based on commits + private readonly LinkedList commitOrder; + private readonly Dictionary> commitNodes; + + public MissingTreeTracker(ITracer tracer, int treeCapacity) + { + this.tracer = tracer; + this.treeCapacity = treeCapacity; + this.missingTreesByCommit = new Dictionary>(StringComparer.OrdinalIgnoreCase); + this.commitsByTree = new Dictionary>(StringComparer.OrdinalIgnoreCase); + this.commitOrder = new LinkedList(); + this.commitNodes = new Dictionary>(StringComparer.OrdinalIgnoreCase); + } + + /// + /// Records a missing root tree for a commit. Marks the commit as recently used. + /// A tree may be associated with multiple commits. + /// + public void AddMissingRootTree(string treeSha, string commitSha) + { + lock (this.syncLock) + { + this.EnsureCommitTracked(commitSha); + this.AddTreeToCommit(treeSha, commitSha); + } + } + + /// + /// Records missing sub-trees discovered while processing a parent tree. + /// Each sub-tree is associated with all commits currently tracking the parent tree. 
+ /// + public void AddMissingSubTrees(string parentTreeSha, string[] subTreeShas) + { + lock (this.syncLock) + { + if (!this.commitsByTree.TryGetValue(parentTreeSha, out var commits)) + { + return; + } + + // Snapshot the set because AddTreeToCommit may modify commitsByTree indirectly + string[] commitSnapshot = commits.ToArray(); + foreach (string subTreeSha in subTreeShas) + { + foreach (string commitSha in commitSnapshot) + { + /* Ensure it wasn't evicted earlier in the loop. */ + if (!this.missingTreesByCommit.ContainsKey(commitSha)) + { + continue; + } + /* Ensure we don't evict this commit while trying to add a tree to it. */ + this.MarkCommitAsUsed(commitSha); + this.AddTreeToCommit(subTreeSha, commitSha); + } + } + } + } + + /// + /// Tries to get all commits associated with a tree SHA. + /// Marks all found commits as recently used. + /// + public bool TryGetCommits(string treeSha, out string[] commitShas) + { + lock (this.syncLock) + { + if (this.commitsByTree.TryGetValue(treeSha, out var commits)) + { + commitShas = commits.ToArray(); + foreach (string commitSha in commitShas) + { + this.MarkCommitAsUsed(commitSha); + } + + return true; + } + + commitShas = null; + return false; + } + } + + /// + /// Given a set of commits, finds the one with the most missing trees. + /// + public int GetHighestMissingTreeCount(string[] commitShas, out string highestCountCommitSha) + { + lock (this.syncLock) + { + highestCountCommitSha = null; + int highestCount = 0; + + foreach (string commitSha in commitShas) + { + if (this.missingTreesByCommit.TryGetValue(commitSha, out var trees) + && trees.Count > highestCount) + { + highestCount = trees.Count; + highestCountCommitSha = commitSha; + } + } + + return highestCount; + } + } + + /// + /// Marks a commit as complete (e.g. its pack was downloaded successfully). 
+ /// Because the trees are now available, they are also removed from tracking + /// for any other commits that shared them, and those commits are cleaned up + /// if they become empty. + /// + public void MarkCommitComplete(string commitSha) + { + lock (this.syncLock) + { + this.RemoveCommitWithCascade(commitSha); + + EventMetadata metadata = new EventMetadata(); + metadata.Add("Area", EtwArea); + metadata.Add("CompletedCommit", commitSha); + metadata.Add("RemainingCommits", this.commitNodes.Count); + metadata.Add("RemainingTrees", this.commitsByTree.Count); + this.tracer.RelatedEvent(EventLevel.Informational, nameof(this.MarkCommitComplete), metadata, Keywords.Telemetry); + } + } + + private void EnsureCommitTracked(string commitSha) + { + if (!this.missingTreesByCommit.TryGetValue(commitSha, out _)) + { + this.missingTreesByCommit[commitSha] = new HashSet(StringComparer.OrdinalIgnoreCase); + var node = this.commitOrder.AddFirst(commitSha); + this.commitNodes[commitSha] = node; + } + else + { + this.MarkCommitAsUsed(commitSha); + } + } + + private void AddTreeToCommit(string treeSha, string commitSha) + { + if (!this.commitsByTree.ContainsKey(treeSha)) + { + // Evict LRU commits until there is room for the new tree + while (this.commitsByTree.Count >= this.treeCapacity) + { + // If evict fails it means we only have one commit left. 
+ if (!this.EvictLruCommit()) + { + break; + } + } + + this.commitsByTree[treeSha] = new HashSet(StringComparer.OrdinalIgnoreCase); + } + + this.missingTreesByCommit[commitSha].Add(treeSha); + this.commitsByTree[treeSha].Add(commitSha); + } + + private void MarkCommitAsUsed(string commitSha) + { + if (this.commitNodes.TryGetValue(commitSha, out var node)) + { + this.commitOrder.Remove(node); + var newNode = this.commitOrder.AddFirst(commitSha); + this.commitNodes[commitSha] = newNode; + } + } + + private bool EvictLruCommit() + { + var last = this.commitOrder.Last; + if (last != null && last.Value != this.commitOrder.First.Value) + { + string lruCommit = last.Value; + var treeCountBefore = this.commitsByTree.Count; + this.RemoveCommitNoCache(lruCommit); + + EventMetadata metadata = new EventMetadata(); + metadata.Add("Area", EtwArea); + metadata.Add("EvictedCommit", lruCommit); + metadata.Add("TreeCountBefore", treeCountBefore); + metadata.Add("TreeCountAfter", this.commitsByTree.Count); + this.tracer.RelatedEvent(EventLevel.Informational, nameof(this.EvictLruCommit), metadata, Keywords.Telemetry); + + return true; + } + + EventMetadata warnMetadata = new EventMetadata(); + warnMetadata.Add("Area", EtwArea); + warnMetadata.Add("TreeCount", this.commitsByTree.Count); + warnMetadata.Add("CommitCount", this.commitNodes.Count); + this.tracer.RelatedEvent(EventLevel.Warning, $"{nameof(this.EvictLruCommit)}CouldNotEvict", warnMetadata, Keywords.Telemetry); + + return false; + } + + /// + /// Removes a commit without cascading tree removal to other commits. + /// Used during LRU eviction: the trees are still missing, so other commits + /// that share those trees should continue to track them. 
+ /// + private void RemoveCommitNoCache(string commitSha) + { + if (!this.missingTreesByCommit.TryGetValue(commitSha, out var trees)) + { + return; + } + + foreach (string treeSha in trees) + { + if (this.commitsByTree.TryGetValue(treeSha, out var commits)) + { + commits.Remove(commitSha); + if (commits.Count == 0) + { + this.commitsByTree.Remove(treeSha); + } + } + } + + this.missingTreesByCommit.Remove(commitSha); + this.RemoveFromLruOrder(commitSha); + } + + /// + /// Removes a commit and cascades: trees that were in this commit's set are + /// also removed from all other commits that shared them. Any commit that + /// becomes empty as a result is also removed (without further cascade). + /// + private void RemoveCommitWithCascade(string commitSha) + { + if (!this.missingTreesByCommit.TryGetValue(commitSha, out var trees)) + { + return; + } + + // Collect commits that may become empty after we remove the shared trees. + // We don't cascade further than one level. + var commitsToCheck = new HashSet(); + + foreach (string treeSha in trees) + { + if (this.commitsByTree.TryGetValue(treeSha, out var sharingCommits)) + { + sharingCommits.Remove(commitSha); + + foreach (string otherCommit in sharingCommits) + { + if (this.missingTreesByCommit.TryGetValue(otherCommit, out var otherTrees)) + { + otherTrees.Remove(treeSha); + if (otherTrees.Count == 0) + { + commitsToCheck.Add(otherCommit); + } + } + } + + sharingCommits.Clear(); + this.commitsByTree.Remove(treeSha); + } + } + + this.missingTreesByCommit.Remove(commitSha); + this.RemoveFromLruOrder(commitSha); + + // Clean up any commits that became empty due to the cascade + foreach (string emptyCommit in commitsToCheck) + { + if (this.missingTreesByCommit.TryGetValue(emptyCommit, out var remaining) && remaining.Count == 0) + { + this.missingTreesByCommit.Remove(emptyCommit); + this.RemoveFromLruOrder(emptyCommit); + } + } + } + + private void RemoveFromLruOrder(string commitSha) + { + if 
(this.commitNodes.TryGetValue(commitSha, out var node)) + { + this.commitOrder.Remove(node); + this.commitNodes.Remove(commitSha); + } + } + } +} diff --git a/GVFS/GVFS.Common/NamedPipes/HydrationStatusNamedPipeMessages.cs b/GVFS/GVFS.Common/NamedPipes/HydrationStatusNamedPipeMessages.cs new file mode 100644 index 000000000..99ab4bfbd --- /dev/null +++ b/GVFS/GVFS.Common/NamedPipes/HydrationStatusNamedPipeMessages.cs @@ -0,0 +1,98 @@ +using System; + +namespace GVFS.Common.NamedPipes +{ + public static partial class NamedPipeMessages + { + public static class HydrationStatus + { + public const string Request = "GetHydration"; + public const string SuccessResult = "S"; + public const string NotAvailableResult = "NA"; + + /// + /// Wire format: PlaceholderFileCount,PlaceholderFolderCount,ModifiedFileCount,ModifiedFolderCount,TotalFileCount,TotalFolderCount + /// + public class Response + { + public int PlaceholderFileCount { get; set; } + public int PlaceholderFolderCount { get; set; } + public int ModifiedFileCount { get; set; } + public int ModifiedFolderCount { get; set; } + public int TotalFileCount { get; set; } + public int TotalFolderCount { get; set; } + + public int HydratedFileCount => this.PlaceholderFileCount + this.ModifiedFileCount; + public int HydratedFolderCount => this.PlaceholderFolderCount + this.ModifiedFolderCount; + + public bool IsValid => + this.PlaceholderFileCount >= 0 + && this.PlaceholderFolderCount >= 0 + && this.ModifiedFileCount >= 0 + && this.ModifiedFolderCount >= 0 + && this.TotalFileCount >= this.HydratedFileCount + && this.TotalFolderCount >= this.HydratedFolderCount; + + public string ToDisplayMessage() + { + if (!this.IsValid) + { + return null; + } + + int filePercent = this.TotalFileCount == 0 ? 0 : (int)((100L * this.HydratedFileCount) / this.TotalFileCount); + int folderPercent = this.TotalFolderCount == 0 ? 
0 : (int)((100L * this.HydratedFolderCount) / this.TotalFolderCount); + return $"{filePercent}% of files and {folderPercent}% of folders hydrated. Run 'gvfs health' at the repository root for details."; + } + + public string ToBody() + { + return string.Join(",", + this.PlaceholderFileCount, + this.PlaceholderFolderCount, + this.ModifiedFileCount, + this.ModifiedFolderCount, + this.TotalFileCount, + this.TotalFolderCount); + } + + public static bool TryParse(string body, out Response response) + { + response = null; + if (string.IsNullOrEmpty(body)) + { + return false; + } + + string[] parts = body.Split(','); + if (parts.Length < 6) + { + return false; + } + + if (!int.TryParse(parts[0], out int placeholderFileCount) + || !int.TryParse(parts[1], out int placeholderFolderCount) + || !int.TryParse(parts[2], out int modifiedFileCount) + || !int.TryParse(parts[3], out int modifiedFolderCount) + || !int.TryParse(parts[4], out int totalFileCount) + || !int.TryParse(parts[5], out int totalFolderCount)) + { + return false; + } + + response = new Response + { + PlaceholderFileCount = placeholderFileCount, + PlaceholderFolderCount = placeholderFolderCount, + ModifiedFileCount = modifiedFileCount, + ModifiedFolderCount = modifiedFolderCount, + TotalFileCount = totalFileCount, + TotalFolderCount = totalFolderCount, + }; + + return response.IsValid; + } + } + } + } +} diff --git a/GVFS/GVFS.Common/NamedPipes/NamedPipeMessages.cs b/GVFS/GVFS.Common/NamedPipes/NamedPipeMessages.cs index 489ee05d7..fafb4e7d1 100644 --- a/GVFS/GVFS.Common/NamedPipes/NamedPipeMessages.cs +++ b/GVFS/GVFS.Common/NamedPipes/NamedPipeMessages.cs @@ -203,21 +203,24 @@ public static class DehydrateFolders public class Request { - public Request(string folders) + public Request(string backupFolderPath, string folders) { this.Folders = folders; + this.BackupFolderPath = backupFolderPath; } - public Request(Message message) + public static Request FromMessage(Message message) { - this.Folders = 
message.Body; + return JsonConvert.DeserializeObject(message.Body); } public string Folders { get; } + public string BackupFolderPath { get; } + public Message CreateMessage() { - return new Message(Dehydrate, this.Folders); + return new Message(Dehydrate, JsonConvert.SerializeObject(this)); } } diff --git a/GVFS/GVFS.Common/NamedPipes/UnstageNamedPipeMessages.cs b/GVFS/GVFS.Common/NamedPipes/UnstageNamedPipeMessages.cs new file mode 100644 index 000000000..ee3e32c27 --- /dev/null +++ b/GVFS/GVFS.Common/NamedPipes/UnstageNamedPipeMessages.cs @@ -0,0 +1,27 @@ +namespace GVFS.Common.NamedPipes +{ + public static partial class NamedPipeMessages + { + public static class PrepareForUnstage + { + public const string Request = "PreUnstage"; + public const string SuccessResult = "S"; + public const string FailureResult = "F"; + + public class Response + { + public Response(string result) + { + this.Result = result; + } + + public string Result { get; } + + public Message CreateMessage() + { + return new Message(this.Result, null); + } + } + } + } +} diff --git a/GVFS/GVFS.Common/ProcessHelper.cs b/GVFS/GVFS.Common/ProcessHelper.cs index 4fa57fbaf..3d7e35463 100644 --- a/GVFS/GVFS.Common/ProcessHelper.cs +++ b/GVFS/GVFS.Common/ProcessHelper.cs @@ -57,8 +57,10 @@ public static string GetCurrentProcessVersion() public static bool IsDevelopmentVersion() { + // Official CI builds use version numbers where major > 0. + // Development builds always start with 0. 
string version = ProcessHelper.GetCurrentProcessVersion(); - return version.Equals("0.2.173.2") || version.StartsWith("0.2.173.2+"); + return version.StartsWith("0."); } public static string GetProgramLocation(string programLocaterCommand, string processName) diff --git a/GVFS/GVFS.Common/RetryCircuitBreaker.cs b/GVFS/GVFS.Common/RetryCircuitBreaker.cs new file mode 100644 index 000000000..0f294c10e --- /dev/null +++ b/GVFS/GVFS.Common/RetryCircuitBreaker.cs @@ -0,0 +1,72 @@ +using System; +using System.Threading; + +namespace GVFS.Common +{ + /// + /// Global circuit breaker for retry operations. When too many consecutive failures + /// occur (e.g., during system-wide resource exhaustion), the circuit opens and + /// subsequent retry attempts fail fast instead of consuming connections and adding + /// backoff delays that worsen the resource pressure. + /// + public static class RetryCircuitBreaker + { + public const int DefaultFailureThreshold = 15; + public const int DefaultCooldownMs = 30_000; + + private static int failureThreshold = DefaultFailureThreshold; + private static int cooldownMs = DefaultCooldownMs; + private static int consecutiveFailures = 0; + private static long circuitOpenedAtUtcTicks = 0; + + public static bool IsOpen + { + get + { + if (Volatile.Read(ref consecutiveFailures) < failureThreshold) + { + return false; + } + + long openedAt = Volatile.Read(ref circuitOpenedAtUtcTicks); + return (DateTime.UtcNow.Ticks - openedAt) < TimeSpan.FromMilliseconds(cooldownMs).Ticks; + } + } + + public static int ConsecutiveFailures => Volatile.Read(ref consecutiveFailures); + + public static void RecordSuccess() + { + Interlocked.Exchange(ref consecutiveFailures, 0); + } + + public static void RecordFailure() + { + int failures = Interlocked.Increment(ref consecutiveFailures); + if (failures >= failureThreshold) + { + Volatile.Write(ref circuitOpenedAtUtcTicks, DateTime.UtcNow.Ticks); + } + } + + /// + /// Resets the circuit breaker to its initial state. 
Intended for testing. + /// + public static void Reset() + { + Volatile.Write(ref consecutiveFailures, 0); + Volatile.Write(ref circuitOpenedAtUtcTicks, 0); + Volatile.Write(ref failureThreshold, DefaultFailureThreshold); + Volatile.Write(ref cooldownMs, DefaultCooldownMs); + } + + /// + /// Configures the circuit breaker thresholds. Intended for testing. + /// + public static void Configure(int threshold, int cooldownMilliseconds) + { + Volatile.Write(ref failureThreshold, threshold); + Volatile.Write(ref cooldownMs, cooldownMilliseconds); + } + } +} diff --git a/GVFS/GVFS.Common/RetryWrapper.cs b/GVFS/GVFS.Common/RetryWrapper.cs index 8dafcbfd6..4d6a0ccd8 100644 --- a/GVFS/GVFS.Common/RetryWrapper.cs +++ b/GVFS/GVFS.Common/RetryWrapper.cs @@ -60,6 +60,24 @@ public static Action StandardErrorHandler(ITracer tracer, long r public InvocationResult Invoke(Func toInvoke) { + // NOTE: Cascade risk — connection pool timeouts (HttpRequestor returns + // ServiceUnavailable when the semaphore wait expires) flow through here + // as callback errors with shouldRetry=true and count toward the circuit + // breaker. Under sustained pool exhaustion, 15 timeouts can trip the + // breaker and fail-fast ALL retry operations for 30 seconds — including + // requests that might have succeeded. In practice, request coalescing + // (GVFSGitObjects) and the larger pool size drastically reduce the + // likelihood of sustained pool exhaustion. If telemetry shows this + // cascade occurring, consider excluding local resource pressure (pool + // timeouts) from circuit breaker failure counts. + if (RetryCircuitBreaker.IsOpen) + { + RetryableException circuitOpenError = new RetryableException( + "Circuit breaker is open - too many consecutive failures. Fast-failing to prevent resource exhaustion."); + this.OnFailure(new ErrorEventArgs(circuitOpenError, tryCount: 1, willRetry: false)); + return new InvocationResult(1, circuitOpenError); + } + // Use 1-based counting. 
This makes reporting look a lot nicer and saves a lot of +1s for (int tryCount = 1; tryCount <= this.maxAttempts; ++tryCount) { @@ -70,6 +88,11 @@ public InvocationResult Invoke(Func toInvoke) CallbackResult result = toInvoke(tryCount); if (result.HasErrors) { + if (result.ShouldRetry) + { + RetryCircuitBreaker.RecordFailure(); + } + if (!this.ShouldRetry(tryCount, null, result)) { return new InvocationResult(tryCount, result.Error, result.Result); @@ -77,6 +100,7 @@ public InvocationResult Invoke(Func toInvoke) } else { + RetryCircuitBreaker.RecordSuccess(); return new InvocationResult(tryCount, true, result.Result); } } @@ -92,6 +116,7 @@ e is AggregateException throw; } + RetryCircuitBreaker.RecordFailure(); if (!this.ShouldRetry(tryCount, exceptionToReport, null)) { return new InvocationResult(tryCount, exceptionToReport); diff --git a/GVFS/GVFS.Common/ReturnCode.cs b/GVFS/GVFS.Common/ReturnCode.cs index f99f3875d..5243cb2f5 100644 --- a/GVFS/GVFS.Common/ReturnCode.cs +++ b/GVFS/GVFS.Common/ReturnCode.cs @@ -10,5 +10,6 @@ public enum ReturnCode NullRequestData = 5, UnableToRegisterForOfflineIO = 6, DehydrateFolderFailures = 7, + MountAlreadyRunning = 8, } } diff --git a/GVFS/GVFS.Common/Tracing/NullTracer.cs b/GVFS/GVFS.Common/Tracing/NullTracer.cs new file mode 100644 index 000000000..8cd556642 --- /dev/null +++ b/GVFS/GVFS.Common/Tracing/NullTracer.cs @@ -0,0 +1,115 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace GVFS.Common.Tracing +{ + /// + /// Empty implementation of ITracer that does nothing + /// + public sealed class NullTracer : ITracer + { + private NullTracer() + { + } + + public static ITracer Instance { get; } = new NullTracer(); + + void IDisposable.Dispose() + { + + } + + void ITracer.RelatedError(EventMetadata metadata, string message) + { + + } + + void ITracer.RelatedError(EventMetadata metadata, string message, Keywords keywords) + { + + } + + void 
ITracer.RelatedError(string message) + { + + } + + void ITracer.RelatedError(string format, params object[] args) + { + + } + + void ITracer.RelatedEvent(EventLevel level, string eventName, EventMetadata metadata) + { + + } + + void ITracer.RelatedEvent(EventLevel level, string eventName, EventMetadata metadata, Keywords keywords) + { + + } + + void ITracer.RelatedInfo(string message) + { + + } + + void ITracer.RelatedInfo(string format, params object[] args) + { + + } + + void ITracer.RelatedInfo(EventMetadata metadata, string message) + { + + } + + void ITracer.RelatedWarning(EventMetadata metadata, string message) + { + + } + + void ITracer.RelatedWarning(EventMetadata metadata, string message, Keywords keywords) + { + + } + + void ITracer.RelatedWarning(string message) + { + + } + + void ITracer.RelatedWarning(string format, params object[] args) + { + + } + + void ITracer.SetGitCommandSessionId(string sessionId) + { + + } + + ITracer ITracer. StartActivity(string activityName, EventLevel level) + { + return this; + } + + ITracer ITracer. StartActivity(string activityName, EventLevel level, EventMetadata metadata) + { + return this; + } + + ITracer ITracer. StartActivity(string activityName, EventLevel level, Keywords startStopKeywords, EventMetadata metadata) + { + return this; + } + + TimeSpan ITracer.Stop(EventMetadata metadata) + { + return TimeSpan.Zero; + } + } +} diff --git a/GVFS/GVFS.Common/WorktreeCommandParser.cs b/GVFS/GVFS.Common/WorktreeCommandParser.cs new file mode 100644 index 000000000..df98cc750 --- /dev/null +++ b/GVFS/GVFS.Common/WorktreeCommandParser.cs @@ -0,0 +1,142 @@ +using System; +using System.Collections.Generic; + +namespace GVFS.Common +{ + /// + /// Parses git worktree command arguments from hook args arrays. 
+ /// Hook args format: [hooktype, "worktree", subcommand, options..., positional args..., --git-pid=N, --exit_code=N] + /// + /// Assumptions: + /// - Args are passed by git exactly as the user typed them (no normalization). + /// - --git-pid and --exit_code are always appended by git in =value form. + /// - Single-letter flags may be combined (e.g., -fd for --force --detach). + /// - -b/-B always consume the next arg as a branch name, even when combined (e.g., -fb branch). + /// + /// Future improvement: consider replacing with a POSIX-compatible arg parser + /// library (e.g., Mono.Options, MIT license) to handle edge cases more robustly. + /// + public static class WorktreeCommandParser + { + private static readonly HashSet ShortOptionsWithValue = new HashSet { 'b', 'B' }; + + /// + /// Gets the worktree subcommand (add, remove, move, list, etc.) from hook args. + /// + public static string GetSubcommand(string[] args) + { + // args[0] = hook type, args[1] = "worktree", args[2+] = subcommand and its args + for (int i = 2; i < args.Length; i++) + { + if (!args[i].StartsWith("-")) + { + return args[i].ToLowerInvariant(); + } + } + + return null; + } + + /// + /// Gets a positional argument from git worktree subcommand args. + /// For 'add': git worktree add [options] <path> [<commit-ish>] + /// For 'remove': git worktree remove [options] <worktree> + /// For 'move': git worktree move [options] <worktree> <new-path> + /// + /// Full hook args array (hooktype, command, subcommand, ...) 
+ /// 0-based index of the positional arg after the subcommand + public static string GetPositionalArg(string[] args, int positionalIndex) + { + var longOptionsWithValue = new HashSet(StringComparer.OrdinalIgnoreCase) + { + "--reason" + }; + + int found = -1; + bool pastSubcommand = false; + bool pastSeparator = false; + for (int i = 2; i < args.Length; i++) + { + if (args[i].StartsWith("--git-pid") || args[i].StartsWith("--exit_code")) + { + // Always =value form, but skip either way + if (!args[i].Contains("=") && i + 1 < args.Length) + { + i++; + } + + continue; + } + + if (args[i] == "--") + { + pastSeparator = true; + continue; + } + + if (!pastSeparator && args[i].StartsWith("--")) + { + // Long option — check if it takes a separate value + if (longOptionsWithValue.Contains(args[i]) && i + 1 < args.Length) + { + i++; + } + + continue; + } + + if (!pastSeparator && args[i].StartsWith("-") && args[i].Length > 1) + { + // Short option(s), possibly combined (e.g., -fd, -fb branch). + // A value-taking letter consumes the rest of the arg as its value. + // Only consume the next arg if the first value-taking letter is + // the last character (no baked-in value). + // e.g., -bfd → b="fd" (baked), -fdb val → f,d booleans, b="val" + // -Bb → B="b" (baked), -fBb → f boolean, B="b" (baked) + string flags = args[i].Substring(1); + bool consumesNextArg = false; + for (int j = 0; j < flags.Length; j++) + { + if (ShortOptionsWithValue.Contains(flags[j])) + { + // This letter takes a value. If it's the last letter, + // the value is the next arg. Otherwise the value is the + // remaining characters (baked in) and we're done. 
+ consumesNextArg = (j == flags.Length - 1); + break; + } + } + + if (consumesNextArg && i + 1 < args.Length) + { + i++; + } + + continue; + } + + if (!pastSubcommand) + { + pastSubcommand = true; + continue; + } + + found++; + if (found == positionalIndex) + { + return args[i]; + } + } + + return null; + } + + /// + /// Gets the first positional argument (worktree path) from git worktree args. + /// + public static string GetPathArg(string[] args) + { + return GetPositionalArg(args, 0); + } + } +} diff --git a/GVFS/GVFS.FunctionalTests/GVFS.FunctionalTests.csproj b/GVFS/GVFS.FunctionalTests/GVFS.FunctionalTests.csproj index f170451f4..c777bdf84 100644 --- a/GVFS/GVFS.FunctionalTests/GVFS.FunctionalTests.csproj +++ b/GVFS/GVFS.FunctionalTests/GVFS.FunctionalTests.csproj @@ -26,6 +26,7 @@ false + PreserveNewest diff --git a/GVFS/GVFS.FunctionalTests/GVFSTestConfig.cs b/GVFS/GVFS.FunctionalTests/GVFSTestConfig.cs index aee3f2df8..03242369a 100644 --- a/GVFS/GVFS.FunctionalTests/GVFSTestConfig.cs +++ b/GVFS/GVFS.FunctionalTests/GVFSTestConfig.cs @@ -16,6 +16,8 @@ public static class GVFSTestConfig public static bool ReplaceInboxProjFS { get; set; } + public static bool IsDevMode { get; set; } + public static string PathToGVFS { get diff --git a/GVFS/GVFS.FunctionalTests/Program.cs b/GVFS/GVFS.FunctionalTests/Program.cs index f00d9496a..0303371bf 100644 --- a/GVFS/GVFS.FunctionalTests/Program.cs +++ b/GVFS/GVFS.FunctionalTests/Program.cs @@ -1,5 +1,7 @@ +using GVFS.Common; using GVFS.FunctionalTests.Properties; using GVFS.FunctionalTests.Tools; +using GVFS.PlatformLoader; using GVFS.Tests; using System; using System.Collections.Generic; @@ -13,14 +15,27 @@ public class Program public static void Main(string[] args) { Properties.Settings.Default.Initialize(); + GVFSPlatformLoader.Initialize(); + + GVFSTestConfig.IsDevMode = Environment.GetEnvironmentVariable("GVFS_FUNCTIONAL_TEST_DEV_MODE") == "1"; + Console.WriteLine("Settings.Default.CurrentDirectory: {0}", 
Settings.Default.CurrentDirectory); Console.WriteLine("Settings.Default.PathToGit: {0}", Settings.Default.PathToGit); Console.WriteLine("Settings.Default.PathToGVFS: {0}", Settings.Default.PathToGVFS); Console.WriteLine("Settings.Default.PathToGVFSService: {0}", Settings.Default.PathToGVFSService); + if (GVFSTestConfig.IsDevMode) + { + Console.WriteLine("*** Dev mode enabled (GVFS_FUNCTIONAL_TEST_DEV_MODE=1) ***"); + } NUnitRunner runner = new NUnitRunner(args); runner.AddGlobalSetupIfNeeded("GVFS.FunctionalTests.GlobalSetup"); + if (runner.HasCustomArg("--debug")) + { + Debugger.Launch(); + } + if (runner.HasCustomArg("--no-shared-gvfs-cache")) { Console.WriteLine("Running without a shared git object cache"); @@ -133,11 +148,8 @@ private static void RunBeforeAnyTests() GVFSServiceProcess.InstallService(); - string serviceProgramDataDir = Path.Combine( - Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles, Environment.SpecialFolderOption.Create), - "GVFS", - "ProgramData", - "GVFS.Service"); + string serviceProgramDataDir = GVFSPlatform.Instance.GetSecureDataRootForGVFSComponent( + GVFSConstants.Service.ServiceName); string statusCacheVersionTokenPath = Path.Combine( serviceProgramDataDir, "EnableGitStatusCacheToken.dat"); diff --git a/GVFS/GVFS.FunctionalTests/Settings.cs b/GVFS/GVFS.FunctionalTests/Settings.cs index 1abc3b851..9a978d2cf 100644 --- a/GVFS/GVFS.FunctionalTests/Settings.cs +++ b/GVFS/GVFS.FunctionalTests/Settings.cs @@ -45,16 +45,30 @@ public static void Initialize() Commitish = @"FunctionalTests/20201014"; EnlistmentRoot = @"C:\Repos\GVFSFunctionalTests\enlistment"; - PathToGVFS = @"C:\Program Files\VFS for Git\GVFS.exe"; - PathToGit = @"C:\Program Files\Git\cmd\git.exe"; - PathToBash = @"C:\Program Files\Git\bin\bash.exe"; ControlGitRepoRoot = @"C:\Repos\GVFSFunctionalTests\ControlRepo"; FastFetchBaseRoot = @"C:\Repos\GVFSFunctionalTests\FastFetch"; FastFetchRoot = Path.Combine(FastFetchBaseRoot, "test"); FastFetchControl = 
Path.Combine(FastFetchBaseRoot, "control"); - PathToGVFSService = @"C:\Program Files\VFS for Git\GVFS.Service.exe"; BinaryFileNameExtension = ".exe"; + + string devModeOutDir = Environment.GetEnvironmentVariable("GVFS_DEV_OUT_DIR"); + if (!string.IsNullOrEmpty(devModeOutDir)) + { + string configuration = Environment.GetEnvironmentVariable("GVFS_DEV_CONFIGURATION") ?? "Debug"; + string payloadDir = Path.Combine(devModeOutDir, "GVFS.Payload", "bin", configuration, "win-x64"); + + PathToGVFS = Path.Combine(payloadDir, "gvfs.exe"); + PathToGVFSService = Path.Combine(payloadDir, "GVFS.Service.exe"); + } + else + { + PathToGVFS = @"C:\Program Files\VFS for Git\GVFS.exe"; + PathToGVFSService = @"C:\Program Files\VFS for Git\GVFS.Service.exe"; + } + + PathToGit = @"C:\Program Files\Git\cmd\git.exe"; + PathToBash = @"C:\Program Files\Git\bin\bash.exe"; } } } diff --git a/GVFS/GVFS.FunctionalTests/Tests/EnlistmentPerFixture/DehydrateTests.cs b/GVFS/GVFS.FunctionalTests/Tests/EnlistmentPerFixture/DehydrateTests.cs index f896825fe..e05277bf5 100644 --- a/GVFS/GVFS.FunctionalTests/Tests/EnlistmentPerFixture/DehydrateTests.cs +++ b/GVFS/GVFS.FunctionalTests/Tests/EnlistmentPerFixture/DehydrateTests.cs @@ -54,7 +54,19 @@ public void DehydrateShouldExitWithoutConfirm() [TestCase] public void DehydrateShouldSucceedInCommonCase() { - this.DehydrateShouldSucceed(new[] { "The repo was successfully dehydrated and remounted" }, confirm: true, noStatus: false); + this.DehydrateShouldSucceed(new[] { "folder dehydrate successful." 
}, confirm: true, noStatus: false); + } + + [TestCase] + public void FullDehydrateShouldExitWithoutConfirm() + { + this.DehydrateShouldSucceed(new[] { "To actually execute the dehydrate, run 'gvfs dehydrate --confirm --full'" }, confirm: false, noStatus: false, full: true); + } + + [TestCase] + public void FullDehydrateShouldSucceedInCommonCase() + { + this.DehydrateShouldSucceed(new[] { "The repo was successfully dehydrated and remounted" }, confirm: true, noStatus: false, full: true); } [TestCase] @@ -69,13 +81,13 @@ public void DehydrateShouldSucceedEvenIfObjectCacheIsDeleted() { this.Enlistment.UnmountGVFS(); RepositoryHelpers.DeleteTestDirectory(this.Enlistment.GetObjectRoot(this.fileSystem)); - this.DehydrateShouldSucceed(new[] { "The repo was successfully dehydrated and remounted" }, confirm: true, noStatus: true); + this.DehydrateShouldSucceed(new[] { "The repo was successfully dehydrated and remounted" }, confirm: true, noStatus: true, full: true); } [TestCase] public void DehydrateShouldBackupFiles() { - this.DehydrateShouldSucceed(new[] { "The repo was successfully dehydrated and remounted" }, confirm: true, noStatus: false); + this.DehydrateShouldSucceed(new[] { "The repo was successfully dehydrated and remounted" }, confirm: true, noStatus: false, full: true); string backupFolder = Path.Combine(this.Enlistment.EnlistmentRoot, "dehydrate_backup"); backupFolder.ShouldBeADirectory(this.fileSystem); string[] backupFolderItems = this.fileSystem.EnumerateDirectory(backupFolder).Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries); @@ -112,7 +124,7 @@ public void DehydrateShouldFailIfLocalCacheNotInMetadata() GVFSHelpers.SaveDiskLayoutVersion(this.Enlistment.DotGVFSRoot, majorVersion, minorVersion); GVFSHelpers.SaveGitObjectsRoot(this.Enlistment.DotGVFSRoot, objectsRoot); - this.DehydrateShouldFail(new[] { "Failed to determine local cache path from repo metadata" }, noStatus: true); + this.DehydrateShouldFail(new[] { "Failed to determine local 
cache path from repo metadata" }, noStatus: true, full: true); this.fileSystem.DeleteFile(metadataPath); this.fileSystem.MoveFile(metadataBackupPath, metadataPath); @@ -136,7 +148,7 @@ public void DehydrateShouldFailIfGitObjectsRootNotInMetadata() GVFSHelpers.SaveDiskLayoutVersion(this.Enlistment.DotGVFSRoot, majorVersion, minorVersion); GVFSHelpers.SaveLocalCacheRoot(this.Enlistment.DotGVFSRoot, localCacheRoot); - this.DehydrateShouldFail(new[] { "Failed to determine git objects root from repo metadata" }, noStatus: true); + this.DehydrateShouldFail(new[] { "Failed to determine git objects root from repo metadata" }, noStatus: true, full: true); this.fileSystem.DeleteFile(metadataPath); this.fileSystem.MoveFile(metadataBackupPath, metadataPath); @@ -160,11 +172,11 @@ public void DehydrateShouldFailOnWrongDiskLayoutVersion() if (previousMajorVersionNum >= GVFSHelpers.GetCurrentDiskLayoutMinimumMajorVersion()) { GVFSHelpers.SaveDiskLayoutVersion(this.Enlistment.DotGVFSRoot, previousMajorVersionNum.ToString(), "0"); - this.DehydrateShouldFail(new[] { "disk layout version doesn't match current version" }, noStatus: true); + this.DehydrateShouldFail(new[] { "disk layout version doesn't match current version" }, noStatus: true, full: true); } GVFSHelpers.SaveDiskLayoutVersion(this.Enlistment.DotGVFSRoot, (majorVersionNum + 1).ToString(), "0"); - this.DehydrateShouldFail(new[] { "Changes to GVFS disk layout do not allow mounting after downgrade." }, noStatus: true); + this.DehydrateShouldFail(new[] { "Changes to GVFS disk layout do not allow mounting after downgrade." 
}, noStatus: true, full: true); GVFSHelpers.SaveDiskLayoutVersion(this.Enlistment.DotGVFSRoot, majorVersionNum.ToString(), minorVersionNum.ToString()); } @@ -558,9 +570,9 @@ private void CheckDehydratedFolderAfterUnmount(string path) } } - private void DehydrateShouldSucceed(string[] expectedInOutput, bool confirm, bool noStatus, params string[] foldersToDehydrate) + private void DehydrateShouldSucceed(string[] expectedInOutput, bool confirm, bool noStatus, bool full = false, params string[] foldersToDehydrate) { - ProcessResult result = this.RunDehydrateProcess(confirm, noStatus, foldersToDehydrate); + ProcessResult result = this.RunDehydrateProcess(confirm, noStatus, full, foldersToDehydrate); result.ExitCode.ShouldEqual(0, $"mount exit code was {result.ExitCode}. Output: {result.Output}"); if (result.Output.Contains("Failed to move the src folder: Access to the path")) @@ -572,14 +584,14 @@ private void DehydrateShouldSucceed(string[] expectedInOutput, bool confirm, boo result.Output.ShouldContain(expectedInOutput); } - private void DehydrateShouldFail(string[] expectedErrorMessages, bool noStatus, params string[] foldersToDehydrate) + private void DehydrateShouldFail(string[] expectedErrorMessages, bool noStatus, bool full = false, params string[] foldersToDehydrate) { - ProcessResult result = this.RunDehydrateProcess(confirm: true, noStatus: noStatus, foldersToDehydrate: foldersToDehydrate); + ProcessResult result = this.RunDehydrateProcess(confirm: true, noStatus: noStatus, full: full, foldersToDehydrate: foldersToDehydrate); result.ExitCode.ShouldEqual(GVFSGenericError, $"mount exit code was not {GVFSGenericError}"); result.Output.ShouldContain(expectedErrorMessages); } - private ProcessResult RunDehydrateProcess(bool confirm, bool noStatus, params string[] foldersToDehydrate) + private ProcessResult RunDehydrateProcess(bool confirm, bool noStatus, bool full = false, params string[] foldersToDehydrate) { string dehydrateFlags = string.Empty; if (confirm) @@ 
-592,6 +604,11 @@ private ProcessResult RunDehydrateProcess(bool confirm, bool noStatus, params st dehydrateFlags += " --no-status "; } + if (full) + { + dehydrateFlags += " --full "; + } + if (foldersToDehydrate.Length > 0) { dehydrateFlags += $" --folders {string.Join(";", foldersToDehydrate)}"; diff --git a/GVFS/GVFS.FunctionalTests/Tests/EnlistmentPerFixture/GitBlockCommandsTests.cs b/GVFS/GVFS.FunctionalTests/Tests/EnlistmentPerFixture/GitBlockCommandsTests.cs index 774f9be0b..d0660205c 100644 --- a/GVFS/GVFS.FunctionalTests/Tests/EnlistmentPerFixture/GitBlockCommandsTests.cs +++ b/GVFS/GVFS.FunctionalTests/Tests/EnlistmentPerFixture/GitBlockCommandsTests.cs @@ -23,7 +23,7 @@ public void GitBlockCommands() this.CommandBlocked("update-index --skip-worktree"); this.CommandBlocked("update-index --no-skip-worktree"); this.CommandBlocked("update-index --split-index"); - this.CommandBlocked("worktree list"); + this.CommandNotBlocked("worktree list"); } private void CommandBlocked(string command) diff --git a/GVFS/GVFS.FunctionalTests/Tests/EnlistmentPerFixture/WorktreeTests.cs b/GVFS/GVFS.FunctionalTests/Tests/EnlistmentPerFixture/WorktreeTests.cs new file mode 100644 index 000000000..376796350 --- /dev/null +++ b/GVFS/GVFS.FunctionalTests/Tests/EnlistmentPerFixture/WorktreeTests.cs @@ -0,0 +1,159 @@ +using GVFS.FunctionalTests.Tools; +using GVFS.Tests.Should; +using NUnit.Framework; +using System; +using System.Diagnostics; +using System.IO; + +namespace GVFS.FunctionalTests.Tests.EnlistmentPerFixture +{ + [TestFixture] + [Category(Categories.GitCommands)] + public class WorktreeTests : TestsWithEnlistmentPerFixture + { + private const string WorktreeBranchA = "worktree-test-branch-a"; + private const string WorktreeBranchB = "worktree-test-branch-b"; + + [TestCase] + public void ConcurrentWorktreeAddCommitRemove() + { + string worktreePathA = Path.Combine(this.Enlistment.EnlistmentRoot, "test-wt-a-" + Guid.NewGuid().ToString("N").Substring(0, 8)); + string 
worktreePathB = Path.Combine(this.Enlistment.EnlistmentRoot, "test-wt-b-" + Guid.NewGuid().ToString("N").Substring(0, 8)); + + try + { + // 1. Create both worktrees in parallel + ProcessResult addResultA = null; + ProcessResult addResultB = null; + System.Threading.Tasks.Parallel.Invoke( + () => addResultA = GitHelpers.InvokeGitAgainstGVFSRepo( + this.Enlistment.RepoRoot, + $"worktree add -b {WorktreeBranchA} \"{worktreePathA}\""), + () => addResultB = GitHelpers.InvokeGitAgainstGVFSRepo( + this.Enlistment.RepoRoot, + $"worktree add -b {WorktreeBranchB} \"{worktreePathB}\"")); + + addResultA.ExitCode.ShouldEqual(0, $"worktree add A failed: {addResultA.Errors}"); + addResultB.ExitCode.ShouldEqual(0, $"worktree add B failed: {addResultB.Errors}"); + + // 2. Verify both have projected files + Directory.Exists(worktreePathA).ShouldBeTrue("Worktree A directory should exist"); + Directory.Exists(worktreePathB).ShouldBeTrue("Worktree B directory should exist"); + File.Exists(Path.Combine(worktreePathA, "Readme.md")).ShouldBeTrue("Readme.md should be projected in A"); + File.Exists(Path.Combine(worktreePathB, "Readme.md")).ShouldBeTrue("Readme.md should be projected in B"); + + // 3. Verify git status is clean in both + ProcessResult statusA = GitHelpers.InvokeGitAgainstGVFSRepo(worktreePathA, "status --porcelain"); + ProcessResult statusB = GitHelpers.InvokeGitAgainstGVFSRepo(worktreePathB, "status --porcelain"); + statusA.ExitCode.ShouldEqual(0, $"git status A failed: {statusA.Errors}"); + statusB.ExitCode.ShouldEqual(0, $"git status B failed: {statusB.Errors}"); + statusA.Output.Trim().ShouldBeEmpty("Worktree A should have clean status"); + statusB.Output.Trim().ShouldBeEmpty("Worktree B should have clean status"); + + // 4. 
Verify worktree list shows all three + ProcessResult listResult = GitHelpers.InvokeGitAgainstGVFSRepo( + this.Enlistment.RepoRoot, "worktree list"); + listResult.ExitCode.ShouldEqual(0, $"worktree list failed: {listResult.Errors}"); + string listOutput = listResult.Output; + Assert.IsTrue(listOutput.Contains(worktreePathA.Replace('\\', '/')), + $"worktree list should contain A. Output: {listOutput}"); + Assert.IsTrue(listOutput.Contains(worktreePathB.Replace('\\', '/')), + $"worktree list should contain B. Output: {listOutput}"); + + // 5. Make commits in both worktrees + File.WriteAllText(Path.Combine(worktreePathA, "from-a.txt"), "created in worktree A"); + GitHelpers.InvokeGitAgainstGVFSRepo(worktreePathA, "add from-a.txt") + .ExitCode.ShouldEqual(0); + GitHelpers.InvokeGitAgainstGVFSRepo(worktreePathA, "commit -m \"commit from A\"") + .ExitCode.ShouldEqual(0); + + File.WriteAllText(Path.Combine(worktreePathB, "from-b.txt"), "created in worktree B"); + GitHelpers.InvokeGitAgainstGVFSRepo(worktreePathB, "add from-b.txt") + .ExitCode.ShouldEqual(0); + GitHelpers.InvokeGitAgainstGVFSRepo(worktreePathB, "commit -m \"commit from B\"") + .ExitCode.ShouldEqual(0); + + // 6. Verify commits are visible from all worktrees (shared objects) + GitHelpers.InvokeGitAgainstGVFSRepo(this.Enlistment.RepoRoot, $"log -1 --format=%s {WorktreeBranchA}") + .Output.ShouldContain(expectedSubstrings: new[] { "commit from A" }); + GitHelpers.InvokeGitAgainstGVFSRepo(this.Enlistment.RepoRoot, $"log -1 --format=%s {WorktreeBranchB}") + .Output.ShouldContain(expectedSubstrings: new[] { "commit from B" }); + + // A can see B's commit and vice versa + GitHelpers.InvokeGitAgainstGVFSRepo(worktreePathA, $"log -1 --format=%s {WorktreeBranchB}") + .Output.ShouldContain(expectedSubstrings: new[] { "commit from B" }); + GitHelpers.InvokeGitAgainstGVFSRepo(worktreePathB, $"log -1 --format=%s {WorktreeBranchA}") + .Output.ShouldContain(expectedSubstrings: new[] { "commit from A" }); + + // 7. 
Remove both in parallel + ProcessResult removeA = null; + ProcessResult removeB = null; + System.Threading.Tasks.Parallel.Invoke( + () => removeA = GitHelpers.InvokeGitAgainstGVFSRepo( + this.Enlistment.RepoRoot, + $"worktree remove --force \"{worktreePathA}\""), + () => removeB = GitHelpers.InvokeGitAgainstGVFSRepo( + this.Enlistment.RepoRoot, + $"worktree remove --force \"{worktreePathB}\"")); + + removeA.ExitCode.ShouldEqual(0, $"worktree remove A failed: {removeA.Errors}"); + removeB.ExitCode.ShouldEqual(0, $"worktree remove B failed: {removeB.Errors}"); + + // 8. Verify cleanup + Directory.Exists(worktreePathA).ShouldBeFalse("Worktree A directory should be deleted"); + Directory.Exists(worktreePathB).ShouldBeFalse("Worktree B directory should be deleted"); + } + finally + { + this.ForceCleanupWorktree(worktreePathA, WorktreeBranchA); + this.ForceCleanupWorktree(worktreePathB, WorktreeBranchB); + } + } + + private void ForceCleanupWorktree(string worktreePath, string branchName) + { + // Best-effort cleanup for test failure cases + try + { + GitHelpers.InvokeGitAgainstGVFSRepo( + this.Enlistment.RepoRoot, + $"worktree remove --force \"{worktreePath}\""); + } + catch + { + } + + if (Directory.Exists(worktreePath)) + { + try + { + // Unmount any running GVFS mount for this worktree + Process unmount = Process.Start("gvfs", $"unmount \"{worktreePath}\""); + unmount?.WaitForExit(30000); + } + catch + { + } + + try + { + Directory.Delete(worktreePath, recursive: true); + } + catch + { + } + } + + // Clean up branch + try + { + GitHelpers.InvokeGitAgainstGVFSRepo( + this.Enlistment.RepoRoot, + $"branch -D {branchName}"); + } + catch + { + } + } + } +} diff --git a/GVFS/GVFS.FunctionalTests/Tests/GitCommands/CorruptionReproTests.cs b/GVFS/GVFS.FunctionalTests/Tests/GitCommands/CorruptionReproTests.cs new file mode 100644 index 000000000..9e7eec0bb --- /dev/null +++ b/GVFS/GVFS.FunctionalTests/Tests/GitCommands/CorruptionReproTests.cs @@ -0,0 +1,98 @@ +using System; 
+using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using GVFS.Common; +using GVFS.FunctionalTests.Properties; +using GVFS.FunctionalTests.Tests.EnlistmentPerTestCase; +using NUnit.Framework; + +namespace GVFS.FunctionalTests.Tests.GitCommands +{ + /// + /// This class is used to reproduce corruption scenarios in the GVFS virtual projection. + /// + [Category(Categories.GitCommands)] + [TestFixtureSource(typeof(GitRepoTests), nameof(GitRepoTests.ValidateWorkingTree))] + public class CorruptionReproTests : GitRepoTests + { + public CorruptionReproTests(Settings.ValidateWorkingTreeMode validateWorkingTree) + : base(enlistmentPerTest: true, validateWorkingTree: validateWorkingTree) + { + } + + [TestCase] + public void ReproCherryPickRestoreCorruption() + { + // Reproduces a corruption scenario where git commands (like cherry-pick -n) + // stage changes directly, bypassing the filesystem. In VFS mode, these staged + // files have skip-worktree set and are not in the ModifiedPaths database. + // Without the fix, a subsequent "restore --staged" would fail to properly + // unstage them, leaving the index and projection in an inconsistent state. + // + // See https://github.com/microsoft/VFSForGit/issues/1855 + + // Based on FunctionalTests/20170206_Conflict_Source + const string CherryPickCommit = "51d15f7584e81d59d44c1511ce17d7c493903390"; + const string StartingCommit = "db95d631e379d366d26d899523f8136a77441914"; + + this.ControlGitRepo.Fetch(StartingCommit); + this.ControlGitRepo.Fetch(CherryPickCommit); + + this.ValidateGitCommand($"checkout -b FunctionalTests/CherryPickRestoreCorruptionRepro {StartingCommit}"); + + // Cherry-pick stages adds, deletes, and modifications without committing. + // In VFS mode, these changes are made directly by git in the index — they + // are not in ModifiedPaths, so all affected files still have skip-worktree set. 
+ this.ValidateGitCommand($"cherry-pick -n {CherryPickCommit}"); + + // Restore --staged for a single file first. This verifies that only the + // targeted file is added to ModifiedPaths, not all staged files (important + // for performance when there are many staged files, e.g. during merge + // conflict resolution). + // + // Before the fix: added files with skip-worktree would be skipped by + // restore --staged, remaining stuck as staged in the index. + this.ValidateGitCommand("restore --staged Test_ConflictTests/AddedFiles/AddedBySource.txt"); + + // Restore --staged for everything remaining. Before the fix: + // - Modified files: restored in the index but invisible to git status + // because skip-worktree was set and the file wasn't in ModifiedPaths, + // so git never checked the working tree against the index. + // - Deleted files: same issue — deletions became invisible. + // - Added files: remained stuck as staged because restore --staged + // skipped them (skip-worktree set), and their ProjFS placeholders + // would later vanish when the projection reverted to HEAD. + this.ValidateGitCommand("restore --staged ."); + + // Restore the working directory. Before the fix, this step would + // silently succeed but leave corrupted state: modified/deleted files + // had stale projected content that didn't match HEAD, and added files + // (as ProjFS placeholders) would vanish entirely since they're not in + // HEAD's tree. + this.ValidateGitCommand("restore -- ."); + this.FilesShouldMatchCheckoutOfSourceBranch(); + } + + /// + /// Reproduction of a reported issue: + /// Restoring a file after its parent directory was deleted fails with + /// "fatal: could not unlink 'path\to\': Directory not empty" + /// + /// See https://github.com/microsoft/VFSForGit/issues/1901 + /// + [TestCase] + public void RestoreAfterDeleteNesteredDirectory() + { + // Delete a directory with nested subdirectories and files. 
+ this.ValidateNonGitCommand("cmd.exe", "/c \"rmdir /s /q GVFlt_DeleteFileTest\""); + + // Restore the working directory. + this.ValidateGitCommand("restore ."); + + this.FilesShouldMatchCheckoutOfSourceBranch(); + } + } +} diff --git a/GVFS/GVFS.FunctionalTests/Tests/GitCommands/GitRepoTests.cs b/GVFS/GVFS.FunctionalTests/Tests/GitCommands/GitRepoTests.cs index 2b902117f..d7a22fa28 100644 --- a/GVFS/GVFS.FunctionalTests/Tests/GitCommands/GitRepoTests.cs +++ b/GVFS/GVFS.FunctionalTests/Tests/GitCommands/GitRepoTests.cs @@ -269,6 +269,23 @@ protected void ValidateGitCommand(string command, params object[] args) args); } + protected void ValidateNonGitCommand(string command, string args = "", bool ignoreErrors = false, bool checkStatus = true) + { + string controlRepoRoot = this.ControlGitRepo.RootPath; + string gvfsRepoRoot = this.Enlistment.RepoRoot; + + ProcessResult expectedResult = ProcessHelper.Run(command, args, controlRepoRoot); + ProcessResult actualResult = ProcessHelper.Run(command, args, gvfsRepoRoot); + if (!ignoreErrors) + { + GitHelpers.ErrorsShouldMatch(command, expectedResult, actualResult); + } + if (checkStatus) + { + this.ValidateGitCommand("status"); + } + } + protected void ChangeMode(string filePath, ushort mode) { string virtualFile = Path.Combine(this.Enlistment.RepoRoot, filePath); diff --git a/GVFS/GVFS.FunctionalTests/Tools/ControlGitRepo.cs b/GVFS/GVFS.FunctionalTests/Tools/ControlGitRepo.cs index e8c40099d..807c09efd 100644 --- a/GVFS/GVFS.FunctionalTests/Tools/ControlGitRepo.cs +++ b/GVFS/GVFS.FunctionalTests/Tools/ControlGitRepo.cs @@ -55,6 +55,7 @@ public void Initialize() GitProcess.Invoke(this.RootPath, "config merge.renames false"); GitProcess.Invoke(this.RootPath, "config advice.statusUoption false"); GitProcess.Invoke(this.RootPath, "config core.abbrev 40"); + GitProcess.Invoke(this.RootPath, "config checkout.workers 0"); GitProcess.Invoke(this.RootPath, "config core.useBuiltinFSMonitor false"); GitProcess.Invoke(this.RootPath, 
"config pack.useSparse true"); GitProcess.Invoke(this.RootPath, "config reset.quiet true"); diff --git a/GVFS/GVFS.FunctionalTests/Tools/GVFSServiceProcess.cs b/GVFS/GVFS.FunctionalTests/Tools/GVFSServiceProcess.cs index d27925e86..2ac384629 100644 --- a/GVFS/GVFS.FunctionalTests/Tools/GVFSServiceProcess.cs +++ b/GVFS/GVFS.FunctionalTests/Tools/GVFSServiceProcess.cs @@ -12,20 +12,157 @@ namespace GVFS.FunctionalTests.Tools public static class GVFSServiceProcess { private static readonly string ServiceNameArgument = "--servicename=" + TestServiceName; + private static Process consoleServiceProcess; public static string TestServiceName { get { - return "Test.GVFS.Service"; + string name = Environment.GetEnvironmentVariable("GVFS_TEST_SERVICE_NAME"); + return string.IsNullOrWhiteSpace(name) ? "Test.GVFS.Service" : name; } } public static void InstallService() + { + if (GVFSTestConfig.IsDevMode) + { + StartServiceAsConsoleProcess(); + } + else + { + InstallWindowsService(); + } + } + + public static void UninstallService() + { + if (GVFSTestConfig.IsDevMode) + { + StopConsoleServiceProcess(); + CleanupServiceData(); + } + else + { + UninstallWindowsService(); + } + } + + public static void StartService() + { + if (GVFSTestConfig.IsDevMode) + { + StartServiceAsConsoleProcess(); + } + else + { + StartWindowsService(); + } + } + + public static void StopService() + { + if (GVFSTestConfig.IsDevMode) + { + StopConsoleServiceProcess(); + } + else + { + StopWindowsService(); + } + } + + private static void StartServiceAsConsoleProcess() + { + StopConsoleServiceProcess(); + + string pathToService = GetPathToService(); + Console.WriteLine("Starting test service in console mode: " + pathToService); + + ProcessStartInfo startInfo = new ProcessStartInfo(pathToService); + startInfo.Arguments = $"--console {ServiceNameArgument}"; + startInfo.UseShellExecute = false; + startInfo.CreateNoWindow = true; + startInfo.RedirectStandardOutput = true; + startInfo.RedirectStandardError = 
true; + + consoleServiceProcess = Process.Start(startInfo); + consoleServiceProcess.ShouldNotBeNull("Failed to start test service process"); + + // Consume output asynchronously to prevent buffer deadlock + consoleServiceProcess.BeginOutputReadLine(); + consoleServiceProcess.BeginErrorReadLine(); + + // Wait for the service to start listening on its named pipe + string pipeName = TestServiceName + ".pipe"; + int retries = 50; + while (retries-- > 0) + { + if (consoleServiceProcess.HasExited) + { + throw new InvalidOperationException( + $"Test service process exited with code {consoleServiceProcess.ExitCode} before becoming ready"); + } + + if (File.Exists(@"\\.\pipe\" + pipeName)) + { + Console.WriteLine("Test service is ready (pipe: " + pipeName + ")"); + return; + } + + Thread.Sleep(200); + } + + throw new System.TimeoutException("Timed out waiting for test service pipe: " + pipeName); + } + + private static void StopConsoleServiceProcess() + { + if (consoleServiceProcess != null && !consoleServiceProcess.HasExited) + { + try + { + Console.WriteLine("Stopping test service console process (PID: " + consoleServiceProcess.Id + ")"); + consoleServiceProcess.Kill(); + consoleServiceProcess.WaitForExit(5000); + } + catch (InvalidOperationException) + { + // Process already exited + } + + consoleServiceProcess = null; + } + } + + private static void CleanupServiceData() + { + string commonAppDataRoot = Environment.GetEnvironmentVariable("GVFS_COMMON_APPDATA_ROOT"); + string serviceData; + if (!string.IsNullOrEmpty(commonAppDataRoot)) + { + serviceData = Path.Combine(commonAppDataRoot, TestServiceName); + } + else + { + serviceData = Path.Combine( + Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), + "GVFS", + TestServiceName); + } + + DirectoryInfo serviceDataDir = new DirectoryInfo(serviceData); + if (serviceDataDir.Exists) + { + serviceDataDir.Delete(true); + } + } + + private static void InstallWindowsService() { 
Console.WriteLine("Installing " + TestServiceName); - UninstallService(); + UninstallWindowsService(); // Wait for delete to complete. If the services control panel is open, this will never complete. while (RunScCommand("query", TestServiceName).ExitCode == 0) @@ -47,12 +184,12 @@ public static void InstallService() ProcessResult result = RunScCommand("create", createServiceArguments); result.ExitCode.ShouldEqual(0, "Failure while running sc create " + createServiceArguments + "\r\n" + result.Output); - StartService(); + StartWindowsService(); } - public static void UninstallService() + private static void UninstallWindowsService() { - StopService(); + StopWindowsService(); RunScCommand("delete", TestServiceName); @@ -65,7 +202,7 @@ public static void UninstallService() } } - public static void StartService() + private static void StartWindowsService() { ServiceController testService = ServiceController.GetServices().SingleOrDefault(service => service.ServiceName == TestServiceName); testService.ShouldNotBeNull($"{TestServiceName} does not exist as a service"); @@ -78,7 +215,7 @@ public static void StartService() } } - public static void StopService() + private static void StopWindowsService() { try { diff --git a/GVFS/GVFS.FunctionalTests/Tools/ProcessHelper.cs b/GVFS/GVFS.FunctionalTests/Tools/ProcessHelper.cs index 664c1e254..539c5cc82 100644 --- a/GVFS/GVFS.FunctionalTests/Tools/ProcessHelper.cs +++ b/GVFS/GVFS.FunctionalTests/Tools/ProcessHelper.cs @@ -6,6 +6,11 @@ namespace GVFS.FunctionalTests.Tools public static class ProcessHelper { public static ProcessResult Run(string fileName, string arguments) + { + return Run(fileName, arguments, null); + } + + public static ProcessResult Run(string fileName, string arguments, string workingDirectory) { ProcessStartInfo startInfo = new ProcessStartInfo(); startInfo.UseShellExecute = false; @@ -14,6 +19,10 @@ public static ProcessResult Run(string fileName, string arguments) startInfo.CreateNoWindow = true; 
startInfo.FileName = fileName; startInfo.Arguments = arguments; + if (!string.IsNullOrEmpty(workingDirectory)) + { + startInfo.WorkingDirectory = workingDirectory; + } return Run(startInfo); } diff --git a/GVFS/GVFS.Hooks/GVFS.Hooks.csproj b/GVFS/GVFS.Hooks/GVFS.Hooks.csproj index 23dd6ea97..f5cd8a1eb 100644 --- a/GVFS/GVFS.Hooks/GVFS.Hooks.csproj +++ b/GVFS/GVFS.Hooks/GVFS.Hooks.csproj @@ -3,8 +3,13 @@ Exe net471 + true + + + + + + + + + + + + + diff --git a/GVFS/GVFS.Hooks/Program.Unstage.cs b/GVFS/GVFS.Hooks/Program.Unstage.cs new file mode 100644 index 000000000..bebe9b9a9 --- /dev/null +++ b/GVFS/GVFS.Hooks/Program.Unstage.cs @@ -0,0 +1,106 @@ +using GVFS.Common.NamedPipes; +using System; + +namespace GVFS.Hooks +{ + /// + /// Partial class for unstage-related pre-command handling. + /// Detects "restore --staged" and "checkout HEAD --" operations and sends + /// a PrepareForUnstage message to the GVFS mount process so it can add + /// staged files to ModifiedPaths before git clears skip-worktree. + /// + public partial class Program + { + /// + /// Sends a PrepareForUnstage message to the GVFS mount process, which will + /// add staged files matching the pathspec to ModifiedPaths so that git will + /// clear skip-worktree and process them. + /// + private static void SendPrepareForUnstageMessage(string command, string[] args) + { + UnstageCommandParser.PathspecResult pathspecResult = UnstageCommandParser.GetRestorePathspec(command, args); + + if (pathspecResult.Failed) + { + ExitWithError( + "VFS for Git was unable to determine the pathspecs for this unstage operation.", + "This can happen when --pathspec-from-file=- (stdin) is used.", + "", + "Instead, pass the paths directly on the command line:", + " git restore --staged ..."); + return; + } + + // Build the message body. 
Format: + // null/empty → all staged files (no pathspec) + // "path1\0path2" → inline pathspecs (null-separated) + // "\nF\n" → --pathspec-from-file (mount forwards to git) + // "\nFZ\n" → --pathspec-from-file with --pathspec-file-nul + // The leading \n distinguishes file-reference bodies from inline pathspecs. + string body; + if (pathspecResult.PathspecFromFile != null) + { + string prefix = pathspecResult.PathspecFileNul ? "\nFZ\n" : "\nF\n"; + body = prefix + pathspecResult.PathspecFromFile; + + // If there are also inline pathspecs, append them after another \n + if (!string.IsNullOrEmpty(pathspecResult.InlinePathspecs)) + { + body += "\n" + pathspecResult.InlinePathspecs; + } + } + else + { + body = pathspecResult.InlinePathspecs; + } + + string message = string.IsNullOrEmpty(body) + ? NamedPipeMessages.PrepareForUnstage.Request + : NamedPipeMessages.PrepareForUnstage.Request + "|" + body; + + bool succeeded = false; + string failureMessage = null; + + try + { + using (NamedPipeClient pipeClient = new NamedPipeClient(enlistmentPipename)) + { + if (pipeClient.Connect()) + { + pipeClient.SendRequest(message); + string rawResponse = pipeClient.ReadRawResponse(); + if (rawResponse != null && rawResponse.StartsWith(NamedPipeMessages.PrepareForUnstage.SuccessResult)) + { + succeeded = true; + } + else + { + failureMessage = "GVFS mount process returned failure for PrepareForUnstage."; + } + } + else + { + failureMessage = "Unable to connect to GVFS mount process."; + } + } + } + catch (Exception e) + { + failureMessage = "Exception communicating with GVFS: " + e.Message; + } + + if (!succeeded && failureMessage != null) + { + ExitWithError( + failureMessage, + "The unstage operation cannot safely proceed because GVFS was unable to", + "prepare the staged files. This could lead to index corruption.", + "", + "To resolve:", + " 1. Run 'gvfs unmount' and 'gvfs mount' to reset the GVFS state", + " 2. 
Retry the restore --staged command", + "If the problem persists, run 'gvfs repair' or re-clone the enlistment."); + } + } + } +} diff --git a/GVFS/GVFS.Hooks/Program.Worktree.cs b/GVFS/GVFS.Hooks/Program.Worktree.cs new file mode 100644 index 000000000..325532a37 --- /dev/null +++ b/GVFS/GVFS.Hooks/Program.Worktree.cs @@ -0,0 +1,390 @@ +using GVFS.Common; +using GVFS.Common.NamedPipes; +using GVFS.Hooks.HooksPlatform; +using System; +using System.IO; +using System.Linq; + +namespace GVFS.Hooks +{ + public partial class Program + { + private static string GetWorktreeSubcommand(string[] args) + { + return WorktreeCommandParser.GetSubcommand(args); + } + + /// + /// Gets a positional argument from git worktree subcommand args. + /// For 'add': git worktree add [options] <path> [<commit-ish>] + /// For 'remove': git worktree remove [options] <worktree> + /// For 'move': git worktree move [options] <worktree> <new-path> + /// + private static string GetWorktreePositionalArg(string[] args, int positionalIndex) + { + return WorktreeCommandParser.GetPositionalArg(args, positionalIndex); + } + + private static string GetWorktreePathArg(string[] args) + { + return WorktreeCommandParser.GetPathArg(args); + } + + private static void RunWorktreePreCommand(string[] args) + { + string subcommand = GetWorktreeSubcommand(args); + switch (subcommand) + { + case "add": + BlockNestedWorktreeAdd(args); + break; + case "remove": + HandleWorktreeRemove(args); + break; + case "move": + // Unmount at old location before git moves the directory + UnmountWorktreeByArg(args); + break; + } + } + + private static void RunWorktreePostCommand(string[] args) + { + string subcommand = GetWorktreeSubcommand(args); + switch (subcommand) + { + case "add": + MountNewWorktree(args); + break; + case "remove": + RemountWorktreeIfRemoveFailed(args); + CleanupSkipCleanCheckMarker(args); + break; + case "move": + // Mount at the new location after git moved the directory + MountMovedWorktree(args); + break; 
+ } + } + + private static void UnmountWorktreeByArg(string[] args) + { + string worktreePath = GetWorktreePathArg(args); + if (string.IsNullOrEmpty(worktreePath)) + { + return; + } + + string fullPath = ResolvePath(worktreePath); + if (!UnmountWorktree(fullPath)) + { + Console.Error.WriteLine( + $"error: failed to unmount worktree '{fullPath}'. Cannot proceed with move."); + Environment.Exit(1); + } + } + + /// + /// If the worktree directory and its .git file both still exist after + /// git worktree remove, the removal failed completely. Remount ProjFS + /// so the worktree remains usable. If the remove partially succeeded + /// (e.g., .git file or gitdir removed), don't attempt recovery. + /// + private static void RemountWorktreeIfRemoveFailed(string[] args) + { + string worktreePath = GetWorktreePathArg(args); + if (string.IsNullOrEmpty(worktreePath)) + { + return; + } + + string fullPath = ResolvePath(worktreePath); + string dotGitFile = Path.Combine(fullPath, ".git"); + if (Directory.Exists(fullPath) && File.Exists(dotGitFile)) + { + ProcessHelper.Run("gvfs", $"mount \"{fullPath}\"", redirectOutput: false); + } + } + + /// + /// Remove the skip-clean-check marker if it still exists after + /// worktree remove completes (e.g., if the remove failed and the + /// worktree gitdir was not deleted). + /// + private static void CleanupSkipCleanCheckMarker(string[] args) + { + string worktreePath = GetWorktreePathArg(args); + if (string.IsNullOrEmpty(worktreePath)) + { + return; + } + + string fullPath = ResolvePath(worktreePath); + GVFSEnlistment.WorktreeInfo wtInfo = GVFSEnlistment.TryGetWorktreeInfo(fullPath); + if (wtInfo != null) + { + string markerPath = Path.Combine(wtInfo.WorktreeGitDir, GVFSConstants.DotGit.SkipCleanCheckName); + if (File.Exists(markerPath)) + { + File.Delete(markerPath); + } + } + } + + /// + /// Block creating a worktree inside the primary VFS working directory + /// or inside any other existing worktree. 
+ /// ProjFS cannot handle nested virtualization roots. + /// + private static void BlockNestedWorktreeAdd(string[] args) + { + string worktreePath = GetWorktreePathArg(args); + if (string.IsNullOrEmpty(worktreePath)) + { + return; + } + + string fullPath = ResolvePath(worktreePath); + string primaryWorkingDir = Path.Combine(enlistmentRoot, GVFSConstants.WorkingDirectoryRootName); + + if (GVFSEnlistment.IsPathInsideDirectory(fullPath, primaryWorkingDir)) + { + Console.Error.WriteLine( + $"error: cannot create worktree inside the VFS working directory.\n" + + $"Create the worktree outside of '{primaryWorkingDir}'."); + Environment.Exit(1); + } + + string gitDir = Path.Combine(primaryWorkingDir, ".git"); + foreach (string existingWorktreePath in GVFSEnlistment.GetKnownWorktreePaths(gitDir)) + { + if (GVFSEnlistment.IsPathInsideDirectory(fullPath, existingWorktreePath)) + { + Console.Error.WriteLine( + $"error: cannot create worktree inside an existing worktree.\n" + + $"'{fullPath}' is inside worktree '{existingWorktreePath}'."); + Environment.Exit(1); + } + } + } + + private static void HandleWorktreeRemove(string[] args) + { + string worktreePath = GetWorktreePathArg(args); + if (string.IsNullOrEmpty(worktreePath)) + { + return; + } + + string fullPath = ResolvePath(worktreePath); + GVFSEnlistment.WorktreeInfo wtInfo = GVFSEnlistment.TryGetWorktreeInfo(fullPath); + + bool hasForce = args.Any(a => + a.Equals("--force", StringComparison.OrdinalIgnoreCase) || + a.Equals("-f", StringComparison.OrdinalIgnoreCase)); + + // Check if the worktree's GVFS mount is running by probing the pipe. 
+ bool isMounted = false; + if (wtInfo != null) + { + string pipeName = GVFSHooksPlatform.GetNamedPipeName(enlistmentRoot) + wtInfo.PipeSuffix; + using (NamedPipeClient pipeClient = new NamedPipeClient(pipeName)) + { + isMounted = pipeClient.Connect(500); + } + } + + if (!hasForce) + { + if (!isMounted) + { + Console.Error.WriteLine( + $"error: worktree '{fullPath}' is not mounted.\n" + + $"Mount it with 'gvfs mount \"{fullPath}\"' or use 'git worktree remove --force'."); + Environment.Exit(1); + } + + // Check for uncommitted changes while ProjFS is still mounted. + ProcessResult statusResult = ProcessHelper.Run( + "git", + $"-C \"{fullPath}\" status --porcelain", + redirectOutput: true); + + if (!string.IsNullOrWhiteSpace(statusResult.Output)) + { + Console.Error.WriteLine( + $"error: worktree '{fullPath}' has uncommitted changes.\n" + + $"Use 'git worktree remove --force' to remove it anyway."); + Environment.Exit(1); + } + } + else if (!isMounted) + { + // Force remove of unmounted worktree — nothing to unmount. + return; + } + + // Write a marker in the worktree gitdir that tells git.exe + // to skip the cleanliness check during worktree remove. + // We already did our own check above while ProjFS was alive. + string skipCleanCheck = Path.Combine(wtInfo.WorktreeGitDir, GVFSConstants.DotGit.SkipCleanCheckName); + File.WriteAllText(skipCleanCheck, string.Empty); + + // Unmount ProjFS before git deletes the worktree directory. 
+ if (!UnmountWorktree(fullPath, wtInfo) && !hasForce) + { + Console.Error.WriteLine( + $"error: failed to unmount worktree '{fullPath}'.\n" + + $"Use 'git worktree remove --force' to attempt removal anyway."); + Environment.Exit(1); + } + } + + private static bool UnmountWorktree(string fullPath) + { + GVFSEnlistment.WorktreeInfo wtInfo = GVFSEnlistment.TryGetWorktreeInfo(fullPath); + if (wtInfo == null) + { + return false; + } + + return UnmountWorktree(fullPath, wtInfo); + } + + private static bool UnmountWorktree(string fullPath, GVFSEnlistment.WorktreeInfo wtInfo) + { + ProcessResult result = ProcessHelper.Run("gvfs", $"unmount \"{fullPath}\"", redirectOutput: false); + + // After gvfs unmount exits, ProjFS handles may still be closing. + // Wait briefly to allow the OS to release all handles before git + // attempts to delete the worktree directory. + System.Threading.Thread.Sleep(200); + + return result.ExitCode == 0; + } + + private static void MountNewWorktree(string[] args) + { + string worktreePath = GetWorktreePathArg(args); + if (string.IsNullOrEmpty(worktreePath)) + { + return; + } + + string fullPath = ResolvePath(worktreePath); + + // Verify worktree was created (check for .git file) + string dotGitFile = Path.Combine(fullPath, ".git"); + if (File.Exists(dotGitFile)) + { + string worktreeError; + GVFSEnlistment.WorktreeInfo wtInfo = GVFSEnlistment.TryGetWorktreeInfo(fullPath, out worktreeError); + if (worktreeError != null) + { + Console.Error.WriteLine($"warning: failed to read worktree info for '{fullPath}': {worktreeError}"); + } + + // Store the primary enlistment root so mount/unmount can find + // it without deriving from path structure assumptions. + if (wtInfo?.WorktreeGitDir != null) + { + string markerPath = Path.Combine( + wtInfo.WorktreeGitDir, + GVFSEnlistment.WorktreeInfo.EnlistmentRootFileName); + File.WriteAllText(markerPath, enlistmentRoot); + } + + // Copy the primary's index to the worktree before checkout. 
+ // The primary index has all entries with correct skip-worktree + // bits. If the worktree targets the same commit, checkout is + // a no-op. If a different commit, git does an incremental + // update — much faster than building 2.5M entries from scratch. + if (wtInfo?.SharedGitDir != null) + { + string primaryIndex = Path.Combine(wtInfo.SharedGitDir, "index"); + string worktreeIndex = Path.Combine(wtInfo.WorktreeGitDir, "index"); + if (File.Exists(primaryIndex) && !File.Exists(worktreeIndex)) + { + // Copy to a temp file first, then rename atomically. + // The primary index may be updated concurrently by the + // running mount; a direct copy risks a torn read on + // large indexes (200MB+ in some large repos). + // Note: mirrors PhysicalFileSystem.TryCopyToTempFileAndRename + // but that method requires GVFSPlatform which is not + // available in the hooks process. + string tempIndex = worktreeIndex + ".tmp"; + try + { + File.Copy(primaryIndex, tempIndex, overwrite: true); + File.Move(tempIndex, worktreeIndex); + } + catch + { + try { File.Delete(tempIndex); } catch { } + throw; + } + } + } + + // Run checkout to reconcile the index with the worktree's HEAD. + // With a pre-populated index this is fast (incremental diff). + // Override core.virtualfilesystem with an empty script that + // returns .gitattributes so it gets materialized while all + // other entries keep skip-worktree set. + // + // Disable hooks via core.hookspath — the worktree's GVFS mount + // doesn't exist yet, so post-index-change would fail trying + // to connect to a pipe that hasn't been created. 
+ string emptyVfsHook = Path.Combine(fullPath, ".vfs-empty-hook"); + try + { + File.WriteAllText(emptyVfsHook, "#!/bin/sh\nprintf \".gitattributes\\n\"\n"); + string emptyVfsHookGitPath = emptyVfsHook.Replace('\\', '/'); + + ProcessHelper.Run( + "git", + $"-C \"{fullPath}\" -c core.virtualfilesystem=\"'{emptyVfsHookGitPath}'\" -c core.hookspath= checkout -f HEAD", + redirectOutput: false); + } + finally + { + File.Delete(emptyVfsHook); + } + + // Hydrate .gitattributes — copy from the primary enlistment. + if (wtInfo?.SharedGitDir != null) + { + string primarySrc = Path.GetDirectoryName(wtInfo.SharedGitDir); + string primaryGitattributes = Path.Combine(primarySrc, ".gitattributes"); + string worktreeGitattributes = Path.Combine(fullPath, ".gitattributes"); + if (File.Exists(primaryGitattributes) && !File.Exists(worktreeGitattributes)) + { + File.Copy(primaryGitattributes, worktreeGitattributes); + } + } + + // Now mount GVFS — the index exists for GitIndexProjection + ProcessHelper.Run("gvfs", $"mount \"{fullPath}\"", redirectOutput: false); + } + } + + private static void MountMovedWorktree(string[] args) + { + // git worktree move + // After move, the worktree is at + string newPath = GetWorktreePositionalArg(args, 1); + if (string.IsNullOrEmpty(newPath)) + { + return; + } + + string fullPath = ResolvePath(newPath); + + string dotGitFile = Path.Combine(fullPath, ".git"); + if (File.Exists(dotGitFile)) + { + ProcessHelper.Run("gvfs", $"mount \"{fullPath}\"", redirectOutput: false); + } + } + } +} diff --git a/GVFS/GVFS.Hooks/Program.cs b/GVFS/GVFS.Hooks/Program.cs index d48230a2a..c04f0c778 100644 --- a/GVFS/GVFS.Hooks/Program.cs +++ b/GVFS/GVFS.Hooks/Program.cs @@ -1,13 +1,17 @@ -using GVFS.Common; +using GVFS.Common; +using GVFS.Common.Git; using GVFS.Common.NamedPipes; +using GVFS.Common.Tracing; using GVFS.Hooks.HooksPlatform; using System; using System.Collections.Generic; +using System.IO; using System.Linq; +using System.Threading.Tasks; namespace 
GVFS.Hooks { - public class Program + public partial class Program { private const string PreCommandHook = "pre-command"; private const string PostCommandHook = "post-command"; @@ -19,6 +23,7 @@ public class Program private static string enlistmentRoot; private static string enlistmentPipename; + private static string normalizedCurrentDirectory; private static Random random = new Random(); private delegate void LockRequestDelegate(bool unattended, string[] args, int pid, NamedPipeClient pipeClient); @@ -35,7 +40,6 @@ public static void Main(string[] args) bool unattended = GVFSEnlistment.IsUnattended(tracer: null); string errorMessage; - string normalizedCurrentDirectory; if (!GVFSHooksPlatform.TryGetNormalizedPath(Environment.CurrentDirectory, out normalizedCurrentDirectory, out errorMessage)) { ExitWithError($"Failed to determine final path for current directory {Environment.CurrentDirectory}. Error: {errorMessage}"); @@ -50,6 +54,15 @@ public static void Main(string[] args) enlistmentPipename = GVFSHooksPlatform.GetNamedPipeName(enlistmentRoot); + // If running inside a worktree, append a worktree-specific + // suffix to the pipe name so hooks communicate with the + // correct GVFS mount instance. 
+ string worktreeSuffix = GVFSEnlistment.GetWorktreePipeSuffix(normalizedCurrentDirectory); + if (worktreeSuffix != null) + { + enlistmentPipename += worktreeSuffix; + } + switch (GetHookType(args)) { case PreCommandHook: @@ -65,6 +78,8 @@ public static void Main(string[] args) { RunLockRequest(args, unattended, ReleaseGVFSLock); } + + RunPostCommands(args); break; default: @@ -92,10 +107,19 @@ private static void RunPreCommands(string[] args) if (!ArgsBlockHydrationStatus(args) && ConfigurationAllowsHydrationStatus()) { - /* Display a message about the hydration status of the repo */ - ProcessHelper.Run("gvfs", "health --status", redirectOutput: false); + TryDisplayCachedHydrationStatus(); + } + break; + case "restore": + case "checkout": + if (UnstageCommandParser.IsUnstageOperation(command, args)) + { + SendPrepareForUnstageMessage(command, args); } break; + case "worktree": + RunWorktreePreCommand(args); + break; } } @@ -103,24 +127,89 @@ private static bool ArgsBlockHydrationStatus(string[] args) { return args.Any(arg => arg.StartsWith("--serialize", StringComparison.OrdinalIgnoreCase) - || arg.StartsWith("--porcelain", StringComparison.OrdinalIgnoreCase)); + || arg.StartsWith("--porcelain", StringComparison.OrdinalIgnoreCase) + || arg.Equals("--short", StringComparison.OrdinalIgnoreCase) + || HasShortFlag(arg, "s")); + } + + private static void RunPostCommands(string[] args) + { + string command = GetGitCommand(args); + switch (command) + { + case "worktree": + RunWorktreePostCommand(args); + break; + } + } + + private static string ResolvePath(string path) + { + return Path.GetFullPath( + Path.IsPathRooted(path) + ? 
path + : Path.Combine(normalizedCurrentDirectory, path)); + } + + private static bool HasShortFlag(string arg, string flag) + { + return arg.StartsWith("-") && !arg.StartsWith("--") && arg.Substring(1).Contains(flag); } private static bool ConfigurationAllowsHydrationStatus() { + using (LibGit2RepoInvoker repo = new LibGit2RepoInvoker(NullTracer.Instance, normalizedCurrentDirectory)) + { + return repo.GetConfigBoolOrDefault(GVFSConstants.GitConfig.ShowHydrationStatus, GVFSConstants.GitConfig.ShowHydrationStatusDefault); + } + } + + /// + /// Query the mount process for the cached hydration summary via named pipe. + /// The entire operation (connect + send + receive + parse) is bounded to + /// 100ms via Task.Wait. Exits silently on any failure — this must never block git status. + /// + private static void TryDisplayCachedHydrationStatus() + { + const int HydrationStatusTimeoutMs = 100; + const int ConnectTimeoutMs = 50; + try { - ProcessResult result = ProcessHelper.Run("git", $"config --get {GVFSConstants.GitConfig.ShowHydrationStatus}"); - bool hydrationStatusEnabled; - if (bool.TryParse(result.Output.Trim(), out hydrationStatusEnabled)) + Task task = Task.Run(() => + { + using (NamedPipeClient pipeClient = new NamedPipeClient(enlistmentPipename)) + { + if (!pipeClient.Connect(timeoutMilliseconds: ConnectTimeoutMs)) + { + return null; + } + + pipeClient.SendRequest(new NamedPipeMessages.Message(NamedPipeMessages.HydrationStatus.Request, null)); + NamedPipeMessages.Message response = pipeClient.ReadResponse(); + + if (response.Header == NamedPipeMessages.HydrationStatus.SuccessResult + && NamedPipeMessages.HydrationStatus.Response.TryParse(response.Body, out NamedPipeMessages.HydrationStatus.Response status)) + { + return status.ToDisplayMessage(); + } + + return null; + } + }); + + // Hard outer timeout — if the task hasn't completed (e.g., ReadResponse + // blocked on a stalled mount process), we abandon it. 
The orphaned thread + // is cleaned up when the hook process exits immediately after. + if (task.Wait(HydrationStatusTimeoutMs) && task.Status == TaskStatus.RanToCompletion && task.Result != null) { - return hydrationStatusEnabled; + Console.WriteLine(task.Result); } } catch (Exception) { + // Silently ignore — never block git status for hydration display } - return GVFSConstants.GitConfig.ShowHydrationStatusDefault; } private static void ExitWithError(params string[] messages) diff --git a/GVFS/GVFS.Hooks/UnstageCommandParser.cs b/GVFS/GVFS.Hooks/UnstageCommandParser.cs new file mode 100644 index 000000000..d03761821 --- /dev/null +++ b/GVFS/GVFS.Hooks/UnstageCommandParser.cs @@ -0,0 +1,173 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace GVFS.Hooks +{ + /// + /// Pure parsing logic for detecting and extracting pathspecs from + /// git unstage commands. Separated from Program.Unstage.cs so it + /// can be linked into the unit test project without pulling in the + /// rest of the Hooks assembly. + /// + public static class UnstageCommandParser + { + /// + /// Result of parsing pathspec arguments from a git unstage command. + /// + public class PathspecResult + { + /// Null-separated inline pathspecs, or empty for all staged files. + public string InlinePathspecs { get; set; } + + /// Path to a --pathspec-from-file, or null if not specified. + public string PathspecFromFile { get; set; } + + /// Whether --pathspec-file-nul was specified. + public bool PathspecFileNul { get; set; } + + /// True if parsing failed and the command should be blocked. + public bool Failed { get; set; } + } + + /// + /// Detects whether the git command is an unstage operation that may need + /// special handling for VFS projections. 
+ /// Matches: "restore --staged", "restore -S", "checkout HEAD --" + /// + public static bool IsUnstageOperation(string command, string[] args) + { + if (command == "restore") + { + return args.Any(arg => + arg.Equals("--staged", StringComparison.OrdinalIgnoreCase) || + // -S is --staged; char overload of IndexOf is case-sensitive, + // which is required because lowercase -s means --source + (arg.StartsWith("-") && !arg.StartsWith("--") && arg.IndexOf('S') >= 0)); + } + + if (command == "checkout") + { + // "checkout HEAD -- " is an unstage+restore operation. + // TODO: investigate whether "checkout -- " also + // needs PrepareForUnstage protection. It re-stages files (sets index to + // a different tree-ish) and could hit the same skip-worktree interference + // if the target files were staged by cherry-pick -n / merge and aren't in + // ModifiedPaths. Currently scoped to HEAD only as the common unstage case. + bool hasHead = args.Any(arg => arg.Equals("HEAD", StringComparison.OrdinalIgnoreCase)); + bool hasDashDash = args.Any(arg => arg == "--"); + return hasHead && hasDashDash; + } + + return false; + } + + /// + /// Extracts pathspec arguments from a restore/checkout unstage command. + /// Returns a containing either inline pathspecs, + /// a --pathspec-from-file reference, or a failure indicator. + /// + /// When --pathspec-from-file is specified, the file path is returned so the + /// caller can forward it through IPC to the mount process, which passes it + /// to git diff --cached --pathspec-from-file. + /// + public static PathspecResult GetRestorePathspec(string command, string[] args) + { + // args[0] = hook type, args[1] = git command, rest are arguments + List paths = new List(); + bool pastDashDash = false; + bool skipNext = false; + bool isCheckout = command == "checkout"; + + // For checkout, the first non-option arg before -- is the tree-ish (e.g. HEAD), + // not a pathspec. Track whether we've consumed it. 
+ bool treeishConsumed = false; + + // --pathspec-from-file support: collect the file path and nul flag + string pathspecFromFile = null; + bool pathspecFileNul = false; + bool captureNextAsPathspecFile = false; + + for (int i = 2; i < args.Length; i++) + { + string arg = args[i]; + + if (captureNextAsPathspecFile) + { + pathspecFromFile = arg; + captureNextAsPathspecFile = false; + continue; + } + + if (skipNext) + { + skipNext = false; + continue; + } + + if (arg.StartsWith("--git-pid=")) + continue; + + // Capture --pathspec-from-file value + if (arg.StartsWith("--pathspec-from-file=")) + { + pathspecFromFile = arg.Substring("--pathspec-from-file=".Length); + continue; + } + + if (arg == "--pathspec-from-file") + { + captureNextAsPathspecFile = true; + continue; + } + + if (arg == "--pathspec-file-nul") + { + pathspecFileNul = true; + continue; + } + + if (arg == "--") + { + pastDashDash = true; + continue; + } + + if (!pastDashDash && arg.StartsWith("-")) + { + // For restore: --source and -s take a following argument + if (!isCheckout && + (arg == "--source" || arg == "-s")) + { + skipNext = true; + } + + continue; + } + + // For checkout, the first positional arg before -- is the tree-ish + if (isCheckout && !pastDashDash && !treeishConsumed) + { + treeishConsumed = true; + continue; + } + + paths.Add(arg); + } + + // stdin ("-") is not supported in hook context — the hook's stdin + // is not connected to the user's terminal + if (pathspecFromFile == "-") + { + return new PathspecResult { Failed = true }; + } + + return new PathspecResult + { + InlinePathspecs = paths.Count > 0 ? 
string.Join("\0", paths) : "", + PathspecFromFile = pathspecFromFile, + PathspecFileNul = pathspecFileNul, + }; + } + } +} diff --git a/GVFS/GVFS.Installers/install.bat b/GVFS/GVFS.Installers/install.bat index 8375be193..4a186743a 100644 --- a/GVFS/GVFS.Installers/install.bat +++ b/GVFS/GVFS.Installers/install.bat @@ -1,18 +1,7 @@ @ECHO OFF SETLOCAL -REM Determine the correct architecture for the installer -IF "%PROCESSOR_ARCHITECTURE%"=="AMD64" ( - SET GIT_ARCH=64-bit -) ELSE IF "%PROCESSOR_ARCHITECTURE%"=="ARM64" ( - SET GIT_ARCH=arm64 -) ELSE ( - ECHO Unknown architecture: %PROCESSOR_ARCHITECTURE% - exit 1 -) - -REM Lookup full paths to Git and VFS for Git installers -FOR /F "tokens=* USEBACKQ" %%F IN ( `where /R %~dp0 Git*-%GIT_ARCH%.exe` ) DO SET GIT_INSTALLER=%%F +REM Lookup full path to VFS for Git installer FOR /F "tokens=* USEBACKQ" %%F IN ( `where /R %~dp0 SetupGVFS*.exe` ) DO SET GVFS_INSTALLER=%%F REM Create new empty directory for logs @@ -22,8 +11,5 @@ IF EXIST %LOGDIR% ( ) mkdir %LOGDIR% -ECHO Installing Git (%GIT_ARCH%)... -%GIT_INSTALLER% /LOG="%LOGDIR%\git.log" /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /ALLOWDOWNGRADE=1 - ECHO Installing VFS for Git... -%GVFS_INSTALLER% /LOG="%LOGDIR%\gvfs.log" /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /DIR="C:\Program Files\VFS for Git" +%GVFS_INSTALLER% /LOG="%LOGDIR%\gvfs.log" /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /DIR="C:\Program Files\VFS for Git" diff --git a/GVFS/GVFS.Mount/InProcessMount.cs b/GVFS/GVFS.Mount/InProcessMount.cs index 52426075b..6fedfb3ed 100644 --- a/GVFS/GVFS.Mount/InProcessMount.cs +++ b/GVFS/GVFS.Mount/InProcessMount.cs @@ -15,8 +15,10 @@ using System.Diagnostics; using System.IO; using System.Linq; +using System.Security; using System.Text; using System.Threading; +using System.Threading.Tasks; using static GVFS.Common.Git.LibGit2Repo; namespace GVFS.Mount @@ -33,6 +35,12 @@ public class InProcessMount // all the trees in a commit to ~2-3 seconds. 
private const int MissingTreeThresholdForDownloadingCommitPack = 200; + // Number of unique missing trees to track with LRU eviction. Eviction is commit-based: + // when capacity is reached, the LRU commit and all its unique trees are dropped to make room. + // Set to 20x the threshold so that enough trees can accumulate for the heuristic to + // reliably trigger a commit pack download. + private const int TrackedTreeCapacity = MissingTreeThresholdForDownloadingCommitPack * 20; + private readonly bool showDebugWindow; private FileSystemCallbacks fileSystemCallbacks; @@ -52,7 +60,7 @@ public class InProcessMount private HeartbeatThread heartbeat; private ManualResetEvent unmountEvent; - private readonly Dictionary treesWithDownloadedCommits = new Dictionary(); + private readonly MissingTreeTracker missingTreeTracker; // True if InProcessMount is calling git reset as part of processing // a folder dehydrate request @@ -67,6 +75,7 @@ public InProcessMount(ITracer tracer, GVFSEnlistment enlistment, CacheServerInfo this.enlistment = enlistment; this.showDebugWindow = showDebugWindow; this.unmountEvent = new ManualResetEvent(false); + this.missingTreeTracker = new MissingTreeTracker(tracer, TrackedTreeCapacity); } private enum MountState @@ -83,6 +92,68 @@ public void Mount(EventLevel verbosity, Keywords keywords) { this.currentState = MountState.Mounting; + // For worktree mounts, create the .gvfs metadata directory and + // bootstrap it with cache paths from the primary enlistment + if (this.enlistment.IsWorktree) + { + this.InitializeWorktreeMetadata(); + } + + string mountLockPath = Path.Combine(this.enlistment.DotGVFSRoot, GVFSConstants.DotGVFS.MountLock); + using (FileBasedLock mountLock = GVFSPlatform.Instance.CreateFileBasedLock( + new PhysicalFileSystem(), + this.tracer, + mountLockPath)) + { + if (!mountLock.TryAcquireLock(out Exception lockException)) + { + if (lockException is IOException) + { + this.FailMountAndExit(ReturnCode.MountAlreadyRunning, "Mount: 
Another mount process is already running."); + } + + this.FailMountAndExit("Mount: Failed to acquire mount lock: {0}", lockException.Message); + } + + this.MountWithLockAcquired(verbosity, keywords); + } + } + + private void MountWithLockAcquired(EventLevel verbosity, Keywords keywords) + { + // Start auth + config query immediately — these are network-bound and don't + // depend on repo metadata or cache paths. Every millisecond of network latency + // we can overlap with local I/O is a win. + // TryInitializeAndQueryGVFSConfig combines the anonymous probe, credential fetch, + // and config query into at most 2 HTTP requests (1 for anonymous repos), reusing + // the same HttpClient/TCP connection. + Stopwatch parallelTimer = Stopwatch.StartNew(); + + var networkTask = Task.Run(() => + { + Stopwatch sw = Stopwatch.StartNew(); + ServerGVFSConfig config; + string authConfigError; + + if (!this.enlistment.Authentication.TryInitializeAndQueryGVFSConfig( + this.tracer, this.enlistment, this.retryConfig, + out config, out authConfigError)) + { + if (this.cacheServer != null && !string.IsNullOrWhiteSpace(this.cacheServer.Url)) + { + this.tracer.RelatedWarning("Mount will proceed with fallback cache server: " + authConfigError); + config = null; + } + else + { + this.FailMountAndExit("Unable to query /gvfs/config" + Environment.NewLine + authConfigError); + } + } + + this.ValidateGVFSVersion(config); + this.tracer.RelatedInfo("ParallelMount: Auth + config completed in {0}ms", sw.ElapsedMilliseconds); + return config; + }); // We must initialize repo metadata before starting the pipe server so it // can immediately handle status requests string error; @@ -121,6 +192,58 @@ public void Mount(EventLevel verbosity, Keywords keywords) this.enlistment.InitializeCachePaths(localCacheRoot, gitObjectsRoot, blobSizesRoot); + // Local validations and git config run while we wait for the network + var localTask = Task.Run(() => + { + Stopwatch sw = Stopwatch.StartNew(); + + 
this.ValidateGitVersion(); + this.tracer.RelatedInfo("ParallelMount: ValidateGitVersion completed in {0}ms", sw.ElapsedMilliseconds); + + this.ValidateHooksVersion(); + this.ValidateFileSystemSupportsRequiredFeatures(); + + GitProcess git = new GitProcess(this.enlistment); + if (!git.IsValidRepo()) + { + this.FailMountAndExit("The .git folder is missing or has invalid contents"); + } + + if (!GVFSPlatform.Instance.FileSystem.IsFileSystemSupported(this.enlistment.EnlistmentRoot, out string fsError)) + { + this.FailMountAndExit("FileSystem unsupported: " + fsError); + } + + this.tracer.RelatedInfo("ParallelMount: Local validations completed in {0}ms", sw.ElapsedMilliseconds); + + if (!this.TrySetRequiredGitConfigSettings()) + { + this.FailMountAndExit("Unable to configure git repo"); + } + + this.LogEnlistmentInfoAndSetConfigValues(); + this.tracer.RelatedInfo("ParallelMount: Local validations + git config completed in {0}ms", sw.ElapsedMilliseconds); + }); + + try + { + Task.WaitAll(networkTask, localTask); + } + catch (AggregateException ae) + { + this.FailMountAndExit(ae.Flatten().InnerExceptions[0].Message); + } + + parallelTimer.Stop(); + this.tracer.RelatedInfo("ParallelMount: All parallel tasks completed in {0}ms", parallelTimer.ElapsedMilliseconds); + + ServerGVFSConfig serverGVFSConfig = networkTask.Result; + + CacheServerResolver cacheServerResolver = new CacheServerResolver(this.tracer, this.enlistment); + this.cacheServer = cacheServerResolver.ResolveNameFromRemote(this.cacheServer.Url, serverGVFSConfig); + + this.EnsureLocalCacheIsHealthy(serverGVFSConfig); + using (NamedPipeServer pipeServer = this.StartNamedPipe()) { this.tracer.RelatedEvent( @@ -138,7 +261,10 @@ public void Mount(EventLevel verbosity, Keywords keywords) this.ValidateMountPoints(); string errorMessage; - if (!HooksInstaller.TryUpdateHooks(this.context, out errorMessage)) + + // Worktrees share hooks with the primary enlistment via core.hookspath, + // so skip installation to avoid 
locking conflicts with the running mount. + if (!this.enlistment.IsWorktree && !HooksInstaller.TryUpdateHooks(this.context, out errorMessage)) { this.FailMountAndExit(errorMessage); } @@ -186,11 +312,101 @@ private void ValidateMountPoints() this.FailMountAndExit("Failed to initialize file system callbacks. Directory \"{0}\" must exist.", this.enlistment.WorkingDirectoryBackingRoot); } - string dotGitPath = Path.Combine(this.enlistment.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Root); - DirectoryInfo dotGitPathInfo = new DirectoryInfo(dotGitPath); - if (!dotGitPathInfo.Exists) + if (this.enlistment.IsWorktree) + { + // Worktrees have a .git file (not directory) pointing to the shared git dir + string dotGitFile = Path.Combine(this.enlistment.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Root); + if (!File.Exists(dotGitFile)) + { + this.FailMountAndExit("Failed to mount worktree. File \"{0}\" must exist.", dotGitFile); + } + } + else { - this.FailMountAndExit("Failed to mount. Directory \"{0}\" must exist.", dotGitPathInfo); + string dotGitPath = Path.Combine(this.enlistment.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Root); + DirectoryInfo dotGitPathInfo = new DirectoryInfo(dotGitPath); + if (!dotGitPathInfo.Exists) + { + this.FailMountAndExit("Failed to mount. Directory \"{0}\" must exist.", dotGitPathInfo); + } + } + } + + /// + /// For worktree mounts, create the .gvfs metadata directory and + /// bootstrap RepoMetadata with cache paths from the primary enlistment. 
+ /// + private void InitializeWorktreeMetadata() + { + string dotGVFSRoot = this.enlistment.DotGVFSRoot; + if (!Directory.Exists(dotGVFSRoot)) + { + try + { + Directory.CreateDirectory(dotGVFSRoot); + this.tracer.RelatedInfo($"Created worktree metadata directory: {dotGVFSRoot}"); + } + catch (Exception e) + { + this.FailMountAndExit("Failed to create worktree metadata directory '{0}': {1}", dotGVFSRoot, e.Message); + } + } + + // Bootstrap RepoMetadata from the primary enlistment's metadata. + // Use try/finally to guarantee Shutdown() even if an unexpected + // exception occurs — the singleton must not be left pointing at + // the primary's metadata directory. + string primaryDotGVFS = Path.Combine(this.enlistment.EnlistmentRoot, GVFSPlatform.Instance.Constants.DotGVFSRoot); + string error; + string gitObjectsRoot; + string localCacheRoot; + string blobSizesRoot; + + if (!RepoMetadata.TryInitialize(this.tracer, primaryDotGVFS, out error)) + { + this.FailMountAndExit("Failed to read primary enlistment metadata: " + error); + } + + try + { + if (!RepoMetadata.Instance.TryGetGitObjectsRoot(out gitObjectsRoot, out error)) + { + this.FailMountAndExit("Failed to read git objects root from primary metadata: " + error); + } + + if (!RepoMetadata.Instance.TryGetLocalCacheRoot(out localCacheRoot, out error)) + { + this.FailMountAndExit("Failed to read local cache root from primary metadata: " + error); + } + + if (!RepoMetadata.Instance.TryGetBlobSizesRoot(out blobSizesRoot, out error)) + { + this.FailMountAndExit("Failed to read blob sizes root from primary metadata: " + error); + } + } + finally + { + RepoMetadata.Shutdown(); + } + + // Initialize cache paths on the enlistment so SaveCloneMetadata + // can persist them into the worktree's metadata + this.enlistment.InitializeCachePaths(localCacheRoot, gitObjectsRoot, blobSizesRoot); + + // Initialize the worktree's own metadata with cache paths, + // disk layout version, and a new enlistment ID + if 
(!RepoMetadata.TryInitialize(this.tracer, dotGVFSRoot, out error)) + { + this.FailMountAndExit("Failed to initialize worktree metadata: " + error); + } + + try + { + RepoMetadata.Instance.SaveCloneMetadata(this.tracer, this.enlistment); + } + finally + { + RepoMetadata.Shutdown(); } } @@ -208,6 +424,11 @@ private NamedPipeServer StartNamedPipe() } private void FailMountAndExit(string error, params object[] args) + { + this.FailMountAndExit(ReturnCode.GenericError, error, args); + } + + private void FailMountAndExit(ReturnCode returnCode, string error, params object[] args) { this.currentState = MountState.MountFailed; @@ -224,7 +445,7 @@ private void FailMountAndExit(string error, params object[] args) this.fileSystemCallbacks = null; } - Environment.Exit((int)ReturnCode.GenericError); + Environment.Exit((int)returnCode); } private T CreateOrReportAndExit(Func factory, string reportMessage) @@ -274,6 +495,10 @@ private void HandleRequest(ITracer tracer, string request, NamedPipeServer.Conne this.HandlePostIndexChangedRequest(message, connection); break; + case NamedPipeMessages.PrepareForUnstage.Request: + this.HandlePrepareForUnstageRequest(message, connection); + break; + case NamedPipeMessages.RunPostFetchJob.PostFetchJob: this.HandlePostFetchJobRequest(message, connection); break; @@ -282,6 +507,10 @@ private void HandleRequest(ITracer tracer, string request, NamedPipeServer.Conne this.HandleDehydrateFolders(message, connection); break; + case NamedPipeMessages.HydrationStatus.Request: + this.HandleGetHydrationStatusRequest(connection); + break; + default: EventMetadata metadata = new EventMetadata(); metadata.Add("Area", "Mount"); @@ -293,9 +522,37 @@ private void HandleRequest(ITracer tracer, string request, NamedPipeServer.Conne } } + private void HandleGetHydrationStatusRequest(NamedPipeServer.Connection connection) + { + EnlistmentHydrationSummary summary = this.fileSystemCallbacks?.GetCachedHydrationSummary(); + if (summary == null || !summary.IsValid) + 
{ + this.tracer.RelatedInfo( + $"{nameof(this.HandleGetHydrationStatusRequest)}: " + + (summary == null ? "No cached hydration summary available yet" : "Cached hydration summary is invalid")); + + connection.TrySendResponse( + new NamedPipeMessages.Message(NamedPipeMessages.HydrationStatus.NotAvailableResult, null)); + return; + } + + NamedPipeMessages.HydrationStatus.Response response = new NamedPipeMessages.HydrationStatus.Response + { + PlaceholderFileCount = summary.PlaceholderFileCount, + PlaceholderFolderCount = summary.PlaceholderFolderCount, + ModifiedFileCount = summary.ModifiedFileCount, + ModifiedFolderCount = summary.ModifiedFolderCount, + TotalFileCount = summary.TotalFileCount, + TotalFolderCount = summary.TotalFolderCount, + }; + + connection.TrySendResponse( + new NamedPipeMessages.Message(NamedPipeMessages.HydrationStatus.SuccessResult, response.ToBody())); + } + private void HandleDehydrateFolders(NamedPipeMessages.Message message, NamedPipeServer.Connection connection) { - NamedPipeMessages.DehydrateFolders.Request request = new NamedPipeMessages.DehydrateFolders.Request(message); + NamedPipeMessages.DehydrateFolders.Request request = NamedPipeMessages.DehydrateFolders.Request.FromMessage(message); EventMetadata metadata = new EventMetadata(); metadata.Add(nameof(request.Folders), request.Folders); @@ -308,7 +565,9 @@ private void HandleDehydrateFolders(NamedPipeMessages.Message message, NamedPipe response = new NamedPipeMessages.DehydrateFolders.Response(NamedPipeMessages.DehydrateFolders.DehydratedResult); string[] folders = request.Folders.Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries); StringBuilder resetFolderPaths = new StringBuilder(); - foreach (string folder in folders) + List<string> movedFolders = BackupFoldersWhileUnmounted(request, response, folders); + + foreach (string folder in movedFolders) { if (this.fileSystemCallbacks.TryDehydrateFolder(folder, out string errorMessage)) {
HandleDehydrateFolders(NamedPipeMessages.Message message, NamedPipe connection.TrySendResponse(response.CreateMessage()); } + private List<string> BackupFoldersWhileUnmounted(NamedPipeMessages.DehydrateFolders.Request request, NamedPipeMessages.DehydrateFolders.Response response, string[] folders) + { + /* We can't move folders while the virtual file system is mounted, so unmount it first. + * After moving the folders, remount the virtual file system. + */ + + var movedFolders = new List<string>(); + try + { + /* Set to "Mounting" instead of "Unmounting" so that incoming requests + * that are rejected will know they can try again soon. + */ + this.currentState = MountState.Mounting; + this.UnmountAndStopWorkingDirectoryCallbacks(willRemountInSameProcess: true); + foreach (string folder in folders) + { + try + { + var source = Path.Combine(this.enlistment.WorkingDirectoryBackingRoot, folder); + var destination = Path.Combine(request.BackupFolderPath, folder); + var destinationParent = Path.GetDirectoryName(destination); + this.context.FileSystem.CreateDirectory(destinationParent); + if (this.context.FileSystem.DirectoryExists(source)) + { + this.context.FileSystem.MoveDirectory(source, destination); + } + movedFolders.Add(folder); + } + catch (Exception ex) + { + response.FailedFolders.Add($"{folder}\0{ex.Message}"); + continue; + } + } + } + finally + { + this.MountAndStartWorkingDirectoryCallbacks(this.cacheServer, alreadyInitialized: true); + this.currentState = MountState.Ready; + } + + return movedFolders; + } + private void HandleLockRequest(string messageBody, NamedPipeServer.Connection connection) { NamedPipeMessages.AcquireLock.Response response; @@ -467,6 +770,53 @@ private void HandlePostIndexChangedRequest(NamedPipeMessages.Message message, Na connection.TrySendResponse(response.CreateMessage()); } + /// <summary> + /// Handles a request to prepare for an unstage operation (e.g., restore --staged).
+ /// Finds index entries that are staged (not in HEAD) with skip-worktree set and adds + /// them to ModifiedPaths so that git will clear skip-worktree and process them. + /// Also forces a projection update to fix stale placeholders for modified/deleted files. + /// </summary> + private void HandlePrepareForUnstageRequest(NamedPipeMessages.Message message, NamedPipeServer.Connection connection) + { + NamedPipeMessages.PrepareForUnstage.Response response; + + if (this.currentState != MountState.Ready) + { + response = new NamedPipeMessages.PrepareForUnstage.Response(NamedPipeMessages.MountNotReadyResult); + } + else + { + try + { + string pathspec = message.Body; + bool success = this.fileSystemCallbacks.AddStagedFilesToModifiedPaths(pathspec, out int addedCount); + + EventMetadata metadata = new EventMetadata(); + metadata.Add("addedToModifiedPaths", addedCount); + metadata.Add("pathspec", pathspec ?? "(all)"); + metadata.Add("success", success); + this.tracer.RelatedEvent( + EventLevel.Informational, + nameof(this.HandlePrepareForUnstageRequest), + metadata); + + response = new NamedPipeMessages.PrepareForUnstage.Response( + success + ?
NamedPipeMessages.PrepareForUnstage.SuccessResult + : NamedPipeMessages.PrepareForUnstage.FailureResult); + } + catch (Exception e) + { + EventMetadata metadata = new EventMetadata(); + metadata.Add("Exception", e.ToString()); + this.tracer.RelatedError(metadata, nameof(this.HandlePrepareForUnstageRequest) + " failed"); + response = new NamedPipeMessages.PrepareForUnstage.Response(NamedPipeMessages.PrepareForUnstage.FailureResult); + } + } + + connection.TrySendResponse(response.CreateMessage()); + } + private void HandleModifiedPathsListRequest(NamedPipeMessages.Message message, NamedPipeServer.Connection connection) { NamedPipeMessages.ModifiedPaths.Response response; @@ -544,19 +894,20 @@ private void HandleDownloadObjectRequest(NamedPipeMessages.Message message, Name this.context.Repository.GVFSLock.Stats.RecordObjectDownload(objectType == Native.ObjectTypes.Blob, downloadTime.ElapsedMilliseconds); if (objectType == Native.ObjectTypes.Commit - && !this.PrefetchHasBeenDone() && !this.context.Repository.CommitAndRootTreeExists(objectSha, out var treeSha) && !string.IsNullOrEmpty(treeSha)) { /* If a commit is downloaded, it wasn't prefetched. - * If any prefetch has been done, there is probably a commit in the prefetch packs that is close enough that - * loose object download of missing trees will be faster than downloading a pack of all the trees for the commit. - * Otherwise, the trees for the commit may be needed soon depending on the context. + * The trees for the commit may be needed soon depending on the context. * e.g. git log (without a pathspec) doesn't need trees, but git checkout does. - * + * + * If any prefetch has been done there is probably a similar commit/tree in the graph, + * but in case there isn't (such as if the cache server repack maintenance job is failing) + * we should still try to avoid downloading an excessive number of loose trees for a commit. 
+ * * Save the tree/commit so if more trees are requested we can download all the trees for the commit in a batch. */ - this.treesWithDownloadedCommits[treeSha] = objectSha; + this.missingTreeTracker.AddMissingRootTree(treeSha: treeSha, commitSha: objectSha); } } } @@ -564,22 +915,11 @@ private void HandleDownloadObjectRequest(NamedPipeMessages.Message message, Name connection.TrySendResponse(response.CreateMessage()); } - private bool PrefetchHasBeenDone() - { - var prefetchPacks = this.gitObjects.ReadPackFileNames(this.enlistment.GitPackRoot, GVFSConstants.PrefetchPackPrefix); - var result = prefetchPacks.Length > 0; - if (result) - { - this.treesWithDownloadedCommits.Clear(); - } - return result; - } - private bool ShouldDownloadCommitPack(string objectSha, out string commitSha) { - if (!this.treesWithDownloadedCommits.TryGetValue(objectSha, out commitSha) - || this.PrefetchHasBeenDone()) + if (!this.missingTreeTracker.TryGetCommits(objectSha, out string[] commitShas)) { + commitSha = null; return false; } @@ -588,20 +928,19 @@ private bool ShouldDownloadCommitPack(string objectSha, out string commitSha) * Conversely, if we know (from previously downloaded missing trees) that a commit has a lot of missing * trees left, we'll probably need to download many more trees for the commit so we should download the pack. */ - var commitShaLocal = commitSha; // can't use out parameter in lambda - int missingTreeCount = this.treesWithDownloadedCommits.Where(x => x.Value == commitShaLocal).Count(); + int missingTreeCount = this.missingTreeTracker.GetHighestMissingTreeCount(commitShas, out commitSha); + return missingTreeCount > MissingTreeThresholdForDownloadingCommitPack; } private void UpdateTreesForDownloadedCommits(string objectSha) { /* If we are downloading missing trees, we probably are missing more trees for the commit. 
- * Update our list of trees associated with the commit so we can use the # of missing trees + * Update our list of trees associated with the commit so we can use the # of missing trees * as a heuristic to decide whether to batch download all the trees for the commit the * next time a missing one is requested. */ - if (!this.treesWithDownloadedCommits.TryGetValue(objectSha, out var commitSha) - || this.PrefetchHasBeenDone()) + if (!this.missingTreeTracker.TryGetCommits(objectSha, out _)) { return; } @@ -614,20 +953,13 @@ private void UpdateTreesForDownloadedCommits(string objectSha) if (this.context.Repository.TryGetMissingSubTrees(objectSha, out var missingSubTrees)) { - foreach (var missingSubTree in missingSubTrees) - { - this.treesWithDownloadedCommits[missingSubTree] = commitSha; - } + this.missingTreeTracker.AddMissingSubTrees(objectSha, missingSubTrees); } } private void DownloadedCommitPack(string commitSha) { - var toRemove = this.treesWithDownloadedCommits.Where(x => x.Value == commitSha).ToList(); - foreach (var tree in toRemove) - { - this.treesWithDownloadedCommits.Remove(tree.Key); - } + this.missingTreeTracker.MarkCommitComplete(commitSha); } private void HandlePostFetchJobRequest(NamedPipeMessages.Message message, NamedPipeServer.Connection connection) @@ -723,13 +1055,9 @@ private void HandleUnmountRequest(NamedPipeServer.Connection connection) } } - private void MountAndStartWorkingDirectoryCallbacks(CacheServerInfo cache) + private void MountAndStartWorkingDirectoryCallbacks(CacheServerInfo cache, bool alreadyInitialized = false) { string error; - if (!this.context.Enlistment.Authentication.TryInitialize(this.context.Tracer, this.context.Enlistment, out error)) - { - this.FailMountAndExit("Failed to obtain git credentials: " + error); - } GitObjectsHttpRequestor objectRequestor = new GitObjectsHttpRequestor(this.context.Tracer, this.context.Enlistment, cache, this.retryConfig); this.gitObjects = new GVFSGitObjects(this.context, objectRequestor); 
@@ -763,19 +1091,22 @@ private void MountAndStartWorkingDirectoryCallbacks(CacheServerInfo cache) }, "Failed to create src folder callback listener"); this.maintenanceScheduler = this.CreateOrReportAndExit(() => new GitMaintenanceScheduler(this.context, this.gitObjects), "Failed to start maintenance scheduler"); - int majorVersion; - int minorVersion; - if (!RepoMetadata.Instance.TryGetOnDiskLayoutVersion(out majorVersion, out minorVersion, out error)) + if (!alreadyInitialized) { - this.FailMountAndExit("Error: {0}", error); - } + int majorVersion; + int minorVersion; + if (!RepoMetadata.Instance.TryGetOnDiskLayoutVersion(out majorVersion, out minorVersion, out error)) + { + this.FailMountAndExit("Error: {0}", error); + } - if (majorVersion != GVFSPlatform.Instance.DiskLayoutUpgrade.Version.CurrentMajorVersion) - { - this.FailMountAndExit( - "Error: On disk version ({0}) does not match current version ({1})", - majorVersion, - GVFSPlatform.Instance.DiskLayoutUpgrade.Version.CurrentMajorVersion); + if (majorVersion != GVFSPlatform.Instance.DiskLayoutUpgrade.Version.CurrentMajorVersion) + { + this.FailMountAndExit( + "Error: On disk version ({0}) does not match current version ({1})", + majorVersion, + GVFSPlatform.Instance.DiskLayoutUpgrade.Version.CurrentMajorVersion); + } } try @@ -794,7 +1125,400 @@ private void MountAndStartWorkingDirectoryCallbacks(CacheServerInfo cache) this.heartbeat.Start(); } - private void UnmountAndStopWorkingDirectoryCallbacks() + private void ValidateGitVersion() + { + GitVersion gitVersion = null; + if (string.IsNullOrEmpty(this.enlistment.GitBinPath) || !GitProcess.TryGetVersion(this.enlistment.GitBinPath, out gitVersion, out string _)) + { + this.FailMountAndExit("Error: Unable to retrieve the Git version"); + } + + this.enlistment.SetGitVersion(gitVersion.ToString()); + + if (gitVersion.Platform != GVFSConstants.SupportedGitVersion.Platform) + { + this.FailMountAndExit("Error: Invalid version of Git {0}. 
Must use vfs version.", gitVersion); + } + + if (gitVersion.IsLessThan(GVFSConstants.SupportedGitVersion)) + { + this.FailMountAndExit( + "Error: Installed Git version {0} is less than the minimum supported version of {1}.", + gitVersion, + GVFSConstants.SupportedGitVersion); + } + else if (gitVersion.Revision != GVFSConstants.SupportedGitVersion.Revision) + { + this.FailMountAndExit( + "Error: Installed Git version {0} has revision number {1} instead of {2}." + + " This Git version is too new, so either downgrade Git or upgrade VFS for Git." + + " The minimum supported version of Git is {3}.", + gitVersion, + gitVersion.Revision, + GVFSConstants.SupportedGitVersion.Revision, + GVFSConstants.SupportedGitVersion); + } + } + + private void ValidateHooksVersion() + { + string hooksVersion; + string error; + if (!GVFSPlatform.Instance.TryGetGVFSHooksVersion(out hooksVersion, out error)) + { + this.FailMountAndExit(error); + } + + string gvfsVersion = ProcessHelper.GetCurrentProcessVersion(); + if (hooksVersion != gvfsVersion) + { + this.FailMountAndExit("GVFS.Hooks version ({0}) does not match GVFS version ({1}).", hooksVersion, gvfsVersion); + } + + this.enlistment.SetGVFSHooksVersion(hooksVersion); + } + + private void ValidateFileSystemSupportsRequiredFeatures() + { + try + { + string warning; + string error; + if (!GVFSPlatform.Instance.KernelDriver.IsSupported(this.enlistment.EnlistmentRoot, out warning, out error)) + { + this.FailMountAndExit("Error: {0}", error); + } + } + catch (Exception e) + { + EventMetadata metadata = new EventMetadata(); + metadata.Add("Exception", e.ToString()); + this.tracer.RelatedError(metadata, "Failed to determine if file system supports features required by GVFS"); + this.FailMountAndExit("Error: Failed to determine if file system supports features required by GVFS."); + } + } + + private ServerGVFSConfig QueryAndValidateGVFSConfig() + { + ServerGVFSConfig serverGVFSConfig = null; + string errorMessage = null; + + using 
(ConfigHttpRequestor configRequestor = new ConfigHttpRequestor(this.tracer, this.enlistment, this.retryConfig)) + { + const bool LogErrors = true; + if (!configRequestor.TryQueryGVFSConfig(LogErrors, out serverGVFSConfig, out _, out errorMessage)) + { + // If we have a valid cache server, continue without config (matches verb fallback behavior) + if (this.cacheServer != null && !string.IsNullOrWhiteSpace(this.cacheServer.Url)) + { + this.tracer.RelatedWarning("Unable to query /gvfs/config: " + errorMessage); + serverGVFSConfig = null; + } + else + { + this.FailMountAndExit("Unable to query /gvfs/config" + Environment.NewLine + errorMessage); + } + } + } + + this.ValidateGVFSVersion(serverGVFSConfig); + + return serverGVFSConfig; + } + + private void ValidateGVFSVersion(ServerGVFSConfig config) + { + using (ITracer activity = this.tracer.StartActivity("ValidateGVFSVersion", EventLevel.Informational)) + { + if (ProcessHelper.IsDevelopmentVersion()) + { + return; + } + + string recordedVersion = ProcessHelper.GetCurrentProcessVersion(); + int plus = recordedVersion.IndexOf('+'); + Version currentVersion = new Version(plus < 0 ? recordedVersion : recordedVersion.Substring(0, plus)); + IEnumerable<ServerGVFSConfig.VersionRange> allowedGvfsClientVersions = + config != null + ?
config.AllowedGVFSClientVersions + : null; + + if (allowedGvfsClientVersions == null || !allowedGvfsClientVersions.Any()) + { + string warningMessage = "WARNING: Unable to validate your GVFS version" + Environment.NewLine; + if (config == null) + { + warningMessage += "Could not query valid GVFS versions from: " + Uri.EscapeUriString(this.enlistment.RepoUrl); + } + else + { + warningMessage += "Server not configured to provide supported GVFS versions"; + } + + this.tracer.RelatedWarning(warningMessage); + return; + } + + foreach (ServerGVFSConfig.VersionRange versionRange in config.AllowedGVFSClientVersions) + { + if (currentVersion >= versionRange.Min && + (versionRange.Max == null || currentVersion <= versionRange.Max)) + { + activity.RelatedEvent( + EventLevel.Informational, + "GVFSVersionValidated", + new EventMetadata + { + { "SupportedVersionRange", versionRange }, + }); + + this.enlistment.SetGVFSVersion(currentVersion.ToString()); + return; + } + } + + activity.RelatedError("GVFS version {0} is not supported", currentVersion); + this.FailMountAndExit("ERROR: Your GVFS version is no longer supported. 
Install the latest and try again."); + } + } + + private void EnsureLocalCacheIsHealthy(ServerGVFSConfig serverGVFSConfig) + { + if (!Directory.Exists(this.enlistment.LocalCacheRoot)) + { + try + { + this.tracer.RelatedInfo($"{nameof(this.EnsureLocalCacheIsHealthy)}: Local cache root: {this.enlistment.LocalCacheRoot} missing, recreating it"); + Directory.CreateDirectory(this.enlistment.LocalCacheRoot); + } + catch (Exception e) + { + EventMetadata metadata = new EventMetadata(); + metadata.Add("Exception", e.ToString()); + metadata.Add("enlistment.LocalCacheRoot", this.enlistment.LocalCacheRoot); + this.tracer.RelatedError(metadata, $"{nameof(this.EnsureLocalCacheIsHealthy)}: Exception while trying to create local cache root"); + this.FailMountAndExit("Failed to create local cache: " + this.enlistment.LocalCacheRoot); + } + } + + PhysicalFileSystem fileSystem = new PhysicalFileSystem(); + if (Directory.Exists(this.enlistment.GitObjectsRoot)) + { + bool gitObjectsRootInAlternates = false; + string alternatesFilePath = Path.Combine(this.enlistment.DotGitRoot, GVFSConstants.DotGit.Objects.Info.AlternatesRelativePath); + if (File.Exists(alternatesFilePath)) + { + try + { + using (Stream stream = fileSystem.OpenFileStream( + alternatesFilePath, + FileMode.Open, + FileAccess.Read, + FileShare.ReadWrite, + callFlushFileBuffers: false)) + { + using (StreamReader reader = new StreamReader(stream)) + { + while (!reader.EndOfStream) + { + string alternatesLine = reader.ReadLine(); + if (string.Equals(alternatesLine, this.enlistment.GitObjectsRoot, GVFSPlatform.Instance.Constants.PathComparison)) + { + gitObjectsRootInAlternates = true; + } + } + } + } + } + catch (Exception e) + { + EventMetadata exceptionMetadata = new EventMetadata(); + exceptionMetadata.Add("Exception", e.ToString()); + this.tracer.RelatedError(exceptionMetadata, $"{nameof(this.EnsureLocalCacheIsHealthy)}: Exception while trying to validate alternates file"); + this.FailMountAndExit($"Failed to validate 
that alternates file includes git objects root: {e.Message}"); + } + } + else + { + this.tracer.RelatedInfo($"{nameof(this.EnsureLocalCacheIsHealthy)}: Alternates file not found"); + } + + if (!gitObjectsRootInAlternates) + { + this.tracer.RelatedInfo($"{nameof(this.EnsureLocalCacheIsHealthy)}: GitObjectsRoot ({this.enlistment.GitObjectsRoot}) missing from alternates files, recreating alternates"); + string error; + if (!this.TryCreateAlternatesFile(fileSystem, out error)) + { + this.FailMountAndExit($"Failed to update alternates file to include git objects root: {error}"); + } + } + } + else + { + this.tracer.RelatedInfo($"{nameof(this.EnsureLocalCacheIsHealthy)}: GitObjectsRoot ({this.enlistment.GitObjectsRoot}) missing, determining new root"); + + if (serverGVFSConfig == null) + { + using (ConfigHttpRequestor configRequestor = new ConfigHttpRequestor(this.tracer, this.enlistment, this.retryConfig)) + { + string configError; + if (!configRequestor.TryQueryGVFSConfig(true, out serverGVFSConfig, out _, out configError)) + { + this.FailMountAndExit("Unable to query /gvfs/config" + Environment.NewLine + configError); + } + } + } + + string localCacheKey; + string error; + LocalCacheResolver localCacheResolver = new LocalCacheResolver(this.enlistment); + if (!localCacheResolver.TryGetLocalCacheKeyFromLocalConfigOrRemoteCacheServers( + this.tracer, + serverGVFSConfig, + this.cacheServer, + this.enlistment.LocalCacheRoot, + localCacheKey: out localCacheKey, + errorMessage: out error)) + { + this.FailMountAndExit($"Previous git objects root ({this.enlistment.GitObjectsRoot}) not found, and failed to determine new local cache key: {error}"); + } + + EventMetadata keyMetadata = new EventMetadata(); + keyMetadata.Add("localCacheRoot", this.enlistment.LocalCacheRoot); + keyMetadata.Add("localCacheKey", localCacheKey); + keyMetadata.Add(TracingConstants.MessageKey.InfoMessage, "Initializing and persisting updated paths"); + this.tracer.RelatedEvent(EventLevel.Informational, 
"EnsureLocalCacheIsHealthy_InitializePathsFromKey", keyMetadata); + this.enlistment.InitializeCachePathsFromKey(this.enlistment.LocalCacheRoot, localCacheKey); + + this.tracer.RelatedInfo($"{nameof(this.EnsureLocalCacheIsHealthy)}: Creating GitObjectsRoot ({this.enlistment.GitObjectsRoot}), GitPackRoot ({this.enlistment.GitPackRoot}), and BlobSizesRoot ({this.enlistment.BlobSizesRoot})"); + try + { + Directory.CreateDirectory(this.enlistment.GitObjectsRoot); + Directory.CreateDirectory(this.enlistment.GitPackRoot); + } + catch (Exception e) + { + EventMetadata exceptionMetadata = new EventMetadata(); + exceptionMetadata.Add("Exception", e.ToString()); + exceptionMetadata.Add("enlistment.GitObjectsRoot", this.enlistment.GitObjectsRoot); + exceptionMetadata.Add("enlistment.GitPackRoot", this.enlistment.GitPackRoot); + this.tracer.RelatedError(exceptionMetadata, $"{nameof(this.EnsureLocalCacheIsHealthy)}: Exception while trying to create objects and pack folders"); + this.FailMountAndExit("Failed to create objects and pack folders"); + } + + this.tracer.RelatedInfo($"{nameof(this.EnsureLocalCacheIsHealthy)}: Creating new alternates file"); + if (!this.TryCreateAlternatesFile(fileSystem, out error)) + { + this.FailMountAndExit($"Failed to update alternates file with new objects path: {error}"); + } + + this.tracer.RelatedInfo($"{nameof(this.EnsureLocalCacheIsHealthy)}: Saving git objects root ({this.enlistment.GitObjectsRoot}) in repo metadata"); + RepoMetadata.Instance.SetGitObjectsRoot(this.enlistment.GitObjectsRoot); + + this.tracer.RelatedInfo($"{nameof(this.EnsureLocalCacheIsHealthy)}: Saving blob sizes root ({this.enlistment.BlobSizesRoot}) in repo metadata"); + RepoMetadata.Instance.SetBlobSizesRoot(this.enlistment.BlobSizesRoot); + } + + if (!Directory.Exists(this.enlistment.BlobSizesRoot)) + { + this.tracer.RelatedInfo($"{nameof(this.EnsureLocalCacheIsHealthy)}: BlobSizesRoot ({this.enlistment.BlobSizesRoot}) not found, re-creating"); + try + { + 
Directory.CreateDirectory(this.enlistment.BlobSizesRoot); + } + catch (Exception e) + { + EventMetadata exceptionMetadata = new EventMetadata(); + exceptionMetadata.Add("Exception", e.ToString()); + exceptionMetadata.Add("enlistment.BlobSizesRoot", this.enlistment.BlobSizesRoot); + this.tracer.RelatedError(exceptionMetadata, $"{nameof(this.EnsureLocalCacheIsHealthy)}: Exception while trying to create blob sizes folder"); + this.FailMountAndExit("Failed to create blob sizes folder"); + } + } + } + + private bool TryCreateAlternatesFile(PhysicalFileSystem fileSystem, out string errorMessage) + { + try + { + string alternatesFilePath = Path.Combine(this.enlistment.DotGitRoot, GVFSConstants.DotGit.Objects.Info.AlternatesRelativePath); + string tempFilePath = alternatesFilePath + ".tmp"; + fileSystem.WriteAllText(tempFilePath, this.enlistment.GitObjectsRoot); + fileSystem.MoveAndOverwriteFile(tempFilePath, alternatesFilePath); + } + catch (SecurityException e) { errorMessage = e.Message; return false; } + catch (IOException e) { errorMessage = e.Message; return false; } + + errorMessage = null; + return true; + } + + private bool TrySetRequiredGitConfigSettings() + { + Dictionary<string, string> requiredSettings = RequiredGitConfig.GetRequiredSettings(this.enlistment); + + GitProcess git = new GitProcess(this.enlistment); + + Dictionary<string, GitConfigSetting> existingConfigSettings; + if (!git.TryGetAllConfig(localOnly: true, configSettings: out existingConfigSettings)) + { + return false; + } + + foreach (KeyValuePair<string, string> setting in requiredSettings) + { + GitConfigSetting existingSetting; + if (setting.Value != null) + { + if (!existingConfigSettings.TryGetValue(setting.Key, out existingSetting) || + !existingSetting.HasValue(setting.Value)) + { + GitProcess.Result setConfigResult = git.SetInLocalConfig(setting.Key, setting.Value); + if (setConfigResult.ExitCodeIsFailure) + { + return false; + } + } + } + else + { + if (existingConfigSettings.TryGetValue(setting.Key, out existingSetting)) + {
git.DeleteFromLocalConfig(setting.Key); + } + } + } + + return true; + } + + private void LogEnlistmentInfoAndSetConfigValues() + { + string mountId = Guid.NewGuid().ToString("N"); + EventMetadata metadata = new EventMetadata(); + metadata.Add(nameof(RepoMetadata.Instance.EnlistmentId), RepoMetadata.Instance.EnlistmentId); + metadata.Add(nameof(mountId), mountId); + metadata.Add("Enlistment", this.enlistment); + metadata.Add("PhysicalDiskInfo", GVFSPlatform.Instance.GetPhysicalDiskInfo(this.enlistment.WorkingDirectoryRoot, sizeStatsOnly: false)); + this.tracer.RelatedEvent(EventLevel.Informational, "EnlistmentInfo", metadata, Keywords.Telemetry); + + GitProcess git = new GitProcess(this.enlistment); + GitProcess.Result configResult = git.SetInLocalConfig(GVFSConstants.GitConfig.EnlistmentId, RepoMetadata.Instance.EnlistmentId, replaceAll: true); + if (configResult.ExitCodeIsFailure) + { + string error = "Could not update config with enlistment id, error: " + configResult.Errors; + this.tracer.RelatedWarning(error); + } + + configResult = git.SetInLocalConfig(GVFSConstants.GitConfig.MountId, mountId, replaceAll: true); + if (configResult.ExitCodeIsFailure) + { + string error = "Could not update config with mount id, error: " + configResult.Errors; + this.tracer.RelatedWarning(error); + } + } + + private void UnmountAndStopWorkingDirectoryCallbacks(bool willRemountInSameProcess = false) { if (this.maintenanceScheduler != null) { @@ -817,6 +1541,12 @@ private void UnmountAndStopWorkingDirectoryCallbacks() this.gvfsDatabase?.Dispose(); this.gvfsDatabase = null; + + if (!willRemountInSameProcess) + { + this.context?.Dispose(); + this.context = null; + } } } } \ No newline at end of file diff --git a/GVFS/GVFS.NativeHooks.Common/common.windows.cpp b/GVFS/GVFS.NativeHooks.Common/common.windows.cpp index d062d5758..35c7db8d4 100644 --- a/GVFS/GVFS.NativeHooks.Common/common.windows.cpp +++ b/GVFS/GVFS.NativeHooks.Common/common.windows.cpp @@ -52,11 +52,79 @@ PATH_STRING 
GetFinalPathName(const PATH_STRING& path) return finalPath; } +// Checks if the given directory is a git worktree by looking for a +// ".git" file (not directory). If found, reads it to extract the +// worktree name and returns a pipe name suffix like "_WT_NAME". +// Returns an empty string if not in a worktree. +PATH_STRING GetWorktreePipeSuffix(const wchar_t* directory) +{ + PATH_STRING dotGitPath(directory); + if (!dotGitPath.empty() && dotGitPath.back() != L'\\') + dotGitPath += L'\\'; + dotGitPath += L".git"; + + DWORD dotGitAttrs = GetFileAttributesW(dotGitPath.c_str()); + if (dotGitAttrs == INVALID_FILE_ATTRIBUTES || + (dotGitAttrs & FILE_ATTRIBUTE_DIRECTORY)) + { + return PATH_STRING(); + } + + // .git is a file — this is a worktree. Read it to find the + // worktree git directory (format: "gitdir: ") + FILE* gitFile = NULL; + errno_t fopenResult = _wfopen_s(&gitFile, dotGitPath.c_str(), L"r"); + if (fopenResult != 0 || gitFile == NULL) + return PATH_STRING(); + + char gitdirLine[4096]; + if (fgets(gitdirLine, sizeof(gitdirLine), gitFile) == NULL) + { + fclose(gitFile); + return PATH_STRING(); + } + fclose(gitFile); + + char* gitdirPath = gitdirLine; + if (strncmp(gitdirPath, "gitdir: ", 8) == 0) + gitdirPath += 8; + + // Trim trailing whitespace + size_t lineLen = strlen(gitdirPath); + while (lineLen > 0 && (gitdirPath[lineLen - 1] == '\n' || + gitdirPath[lineLen - 1] == '\r' || + gitdirPath[lineLen - 1] == ' ')) + gitdirPath[--lineLen] = '\0'; + + // Extract worktree name — last path component + // e.g., from ".git/worktrees/my-worktree" extract "my-worktree" + char* lastSep = strrchr(gitdirPath, '/'); + if (!lastSep) + lastSep = strrchr(gitdirPath, '\\'); + + if (lastSep == NULL) + return PATH_STRING(); + + std::string nameUtf8(lastSep + 1); + int wideLen = MultiByteToWideChar(CP_UTF8, 0, nameUtf8.c_str(), -1, NULL, 0); + if (wideLen <= 0) + return PATH_STRING(); + + std::wstring wtName(wideLen, L'\0'); + MultiByteToWideChar(CP_UTF8, 0, nameUtf8.c_str(), 
-1, &wtName[0], wideLen); + wtName.resize(wideLen - 1); // remove null terminator from string + + PATH_STRING suffix = L"_WT_"; + suffix += wtName; + return suffix; +} + PATH_STRING GetGVFSPipeName(const char *appName) { // The pipe name is built using the path of the GVFS enlistment root. // Start in the current directory and walk up the directory tree - // until we find a folder that contains the ".gvfs" folder + // until we find a folder that contains the ".gvfs" folder. + // For worktrees, a suffix is appended to target the worktree's mount. const size_t dotGVFSRelativePathLength = sizeof(L"\\.gvfs") / sizeof(wchar_t); @@ -117,7 +185,18 @@ PATH_STRING GetGVFSPipeName(const char *appName) PATH_STRING namedPipe(CharUpperW(enlistmentRoot)); std::replace(namedPipe.begin(), namedPipe.end(), L':', L'_'); - return L"\\\\.\\pipe\\GVFS_" + namedPipe; + PATH_STRING pipeName = L"\\\\.\\pipe\\GVFS_" + namedPipe; + + // Append worktree suffix if running in a worktree + PATH_STRING worktreeSuffix = GetWorktreePipeSuffix(finalRootPath.c_str()); + if (!worktreeSuffix.empty()) + { + std::transform(worktreeSuffix.begin(), worktreeSuffix.end(), + worktreeSuffix.begin(), ::towupper); + pipeName += worktreeSuffix; + } + + return pipeName; } PIPE_HANDLE CreatePipeToGVFS(const PATH_STRING& pipeName) diff --git a/GVFS/GVFS.Platform.Windows/ProjFSFilter.cs b/GVFS/GVFS.Platform.Windows/ProjFSFilter.cs index 8575012d6..b4f035983 100644 --- a/GVFS/GVFS.Platform.Windows/ProjFSFilter.cs +++ b/GVFS/GVFS.Platform.Windows/ProjFSFilter.cs @@ -38,6 +38,7 @@ public class ProjFSFilter : IKernelDriver private const uint OkResult = 0; private const uint NameCollisionErrorResult = 0x801F0012; + private const uint AccessDeniedResult = 0x80070005; private enum ProjFSInboxStatus { @@ -460,10 +461,35 @@ public bool TryPrepareFolderForCallbacks(string folderPath, out string error, ou public bool IsReady(JsonTracer tracer, string enlistmentRoot, TextWriter output, out string error) { error = string.Empty; 
- return - IsServiceRunning(tracer) && - IsNativeLibInstalled(tracer, new PhysicalFileSystem()) && - TryAttach(enlistmentRoot, out error); + if (!IsServiceRunning(tracer)) + { + error = "ProjFS (prjflt) service is not running"; + return false; + } + + if (!IsNativeLibInstalled(tracer, new PhysicalFileSystem())) + { + error = "ProjFS native library is not installed"; + return false; + } + + if (!TryAttach(enlistmentRoot, out error)) + { + // FilterAttach requires SE_LOAD_DRIVER_PRIVILEGE (admin). When running + // non-elevated on a machine where ProjFS is already set up, the filter + // is already attached to the volume and the only failure is ACCESS_DENIED. + // Allow the mount to proceed in that specific case. + if (error.Contains(AccessDeniedResult.ToString())) + { + tracer.RelatedInfo($"IsReady: TryAttach returned ACCESS_DENIED, but ProjFS service is running. Proceeding."); + error = string.Empty; + return true; + } + + return false; + } + + return true; } public bool RegisterForOfflineIO() diff --git a/GVFS/GVFS.Platform.Windows/WindowsFileBasedLock.cs b/GVFS/GVFS.Platform.Windows/WindowsFileBasedLock.cs index a965304e4..edf1c43a0 100644 --- a/GVFS/GVFS.Platform.Windows/WindowsFileBasedLock.cs +++ b/GVFS/GVFS.Platform.Windows/WindowsFileBasedLock.cs @@ -36,8 +36,9 @@ public WindowsFileBasedLock( { } - public override bool TryAcquireLock() + public override bool TryAcquireLock(out Exception lockException) { + lockException = null; try { lock (this.deleteOnCloseStreamLock) @@ -63,13 +64,14 @@ public override bool TryAcquireLock() catch (IOException e) { // HResultErrorFileExists is expected when the lock file exists - // HResultErrorSharingViolation is expected when the lock file exists andanother GVFS process has acquired the lock file + // HResultErrorSharingViolation is expected when the lock file exists and another GVFS process has acquired the lock file if (e.HResult != HResultErrorFileExists && e.HResult != HResultErrorSharingViolation) { EventMetadata 
metadata = this.CreateLockMetadata(e); this.Tracer.RelatedWarning(metadata, $"{nameof(this.TryAcquireLock)}: IOException caught while trying to acquire lock"); } + lockException = e; this.DisposeStream(); return false; } @@ -78,6 +80,7 @@ public override bool TryAcquireLock() EventMetadata metadata = this.CreateLockMetadata(e); this.Tracer.RelatedWarning(metadata, $"{nameof(this.TryAcquireLock)}: UnauthorizedAccessException caught while trying to acquire lock"); + lockException = e; this.DisposeStream(); return false; } @@ -86,6 +89,7 @@ public override bool TryAcquireLock() EventMetadata metadata = this.CreateLockMetadata(e); this.Tracer.RelatedWarning(metadata, $"{nameof(this.TryAcquireLock)}: Win32Exception caught while trying to acquire lock"); + lockException = e; this.DisposeStream(); return false; } diff --git a/GVFS/GVFS.Platform.Windows/WindowsFileSystemVirtualizer.cs b/GVFS/GVFS.Platform.Windows/WindowsFileSystemVirtualizer.cs index 23a49030b..8977bfa88 100644 --- a/GVFS/GVFS.Platform.Windows/WindowsFileSystemVirtualizer.cs +++ b/GVFS/GVFS.Platform.Windows/WindowsFileSystemVirtualizer.cs @@ -1014,7 +1014,25 @@ private void NotifyNewFileCreatedHandler( GitCommandLineParser gitCommand = new GitCommandLineParser(this.Context.Repository.GVFSLock.GetLockedGitCommand()); if (gitCommand.IsValidGitCommand) { - this.MarkDirectoryAsPlaceholder(virtualPath, triggeringProcessId, triggeringProcessImageFileName); + // When git recreates a directory that was previously deleted (and is + // tracked in ModifiedPaths), skip marking it as a ProjFS placeholder. + // Otherwise ProjFS would immediately project all children into it, + // conflicting with git's own attempt to populate the directory. 
+ // + // This check is safe from races with the background task that updates + // ModifiedPaths: the deletion happens from a non-git process (e.g., + // rmdir), and IsReadyForExternalAcquireLockRequests() blocks git from + // acquiring the GVFS lock until the background queue is drained. When + // git itself deletes a folder, the code takes the IsValidGitCommand + // path in OnWorkingDirectoryFileOrFolderDeleteNotification and calls + // OnPossibleTombstoneFolderCreated instead of OnFolderDeleted, so + // ModifiedPaths is not involved. + // + // See https://github.com/microsoft/VFSForGit/issues/1901 + if (!this.FileSystemCallbacks.IsPathOrParentInModifiedPaths(virtualPath, isFolder: true)) + { + this.MarkDirectoryAsPlaceholder(virtualPath, triggeringProcessId, triggeringProcessImageFileName); + } } else { diff --git a/GVFS/GVFS.Platform.Windows/WindowsPlatform.Shared.cs b/GVFS/GVFS.Platform.Windows/WindowsPlatform.Shared.cs index 67414dbfc..80cc09f68 100644 --- a/GVFS/GVFS.Platform.Windows/WindowsPlatform.Shared.cs +++ b/GVFS/GVFS.Platform.Windows/WindowsPlatform.Shared.cs @@ -77,6 +77,12 @@ public static string GetNamedPipeNameImplementation(string enlistmentRoot) public static string GetSecureDataRootForGVFSImplementation() { + string envOverride = Environment.GetEnvironmentVariable("GVFS_SECURE_DATA_ROOT"); + if (!string.IsNullOrEmpty(envOverride)) + { + return envOverride; + } + return Path.Combine( Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles, Environment.SpecialFolderOption.Create), "GVFS", @@ -85,6 +91,12 @@ public static string GetSecureDataRootForGVFSImplementation() public static string GetCommonAppDataRootForGVFSImplementation() { + string envOverride = Environment.GetEnvironmentVariable("GVFS_COMMON_APPDATA_ROOT"); + if (!string.IsNullOrEmpty(envOverride)) + { + return envOverride; + } + return Path.Combine( Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData, Environment.SpecialFolderOption.Create), "GVFS"); 
diff --git a/GVFS/GVFS.Service/GVFSMountProcess.cs b/GVFS/GVFS.Service/GVFSMountProcess.cs index 6d11f9ce2..4e65e6c62 100644 --- a/GVFS/GVFS.Service/GVFSMountProcess.cs +++ b/GVFS/GVFS.Service/GVFSMountProcess.cs @@ -35,7 +35,25 @@ public bool MountRepository(string repoRoot, int sessionId) } string errorMessage; - if (!GVFSEnlistment.WaitUntilMounted(this.tracer, repoRoot, false, out errorMessage)) + string pipeName = GVFSPlatform.Instance.GetNamedPipeName(repoRoot); + string worktreeError; + GVFSEnlistment.WorktreeInfo wtInfo = GVFSEnlistment.TryGetWorktreeInfo(repoRoot, out worktreeError); + if (worktreeError != null) + { + this.tracer.RelatedError($"Failed to check worktree status for '{repoRoot}': {worktreeError}"); + return false; + } + + if (wtInfo?.SharedGitDir != null) + { + string enlistmentRoot = wtInfo.GetEnlistmentRoot(); + if (enlistmentRoot != null) + { + pipeName = GVFSPlatform.Instance.GetNamedPipeName(enlistmentRoot) + wtInfo.PipeSuffix; + } + } + + if (!GVFSEnlistment.WaitUntilMounted(this.tracer, pipeName, repoRoot, false, out errorMessage)) { this.tracer.RelatedError(errorMessage); return false; diff --git a/GVFS/GVFS.Service/GVFSService.Windows.cs b/GVFS/GVFS.Service/GVFSService.Windows.cs index 92d4fd4fc..5b3048b74 100644 --- a/GVFS/GVFS.Service/GVFSService.Windows.cs +++ b/GVFS/GVFS.Service/GVFSService.Windows.cs @@ -152,6 +152,53 @@ protected override void OnSessionChange(SessionChangeDescription changeDescripti } } + public void RunInConsoleMode(string[] args) + { + if (this.serviceThread != null) + { + throw new InvalidOperationException("Cannot start service twice in a row."); + } + + string serviceName = args.FirstOrDefault(arg => arg.StartsWith(ServiceNameArgPrefix)); + if (serviceName != null) + { + this.serviceName = serviceName.Substring(ServiceNameArgPrefix.Length); + } + + string serviceLogsDirectoryPath = GVFSPlatform.Instance.GetLogsDirectoryForGVFSComponent(this.serviceName); + + 
Directory.CreateDirectory(serviceLogsDirectoryPath); + this.tracer.AddLogFileEventListener( + GVFSEnlistment.GetNewGVFSLogFileName(serviceLogsDirectoryPath, GVFSConstants.LogFileTypes.Service), + EventLevel.Verbose, + Keywords.Any); + + try + { + this.serviceDataLocation = GVFSPlatform.Instance.GetSecureDataRootForGVFSComponent(this.serviceName); + Directory.CreateDirectory(this.serviceDataLocation); + Directory.CreateDirectory(Path.GetDirectoryName(this.serviceDataLocation)); + + this.serviceStopped = new ManualResetEvent(false); + + Console.WriteLine($"GVFS.Service running in console mode as '{this.serviceName}'"); + Console.WriteLine("Press Ctrl+C to stop."); + + Console.CancelKeyPress += (sender, e) => + { + e.Cancel = true; + this.StopRunning(); + }; + + this.Run(); + } + catch (Exception e) + { + this.tracer.RelatedError($"Console mode failed: {e}"); + throw; + } + } + protected override void OnStart(string[] args) { if (this.serviceThread != null) diff --git a/GVFS/GVFS.Service/Program.cs b/GVFS/GVFS.Service/Program.cs index aae422d4a..ac6d35449 100644 --- a/GVFS/GVFS.Service/Program.cs +++ b/GVFS/GVFS.Service/Program.cs @@ -3,25 +3,47 @@ using GVFS.PlatformLoader; using System; using System.Diagnostics; +using System.Linq; using System.ServiceProcess; +using System.Threading; namespace GVFS.Service { public static class Program { + private const string ConsoleFlag = "--console"; + public static void Main(string[] args) { GVFSPlatformLoader.Initialize(); AppDomain.CurrentDomain.UnhandledException += UnhandledExceptionHandler; + if (args.Any(arg => arg.Equals(ConsoleFlag, StringComparison.OrdinalIgnoreCase))) + { + RunAsConsoleApp(args); + } + else + { + using (JsonTracer tracer = new JsonTracer(GVFSConstants.Service.ServiceName, GVFSConstants.Service.ServiceName)) + { + using (GVFSService service = new GVFSService(tracer)) + { + // This will fail with a popup from a command prompt. 
To install as a service, run: + // %windir%\Microsoft.NET\Framework64\v4.0.30319\installutil GVFS.Service.exe + ServiceBase.Run(service); + } + } + } + } + + private static void RunAsConsoleApp(string[] args) + { using (JsonTracer tracer = new JsonTracer(GVFSConstants.Service.ServiceName, GVFSConstants.Service.ServiceName)) { using (GVFSService service = new GVFSService(tracer)) { - // This will fail with a popup from a command prompt. To install as a service, run: - // %windir%\Microsoft.NET\Framework64\v4.0.30319\installutil GVFS.Service.exe - ServiceBase.Run(service); + service.RunInConsoleMode(args); } } } diff --git a/GVFS/GVFS.UnitTests/CommandLine/CacheVerbTests.cs b/GVFS/GVFS.UnitTests/CommandLine/CacheVerbTests.cs new file mode 100644 index 000000000..fb42fe75b --- /dev/null +++ b/GVFS/GVFS.UnitTests/CommandLine/CacheVerbTests.cs @@ -0,0 +1,195 @@ +using GVFS.CommandLine; +using NUnit.Framework; +using System; +using System.Globalization; +using System.IO; + +namespace GVFS.UnitTests.CommandLine +{ + [TestFixture] + public class CacheVerbTests + { + private CacheVerb cacheVerb; + private string testDir; + + [SetUp] + public void Setup() + { + this.cacheVerb = new CacheVerb(); + this.testDir = Path.Combine(Path.GetTempPath(), "CacheVerbTests_" + Guid.NewGuid().ToString("N")); + Directory.CreateDirectory(this.testDir); + } + + [TearDown] + public void TearDown() + { + if (Directory.Exists(this.testDir)) + { + Directory.Delete(this.testDir, recursive: true); + } + } + + [TestCase(0, "0 bytes")] + [TestCase(512, "512 bytes")] + [TestCase(1023, "1023 bytes")] + [TestCase(1024, "1.0 KB")] + [TestCase(1536, "1.5 KB")] + [TestCase(1048576, "1.0 MB")] + [TestCase(1572864, "1.5 MB")] + [TestCase(1073741824, "1.0 GB")] + [TestCase(1610612736, "1.5 GB")] + [TestCase(10737418240, "10.0 GB")] + public void FormatSizeForUserDisplayReturnsExpectedString(long bytes, string expected) + { + CultureInfo savedCulture = CultureInfo.CurrentCulture; + try + { + 
CultureInfo.CurrentCulture = CultureInfo.InvariantCulture; + Assert.AreEqual(expected, this.cacheVerb.FormatSizeForUserDisplay(bytes)); + } + finally + { + CultureInfo.CurrentCulture = savedCulture; + } + } + + [TestCase] + public void GetPackSummaryWithNoPacks() + { + string packDir = Path.Combine(this.testDir, "pack"); + Directory.CreateDirectory(packDir); + + this.cacheVerb.GetPackSummary( + packDir, + out int prefetchCount, + out long prefetchSize, + out int otherCount, + out long otherSize, + out long latestTimestamp); + + Assert.AreEqual(0, prefetchCount); + Assert.AreEqual(0, prefetchSize); + Assert.AreEqual(0, otherCount); + Assert.AreEqual(0, otherSize); + Assert.AreEqual(0, latestTimestamp); + } + + [TestCase] + public void GetPackSummaryCategorizesPrefetchAndOtherPacks() + { + string packDir = Path.Combine(this.testDir, "pack"); + Directory.CreateDirectory(packDir); + + this.CreateFileWithSize(Path.Combine(packDir, "prefetch-1000-aabbccdd.pack"), 100); + this.CreateFileWithSize(Path.Combine(packDir, "prefetch-2000-eeff0011.pack"), 200); + this.CreateFileWithSize(Path.Combine(packDir, "pack-abcdef1234567890.pack"), 50); + + this.cacheVerb.GetPackSummary( + packDir, + out int prefetchCount, + out long prefetchSize, + out int otherCount, + out long otherSize, + out long latestTimestamp); + + Assert.AreEqual(2, prefetchCount); + Assert.AreEqual(300, prefetchSize); + Assert.AreEqual(1, otherCount); + Assert.AreEqual(50, otherSize); + Assert.AreEqual(2000, latestTimestamp); + } + + [TestCase] + public void GetPackSummaryIgnoresNonPackFiles() + { + string packDir = Path.Combine(this.testDir, "pack"); + Directory.CreateDirectory(packDir); + + this.CreateFileWithSize(Path.Combine(packDir, "prefetch-1000-aabb.pack"), 100); + this.CreateFileWithSize(Path.Combine(packDir, "prefetch-1000-aabb.idx"), 50); + this.CreateFileWithSize(Path.Combine(packDir, "multi-pack-index"), 10); + + this.cacheVerb.GetPackSummary( + packDir, + out int prefetchCount, + out long 
prefetchSize, + out int otherCount, + out long otherSize, + out long latestTimestamp); + + Assert.AreEqual(1, prefetchCount); + Assert.AreEqual(100, prefetchSize); + Assert.AreEqual(0, otherCount); + Assert.AreEqual(0, otherSize); + } + + [TestCase] + public void GetPackSummaryHandlesBothGuidAndSHA1HashFormats() + { + string packDir = Path.Combine(this.testDir, "pack"); + Directory.CreateDirectory(packDir); + + // GVFS format: 32-char GUID + this.CreateFileWithSize(Path.Combine(packDir, "prefetch-1000-b8d9efad32194d98894532905daf88ec.pack"), 100); + // Scalar format: 40-char SHA1 + this.CreateFileWithSize(Path.Combine(packDir, "prefetch-2000-9babd9b75521f9caf693b485329d3d5669c88564.pack"), 200); + + this.cacheVerb.GetPackSummary( + packDir, + out int prefetchCount, + out long prefetchSize, + out int otherCount, + out long otherSize, + out long latestTimestamp); + + Assert.AreEqual(2, prefetchCount); + Assert.AreEqual(300, prefetchSize); + Assert.AreEqual(2000, latestTimestamp); + } + + [TestCase] + public void CountLooseObjectsWithNoObjects() + { + int count = this.cacheVerb.CountLooseObjects(this.testDir); + Assert.AreEqual(0, count); + } + + [TestCase] + public void CountLooseObjectsCountsFilesInHexDirectories() + { + Directory.CreateDirectory(Path.Combine(this.testDir, "00")); + File.WriteAllText(Path.Combine(this.testDir, "00", "abc123"), string.Empty); + File.WriteAllText(Path.Combine(this.testDir, "00", "def456"), string.Empty); + + Directory.CreateDirectory(Path.Combine(this.testDir, "ff")); + File.WriteAllText(Path.Combine(this.testDir, "ff", "789abc"), string.Empty); + + int count = this.cacheVerb.CountLooseObjects(this.testDir); + Assert.AreEqual(3, count); + } + + [TestCase] + public void CountLooseObjectsIgnoresNonHexDirectories() + { + // "pack" and "info" are valid directories in a git objects dir but not hex dirs + Directory.CreateDirectory(Path.Combine(this.testDir, "pack")); + File.WriteAllText(Path.Combine(this.testDir, "pack", "somefile"), 
string.Empty); + + Directory.CreateDirectory(Path.Combine(this.testDir, "info")); + File.WriteAllText(Path.Combine(this.testDir, "info", "somefile"), string.Empty); + + // "ab" is a valid hex dir + Directory.CreateDirectory(Path.Combine(this.testDir, "ab")); + File.WriteAllText(Path.Combine(this.testDir, "ab", "object1"), string.Empty); + + int count = this.cacheVerb.CountLooseObjects(this.testDir); + Assert.AreEqual(1, count); + } + + private void CreateFileWithSize(string path, int size) + { + byte[] data = new byte[size]; + File.WriteAllBytes(path, data); + } + } +} diff --git a/GVFS/GVFS.UnitTests/Common/EnlistmentHydrationSummaryTests.cs b/GVFS/GVFS.UnitTests/Common/EnlistmentHydrationSummaryTests.cs index 2623dc4bd..3920ab135 100644 --- a/GVFS/GVFS.UnitTests/Common/EnlistmentHydrationSummaryTests.cs +++ b/GVFS/GVFS.UnitTests/Common/EnlistmentHydrationSummaryTests.cs @@ -1,59 +1,266 @@ using GVFS.Common; using GVFS.Common.Git; -using GVFS.Tests.Should; +using GVFS.Common.Tracing; using GVFS.UnitTests.Mock.Common; using GVFS.UnitTests.Mock.FileSystem; using GVFS.UnitTests.Mock.Git; +using GVFS.Virtualization.Projection; using NUnit.Framework; -using System.Collections.Generic; +using System; using System.IO; -using System.Linq; +using System.Text; +using System.Threading; +using static GVFS.Virtualization.Projection.GitIndexProjection.GitIndexParser; namespace GVFS.UnitTests.Common { [TestFixture] public class EnlistmentHydrationSummaryTests { - private MockFileSystem fileSystem; - private MockGitProcess gitProcess; - private GVFSContext context; - private string gitParentPath; - private string gvfsMetadataPath; - private MockDirectory enlistmentDirectory; + [TestCase] + public void CountIndexFolders_FlatDirectories() + { + int count = CountFoldersInIndex(new[] { "src/file1.cs", "test/file2.cs" }); + Assert.AreEqual(2, count); // "src", "test" + } + + [TestCase] + public void CountIndexFolders_NestedDirectories() + { + int count = CountFoldersInIndex(new[] { 
"a/b/c/file1.cs", "a/b/file2.cs", "x/file3.cs" }); + Assert.AreEqual(4, count); // "a", "a/b", "a/b/c", "x" + } + + [TestCase] + public void CountIndexFolders_RootFilesOnly() + { + int count = CountFoldersInIndex(new[] { "README.md", ".gitignore" }); + Assert.AreEqual(0, count); + } + + [TestCase] + public void CountIndexFolders_EmptyIndex() + { + int count = CountFoldersInIndex(new string[0]); + Assert.AreEqual(0, count); + } + + [TestCase] + public void CountIndexFolders_DeepNesting() + { + int count = CountFoldersInIndex(new[] { "a/b/c/d/e/file.txt" }); + Assert.AreEqual(5, count); // "a", "a/b", "a/b/c", "a/b/c/d", "a/b/c/d/e" + } + + [TestCase] + public void CountIndexFolders_ExcludesNonSkipWorktree() + { + // Entries without skip-worktree and with NoConflicts merge state are not + // projected, so their directories should not be counted. + IndexEntryInfo[] entries = new[] + { + new IndexEntryInfo("src/file1.cs", skipWorktree: true), + new IndexEntryInfo("vendor/lib/file2.cs", skipWorktree: false), + }; + + int count = CountFoldersInIndex(entries); + Assert.AreEqual(1, count); // only "src" + } - private const string HeadTreeId = "0123456789012345678901234567890123456789"; - private const int HeadPathCount = 42; + [TestCase] + public void CountIndexFolders_ExcludesCommonAncestor() + { + // CommonAncestor entries are excluded even when skip-worktree is set. + IndexEntryInfo[] entries = new[] + { + new IndexEntryInfo("src/file1.cs", skipWorktree: true), + new IndexEntryInfo("conflict/file2.cs", skipWorktree: true, mergeState: MergeStage.CommonAncestor), + }; + + int count = CountFoldersInIndex(entries); + Assert.AreEqual(1, count); // only "src" + } + + [TestCase] + public void CountIndexFolders_IncludesYoursMergeState() + { + // Yours merge-state entries are projected even without skip-worktree. 
+ IndexEntryInfo[] entries = new[] + { + new IndexEntryInfo("src/file1.cs", skipWorktree: true), + new IndexEntryInfo("merge/file2.cs", skipWorktree: false, mergeState: MergeStage.Yours), + }; + + int count = CountFoldersInIndex(entries); + Assert.AreEqual(2, count); // "src" and "merge" + } - public static IEnumerable<(string CachePrecontents, string ExpectedCachePostContents)> HeadTreeCountCacheContents + private static int CountFoldersInIndex(string[] paths) { - get + byte[] indexBytes = CreateV4Index(paths); + using (MemoryStream stream = new MemoryStream(indexBytes)) { - yield return (null, $"{HeadTreeId}\n{HeadPathCount}"); - yield return ($"{HeadTreeId}\n{HeadPathCount}", $"{HeadTreeId}\n{HeadPathCount}"); - yield return ($"{HeadTreeId}\n{HeadPathCount - 1}", $"{HeadTreeId}\n{HeadPathCount - 1}"); - yield return ($"{HeadTreeId.Replace("1", "a")}\n{HeadPathCount - 1}", $"{HeadTreeId}\n{HeadPathCount}"); - yield return ($"{HeadTreeId}\nabc", $"{HeadTreeId}\n{HeadPathCount}"); - yield return ($"{HeadTreeId}\nabc", $"{HeadTreeId}\n{HeadPathCount}"); - yield return ($"\n", $"{HeadTreeId}\n{HeadPathCount}"); - yield return ($"\nabc", $"{HeadTreeId}\n{HeadPathCount}"); + return GitIndexProjection.CountIndexFolders(new MockTracer(), stream); } } + private static int CountFoldersInIndex(IndexEntryInfo[] entries) + { + byte[] indexBytes = CreateV4Index(entries); + using (MemoryStream stream = new MemoryStream(indexBytes)) + { + return GitIndexProjection.CountIndexFolders(new MockTracer(), stream); + } + } + + /// + /// Create a minimal git index v4 binary matching the format GitIndexGenerator produces. + /// Uses prefix-compression for paths (v4 format). 
+ /// + private static byte[] CreateV4Index(string[] paths) + { + IndexEntryInfo[] entries = new IndexEntryInfo[paths.Length]; + for (int i = 0; i < paths.Length; i++) + { + entries[i] = new IndexEntryInfo(paths[i], skipWorktree: true); + } + + return CreateV4Index(entries); + } + + private static byte[] CreateV4Index(IndexEntryInfo[] entries) + { + // Stat entry header matching GitIndexGenerator.EntryHeader: + // 40 bytes with file mode 0x81A4 (regular file, 644) at offset 24-27 + byte[] entryHeader = new byte[40]; + entryHeader[26] = 0x81; + entryHeader[27] = 0xA4; + + using (MemoryStream ms = new MemoryStream()) + using (BinaryWriter bw = new BinaryWriter(ms)) + { + // Header + bw.Write(new byte[] { (byte)'D', (byte)'I', (byte)'R', (byte)'C' }); + WriteBigEndian32(bw, 4); // version 4 + WriteBigEndian32(bw, (uint)entries.Length); + + string previousPath = string.Empty; + foreach (IndexEntryInfo entry in entries) + { + // 40-byte stat entry header with valid file mode + bw.Write(entryHeader); + // 20 bytes SHA-1 (zeros) + bw.Write(new byte[20]); + // Flags: path length in low 12 bits, merge state in bits 12-13, extended bit 14 + byte[] pathBytes = Encoding.UTF8.GetBytes(entry.Path); + ushort flags = (ushort)(Math.Min(pathBytes.Length, 0xFFF) | 0x4000 | ((ushort)entry.MergeState << 12)); + WriteBigEndian16(bw, flags); + // Extended flags: skip-worktree bit + ushort extendedFlags = entry.SkipWorktree ? 
(ushort)0x4000 : (ushort)0; + WriteBigEndian16(bw, extendedFlags); + + // V4 prefix compression: compute common prefix with previous path + int commonLen = 0; + int maxCommon = Math.Min(previousPath.Length, entry.Path.Length); + while (commonLen < maxCommon && previousPath[commonLen] == entry.Path[commonLen]) + { + commonLen++; + } + + int replaceLen = previousPath.Length - commonLen; + string suffix = entry.Path.Substring(commonLen); + + // Write replace length as varint + WriteVarint(bw, replaceLen); + // Write suffix + null terminator + bw.Write(Encoding.UTF8.GetBytes(suffix)); + bw.Write((byte)0); + + previousPath = entry.Path; + } + + return ms.ToArray(); + } + } + + private struct IndexEntryInfo + { + public string Path; + public bool SkipWorktree; + public MergeStage MergeState; + + public IndexEntryInfo(string path, bool skipWorktree, MergeStage mergeState = MergeStage.NoConflicts) + { + this.Path = path; + this.SkipWorktree = skipWorktree; + this.MergeState = mergeState; + } + } + + private static void WriteBigEndian32(BinaryWriter bw, uint value) + { + bw.Write((byte)((value >> 24) & 0xFF)); + bw.Write((byte)((value >> 16) & 0xFF)); + bw.Write((byte)((value >> 8) & 0xFF)); + bw.Write((byte)(value & 0xFF)); + } + + private static void WriteBigEndian16(BinaryWriter bw, ushort value) + { + bw.Write((byte)((value >> 8) & 0xFF)); + bw.Write((byte)(value & 0xFF)); + } + + private static void WriteVarint(BinaryWriter bw, int value) + { + // Git index v4 varint encoding (same as ReadReplaceLength in GitIndexParser) + if (value < 0x80) + { + bw.Write((byte)value); + return; + } + + byte[] bytes = new byte[5]; + int pos = 4; + bytes[pos] = (byte)(value & 0x7F); + value = (value >> 7) - 1; + while (value >= 0) + { + pos--; + bytes[pos] = (byte)(0x80 | (value & 0x7F)); + value = (value >> 7) - 1; + } + + bw.Write(bytes, pos, 5 - pos); + } + } + + /// + /// Tests for EnlistmentHydrationSummary that require the full mock filesystem/context. 
+ /// + [TestFixture] + public class EnlistmentHydrationSummaryContextTests + { + private MockFileSystem fileSystem; + private MockTracer tracer; + private GVFSContext context; + private string gitParentPath; + private MockDirectory enlistmentDirectory; + [SetUp] public void Setup() { - MockTracer tracer = new MockTracer(); + this.tracer = new MockTracer(); string enlistmentRoot = Path.Combine("mock:", "GVFS", "UnitTests", "Repo"); string statusCachePath = Path.Combine("mock:", "GVFS", "UnitTests", "Repo", GVFSPlatform.Instance.Constants.DotGVFSRoot, "gitStatusCache"); - this.gitProcess = new MockGitProcess(); - this.gitProcess.SetExpectedCommandResult($"--no-optional-locks status \"--serialize={statusCachePath}", () => new GitProcess.Result(string.Empty, string.Empty, 0), true); - MockGVFSEnlistment enlistment = new MockGVFSEnlistment(enlistmentRoot, "fake://repoUrl", "fake://gitBinPath", this.gitProcess); + MockGitProcess gitProcess = new MockGitProcess(); + gitProcess.SetExpectedCommandResult($"--no-optional-locks status \"--serialize={statusCachePath}", () => new GitProcess.Result(string.Empty, string.Empty, 0), true); + MockGVFSEnlistment enlistment = new MockGVFSEnlistment(enlistmentRoot, "fake://repoUrl", "fake://gitBinPath", gitProcess); enlistment.InitializeCachePathsFromKey("fake:\\gvfsSharedCache", "fakeCacheKey"); this.gitParentPath = enlistment.WorkingDirectoryBackingRoot; - this.gvfsMetadataPath = enlistment.DotGVFSRoot; this.enlistmentDirectory = new MockDirectory( enlistmentRoot, @@ -74,52 +281,45 @@ public void Setup() this.fileSystem.DeleteNonExistentFileThrowsException = false; this.context = new GVFSContext( - tracer, + this.tracer, this.fileSystem, - new MockGitRepo(tracer, enlistment, this.fileSystem), + new MockGitRepo(this.tracer, enlistment, this.fileSystem), enlistment); } - [TearDown] - public void TearDown() + [TestCase] + public void GetIndexFileCount_IndexTooSmall_ReturnsNegativeOne() { - this.fileSystem = null; - this.gitProcess = 
null; - this.context = null; - this.gitParentPath = null; - this.gvfsMetadataPath = null; - this.enlistmentDirectory = null; + string indexPath = Path.Combine(this.gitParentPath, ".git", "index"); + this.enlistmentDirectory.CreateFile(indexPath, "short", createDirectories: true); + + int result = EnlistmentHydrationSummary.GetIndexFileCount( + this.context.Enlistment, this.context.FileSystem); + + Assert.AreEqual(-1, result); } - [TestCaseSource("HeadTreeCountCacheContents")] - public void HeadTreeCountCacheTests((string CachePrecontents, string ExpectedCachePostContents) args) + [TestCase] + public void CreateSummary_CancelledToken_ReturnsInvalidSummary() { - string totalPathCountPath = Path.Combine(this.gvfsMetadataPath, GVFSConstants.DotGVFS.GitStatusCache.TreeCount); - if (args.CachePrecontents != null) - { - this.enlistmentDirectory.CreateFile(totalPathCountPath, args.CachePrecontents, createDirectories: true); - } + // Set up a valid index file so CreateSummary gets past GetIndexFileCount + // before hitting the first cancellation check. 
+ string indexPath = Path.Combine(this.gitParentPath, ".git", "index"); + byte[] indexBytes = new byte[12]; + indexBytes[11] = 100; // file count = 100 (big-endian) + MockFile indexFile = new MockFile(indexPath, indexBytes); + MockDirectory gitDir = this.enlistmentDirectory.FindDirectory(Path.Combine(this.gitParentPath, ".git")); + gitDir.Files.Add(indexFile.FullName, indexFile); + + CancellationTokenSource cts = new CancellationTokenSource(); + cts.Cancel(); + + Func dummyProvider = () => 0; + EnlistmentHydrationSummary result = EnlistmentHydrationSummary.CreateSummary( + this.context.Enlistment, this.context.FileSystem, this.context.Tracer, dummyProvider, cts.Token); - this.gitProcess.SetExpectedCommandResult("rev-parse \"HEAD^{tree}\"", - () => new GitProcess.Result(HeadTreeId, "", 0)); - this.gitProcess.SetExpectedCommandResult("ls-tree -r -d HEAD", - () => new GitProcess.Result( - string.Join("\n", Enumerable.Range(0, HeadPathCount) - .Select(x => x.ToString())), - "", 0)); - - Assert.AreEqual( - args.CachePrecontents != null, - this.fileSystem.FileExists(totalPathCountPath)); - - int result = EnlistmentHydrationSummary.GetHeadTreeCount(this.context.Enlistment, this.context.FileSystem); - - this.fileSystem.FileExists(totalPathCountPath).ShouldBeTrue(); - var postContents = this.fileSystem.ReadAllText(totalPathCountPath); - Assert.AreEqual( - args.ExpectedCachePostContents, - postContents); - Assert.AreEqual(postContents.Split('\n')[1], result.ToString()); + Assert.IsFalse(result.IsValid); + Assert.IsNull(result.Error); } } } diff --git a/GVFS/GVFS.UnitTests/Common/HydrationStatusCircuitBreakerTests.cs b/GVFS/GVFS.UnitTests/Common/HydrationStatusCircuitBreakerTests.cs new file mode 100644 index 000000000..415b71cf0 --- /dev/null +++ b/GVFS/GVFS.UnitTests/Common/HydrationStatusCircuitBreakerTests.cs @@ -0,0 +1,183 @@ +using GVFS.Common; +using GVFS.Tests.Should; +using GVFS.UnitTests.Mock.Common; +using NUnit.Framework; +using System.IO; + +namespace 
GVFS.UnitTests.Common +{ + [TestFixture] + public class HydrationStatusCircuitBreakerTests + { + private MockTracer tracer; + private string dotGVFSRoot; + private string tempDir; + + [SetUp] + public void Setup() + { + this.tempDir = Path.Combine(Path.GetTempPath(), "GVFS_CircuitBreakerTest_" + Path.GetRandomFileName()); + this.dotGVFSRoot = Path.Combine(this.tempDir, ".gvfs"); + Directory.CreateDirectory(Path.Combine(this.dotGVFSRoot, "gitStatusCache")); + this.tracer = new MockTracer(); + } + + [TearDown] + public void TearDown() + { + if (Directory.Exists(this.tempDir)) + { + Directory.Delete(this.tempDir, recursive: true); + } + } + + [Test] + public void IsDisabledReturnsFalseWhenNoMarkerFile() + { + HydrationStatusCircuitBreaker breaker = this.CreateBreaker(); + breaker.IsDisabled().ShouldBeFalse(); + } + + [Test] + public void SingleFailureDoesNotDisable() + { + HydrationStatusCircuitBreaker breaker = this.CreateBreaker(); + breaker.RecordFailure(); + breaker.IsDisabled().ShouldBeFalse(); + } + + [Test] + public void TwoFailuresDoNotDisable() + { + HydrationStatusCircuitBreaker breaker = this.CreateBreaker(); + breaker.RecordFailure(); + breaker.RecordFailure(); + breaker.IsDisabled().ShouldBeFalse(); + } + + [Test] + public void ThreeFailuresTripsBreaker() + { + HydrationStatusCircuitBreaker breaker = this.CreateBreaker(); + breaker.RecordFailure(); + breaker.RecordFailure(); + breaker.RecordFailure(); + breaker.IsDisabled().ShouldBeTrue(); + } + + [Test] + public void BreakerResetsOnNewDay() + { + HydrationStatusCircuitBreaker breaker = this.CreateBreaker(); + + // Simulate a marker file from yesterday + string markerPath = Path.Combine( + this.dotGVFSRoot, + GVFSConstants.DotGVFS.HydrationStatus.DisabledMarkerFile); + File.WriteAllText( + markerPath, + $"2020-01-01\n{ProcessHelper.GetCurrentProcessVersion()}\n5"); + + breaker.IsDisabled().ShouldBeFalse("Circuit breaker should reset on a new day"); + } + + [Test] + public void 
BreakerResetsOnVersionChange() + { + HydrationStatusCircuitBreaker breaker = this.CreateBreaker(); + + // Simulate a marker file with a different GVFS version + string markerPath = Path.Combine( + this.dotGVFSRoot, + GVFSConstants.DotGVFS.HydrationStatus.DisabledMarkerFile); + string today = System.DateTime.UtcNow.ToString("yyyy-MM-dd"); + File.WriteAllText( + markerPath, + $"{today}\n99.99.99.99\n5"); + + breaker.IsDisabled().ShouldBeFalse("Circuit breaker should reset when GVFS version changes"); + } + + [Test] + public void BreakerStaysTrippedOnSameDayAndVersion() + { + HydrationStatusCircuitBreaker breaker = this.CreateBreaker(); + + string markerPath = Path.Combine( + this.dotGVFSRoot, + GVFSConstants.DotGVFS.HydrationStatus.DisabledMarkerFile); + string today = System.DateTime.UtcNow.ToString("yyyy-MM-dd"); + string currentVersion = ProcessHelper.GetCurrentProcessVersion(); + File.WriteAllText( + markerPath, + $"{today}\n{currentVersion}\n3"); + + breaker.IsDisabled().ShouldBeTrue("Circuit breaker should remain tripped on same day and version"); + } + + [Test] + public void TryParseMarkerFileHandlesValidContent() + { + bool result = HydrationStatusCircuitBreaker.TryParseMarkerFile( + "2026-03-11\n0.2.26070.19566\n3", + out string date, + out string version, + out int count); + + result.ShouldBeTrue(); + date.ShouldEqual("2026-03-11"); + version.ShouldEqual("0.2.26070.19566"); + count.ShouldEqual(3); + } + + [Test] + public void TryParseMarkerFileHandlesEmptyContent() + { + HydrationStatusCircuitBreaker.TryParseMarkerFile( + string.Empty, + out string _, + out string _, + out int _).ShouldBeFalse(); + } + + [Test] + public void TryParseMarkerFileHandlesCorruptContent() + { + HydrationStatusCircuitBreaker.TryParseMarkerFile( + "garbage", + out string _, + out string _, + out int _).ShouldBeFalse(); + } + + [Test] + public void TryParseMarkerFileHandlesNonNumericCount() + { + HydrationStatusCircuitBreaker.TryParseMarkerFile( + "2026-03-11\n0.2.26070.19566\nabc", 
+ out string _, + out string _, + out int _).ShouldBeFalse(); + } + + [Test] + public void RecordFailureLogsWarningWhenBreakerTrips() + { + HydrationStatusCircuitBreaker breaker = this.CreateBreaker(); + breaker.RecordFailure(); + breaker.RecordFailure(); + breaker.RecordFailure(); + + this.tracer.RelatedWarningEvents.Count.ShouldBeAtLeast( + 1, + "Should log a warning when circuit breaker trips"); + } + + private HydrationStatusCircuitBreaker CreateBreaker() + { + return new HydrationStatusCircuitBreaker( + this.dotGVFSRoot, + this.tracer); + } + } +} diff --git a/GVFS/GVFS.UnitTests/Common/HydrationStatusErrorPathTests.cs b/GVFS/GVFS.UnitTests/Common/HydrationStatusErrorPathTests.cs new file mode 100644 index 000000000..77d9b7ebf --- /dev/null +++ b/GVFS/GVFS.UnitTests/Common/HydrationStatusErrorPathTests.cs @@ -0,0 +1,227 @@ +using GVFS.Common; +using GVFS.Common.Git; +using GVFS.Common.NamedPipes; +using GVFS.Common.Tracing; +using GVFS.Tests.Should; +using GVFS.UnitTests.Mock.Common; +using GVFS.UnitTests.Mock.FileSystem; +using GVFS.UnitTests.Mock.Git; +using NUnit.Framework; +using System.IO; +using System.Linq; +using System.Threading; + +namespace GVFS.UnitTests.Common +{ + [TestFixture] + public class HydrationStatusErrorPathTests + { + private const string HeadTreeId = "0123456789012345678901234567890123456789"; + private const int HeadPathCount = 42; + + private MockFileSystem fileSystem; + private MockGitProcess gitProcess; + private MockTracer tracer; + private GVFSContext context; + private string gitParentPath; + private string gvfsMetadataPath; + private MockDirectory enlistmentDirectory; + + [SetUp] + public void Setup() + { + this.tracer = new MockTracer(); + + string enlistmentRoot = Path.Combine("mock:", "GVFS", "UnitTests", "Repo"); + string statusCachePath = Path.Combine("mock:", "GVFS", "UnitTests", "Repo", GVFSPlatform.Instance.Constants.DotGVFSRoot, "gitStatusCache"); + + this.gitProcess = new MockGitProcess(); + 
this.gitProcess.SetExpectedCommandResult($"--no-optional-locks status \"--serialize={statusCachePath}", () => new GitProcess.Result(string.Empty, string.Empty, 0), true); + MockGVFSEnlistment enlistment = new MockGVFSEnlistment(enlistmentRoot, "fake://repoUrl", "fake://gitBinPath", this.gitProcess); + enlistment.InitializeCachePathsFromKey("fake:\\gvfsSharedCache", "fakeCacheKey"); + + this.gitParentPath = enlistment.WorkingDirectoryBackingRoot; + this.gvfsMetadataPath = enlistment.DotGVFSRoot; + + this.enlistmentDirectory = new MockDirectory( + enlistmentRoot, + new MockDirectory[] + { + new MockDirectory(this.gitParentPath, folders: null, files: null), + }, + null); + + this.enlistmentDirectory.CreateFile(Path.Combine(this.gitParentPath, ".git", "config"), ".git config Contents", createDirectories: true); + this.enlistmentDirectory.CreateFile(Path.Combine(this.gitParentPath, ".git", "HEAD"), ".git HEAD Contents", createDirectories: true); + this.enlistmentDirectory.CreateFile(Path.Combine(this.gitParentPath, ".git", "logs", "HEAD"), "HEAD Contents", createDirectories: true); + this.enlistmentDirectory.CreateFile(Path.Combine(this.gitParentPath, ".git", "info", "always_exclude"), "always_exclude Contents", createDirectories: true); + this.enlistmentDirectory.CreateDirectory(Path.Combine(this.gitParentPath, ".git", "objects", "pack")); + + this.fileSystem = new MockFileSystem(this.enlistmentDirectory); + this.fileSystem.AllowMoveFile = true; + this.fileSystem.DeleteNonExistentFileThrowsException = false; + + this.context = new GVFSContext( + this.tracer, + this.fileSystem, + new MockGitRepo(this.tracer, enlistment, this.fileSystem), + enlistment); + } + + [TearDown] + public void TearDown() + { + this.fileSystem = null; + this.gitProcess = null; + this.tracer = null; + this.context = null; + } + + #region HydrationStatus.Response TryParse error paths + + [TestCase(null)] + [TestCase("")] + public void TryParse_NullOrEmpty_ReturnsFalse(string body) + { + bool result 
= NamedPipeMessages.HydrationStatus.Response.TryParse(body, out NamedPipeMessages.HydrationStatus.Response response); + Assert.IsFalse(result); + Assert.IsNull(response); + } + + [TestCase("1,2,3")] + [TestCase("1,2,3,4,5")] + public void TryParse_TooFewParts_ReturnsFalse(string body) + { + bool result = NamedPipeMessages.HydrationStatus.Response.TryParse(body, out NamedPipeMessages.HydrationStatus.Response response); + Assert.IsFalse(result); + Assert.IsNull(response); + } + + [TestCase("abc,2,3,4,5,6")] + [TestCase("1,2,three,4,5,6")] + [TestCase("1,2,3,4,5,six")] + public void TryParse_NonIntegerValues_ReturnsFalse(string body) + { + bool result = NamedPipeMessages.HydrationStatus.Response.TryParse(body, out NamedPipeMessages.HydrationStatus.Response response); + Assert.IsFalse(result); + Assert.IsNull(response); + } + + [TestCase("-1,0,0,0,10,5")] + [TestCase("0,-1,0,0,10,5")] + [TestCase("0,0,-1,0,10,5")] + [TestCase("0,0,0,-1,10,5")] + public void TryParse_NegativeCounts_ReturnsFalse(string body) + { + bool result = NamedPipeMessages.HydrationStatus.Response.TryParse(body, out NamedPipeMessages.HydrationStatus.Response response); + Assert.IsFalse(result); + } + + [TestCase("100,0,100,0,50,5")] + [TestCase("0,100,0,100,10,5")] + public void TryParse_HydratedExceedsTotal_ReturnsFalse(string body) + { + bool result = NamedPipeMessages.HydrationStatus.Response.TryParse(body, out NamedPipeMessages.HydrationStatus.Response response); + Assert.IsFalse(result); + } + + [TestCase] + public void TryParse_ValidResponse_Succeeds() + { + bool result = NamedPipeMessages.HydrationStatus.Response.TryParse( + "10,5,3,2,100,50", + out NamedPipeMessages.HydrationStatus.Response response); + + Assert.IsTrue(result); + Assert.AreEqual(10, response.PlaceholderFileCount); + Assert.AreEqual(5, response.PlaceholderFolderCount); + Assert.AreEqual(3, response.ModifiedFileCount); + Assert.AreEqual(2, response.ModifiedFolderCount); + Assert.AreEqual(100, response.TotalFileCount); + 
Assert.AreEqual(50, response.TotalFolderCount); + Assert.AreEqual(13, response.HydratedFileCount); + Assert.AreEqual(7, response.HydratedFolderCount); + } + + [TestCase] + public void TryParse_ExtraFields_IgnoredAndSucceeds() + { + bool result = NamedPipeMessages.HydrationStatus.Response.TryParse( + "10,5,3,2,100,50,extra,fields", + out NamedPipeMessages.HydrationStatus.Response response); + + Assert.IsTrue(result); + Assert.AreEqual(10, response.PlaceholderFileCount); + Assert.AreEqual(100, response.TotalFileCount); + } + + [TestCase] + public void TryParse_ZeroCounts_IsValid() + { + bool result = NamedPipeMessages.HydrationStatus.Response.TryParse( + "0,0,0,0,0,0", + out NamedPipeMessages.HydrationStatus.Response response); + + Assert.IsTrue(result); + Assert.IsTrue(response.IsValid); + } + + [TestCase] + public void ToBody_RoundTrips_WithTryParse() + { + NamedPipeMessages.HydrationStatus.Response original = new NamedPipeMessages.HydrationStatus.Response + { + PlaceholderFileCount = 42, + PlaceholderFolderCount = 10, + ModifiedFileCount = 8, + ModifiedFolderCount = 3, + TotalFileCount = 1000, + TotalFolderCount = 200, + }; + + string body = original.ToBody(); + bool result = NamedPipeMessages.HydrationStatus.Response.TryParse(body, out NamedPipeMessages.HydrationStatus.Response parsed); + + Assert.IsTrue(result); + Assert.AreEqual(original.PlaceholderFileCount, parsed.PlaceholderFileCount); + Assert.AreEqual(original.PlaceholderFolderCount, parsed.PlaceholderFolderCount); + Assert.AreEqual(original.ModifiedFileCount, parsed.ModifiedFileCount); + Assert.AreEqual(original.ModifiedFolderCount, parsed.ModifiedFolderCount); + Assert.AreEqual(original.TotalFileCount, parsed.TotalFileCount); + Assert.AreEqual(original.TotalFolderCount, parsed.TotalFolderCount); + } + + [TestCase] + public void ToDisplayMessage_InvalidResponse_ReturnsNull() + { + NamedPipeMessages.HydrationStatus.Response response = new NamedPipeMessages.HydrationStatus.Response + { + 
PlaceholderFileCount = -1, + TotalFileCount = 100, + }; + + Assert.IsNull(response.ToDisplayMessage()); + } + + [TestCase] + public void ToDisplayMessage_ValidResponse_FormatsCorrectly() + { + NamedPipeMessages.HydrationStatus.Response response = new NamedPipeMessages.HydrationStatus.Response + { + PlaceholderFileCount = 40, + PlaceholderFolderCount = 10, + ModifiedFileCount = 10, + ModifiedFolderCount = 5, + TotalFileCount = 100, + TotalFolderCount = 50, + }; + + string message = response.ToDisplayMessage(); + Assert.IsNotNull(message); + Assert.That(message, Does.Contain("50%")); + Assert.That(message, Does.Contain("30%")); + } + + #endregion + } +} diff --git a/GVFS/GVFS.UnitTests/Common/LibGit2RepoSafeDirectoryTests.cs b/GVFS/GVFS.UnitTests/Common/LibGit2RepoSafeDirectoryTests.cs new file mode 100644 index 000000000..47fd8acd7 --- /dev/null +++ b/GVFS/GVFS.UnitTests/Common/LibGit2RepoSafeDirectoryTests.cs @@ -0,0 +1,283 @@ +using GVFS.Common.Git; +using GVFS.Common.Tracing; +using GVFS.Tests.Should; +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.IO; + +namespace GVFS.UnitTests.Common +{ + [TestFixture] + public class LibGit2RepoSafeDirectoryTests + { + // ─────────────────────────────────────────────── + // Layer 1 – NormalizePathForSafeDirectoryComparison (pure string tests) + // ─────────────────────────────────────────────── + + [TestCase(@"C:\Repos\Foo", "C:/REPOS/FOO")] + [TestCase(@"c:\repos\foo", "C:/REPOS/FOO")] + [TestCase("c:/repos/foo", "C:/REPOS/FOO")] + [TestCase("C:/Repos/Foo/", "C:/REPOS/FOO")] + [TestCase(@"C:\Repos\Foo\", "C:/REPOS/FOO")] + [TestCase("C:/Repos/Foo///", "C:/REPOS/FOO")] + [TestCase(@"C:\Repos/Mixed\Path", "C:/REPOS/MIXED/PATH")] + [TestCase("already/normalized", "ALREADY/NORMALIZED")] + public void NormalizePathForSafeDirectoryComparison_ProducesExpectedResult(string input, string expected) + { + LibGit2Repo.NormalizePathForSafeDirectoryComparison(input).ShouldEqual(expected); + } + + 
[TestCase(null)] + [TestCase("")] + public void NormalizePathForSafeDirectoryComparison_HandlesNullAndEmpty(string input) + { + LibGit2Repo.NormalizePathForSafeDirectoryComparison(input).ShouldEqual(input); + } + + [TestCase(@"C:\Repos\Foo", "c:/repos/foo")] + [TestCase(@"C:\Repos\Foo", @"c:\Repos\Foo")] + [TestCase("C:/Repos/Foo/", @"c:\repos\foo")] + public void NormalizePathForSafeDirectoryComparison_CaseInsensitiveMatch(string a, string b) + { + LibGit2Repo.NormalizePathForSafeDirectoryComparison(a).ShouldEqual(LibGit2Repo.NormalizePathForSafeDirectoryComparison(b)); + } + + // ─────────────────────────────────────────────── + // Layer 2 – Constructor control-flow tests via mock + // Tests go through the public LibGit2Repo(ITracer, string) + // constructor, which is the real entry point. + // ─────────────────────────────────────────────── + + [TestCase] + public void Constructor_OwnershipError_WithMatchingConfigEntry_OpensSuccessfully() + { + // First Open() fails with ownership error, config has a case-variant match, + // second Open() with the configured path succeeds → constructor completes. + string requestedPath = @"C:\Repos\MyProject"; + string configuredPath = @"c:\repos\myproject"; + + using (MockSafeDirectoryRepo repo = MockSafeDirectoryRepo.Create( + requestedPath, + safeDirectoryEntries: new[] { configuredPath }, + openableRepos: new HashSet(StringComparer.Ordinal) { configuredPath })) + { + // Constructor completed without throwing — the workaround succeeded. + repo.OpenedPaths.ShouldContain(p => p == configuredPath); + } + } + + [TestCase] + public void Constructor_OwnershipError_NoMatchingConfigEntry_Throws() + { + // Open() fails with ownership error, config has no matching entry → throws. 
+ string requestedPath = @"C:\Repos\MyProject"; + + Assert.Throws(() => + { + MockSafeDirectoryRepo.Create( + requestedPath, + safeDirectoryEntries: new[] { @"D:\Other\Repo" }, + openableRepos: new HashSet(StringComparer.Ordinal)); + }); + } + + [TestCase] + public void Constructor_OwnershipError_MatchButOpenFails_Throws() + { + // Open() fails with ownership error, config entry matches but + // the retry also fails → throws. + string requestedPath = @"C:\Repos\MyProject"; + string configuredPath = @"c:\repos\myproject"; + + Assert.Throws(() => + { + MockSafeDirectoryRepo.Create( + requestedPath, + safeDirectoryEntries: new[] { configuredPath }, + openableRepos: new HashSet(StringComparer.Ordinal)); + }); + } + + [TestCase] + public void Constructor_OwnershipError_EmptyConfig_Throws() + { + string requestedPath = @"C:\Repos\MyProject"; + + Assert.Throws(() => + { + MockSafeDirectoryRepo.Create( + requestedPath, + safeDirectoryEntries: Array.Empty(), + openableRepos: new HashSet(StringComparer.Ordinal)); + }); + } + + [TestCase] + public void Constructor_OwnershipError_MultipleEntries_PicksCorrectMatch() + { + // Config has several entries; only one is a case-variant match. + string requestedPath = @"C:\Repos\Target"; + string correctConfigEntry = @"c:/repos/target"; + + using (MockSafeDirectoryRepo repo = MockSafeDirectoryRepo.Create( + requestedPath, + safeDirectoryEntries: new[] + { + @"D:\Other\Repo", + correctConfigEntry, + @"E:\Unrelated\Path", + }, + openableRepos: new HashSet(StringComparer.Ordinal) + { + correctConfigEntry, + })) + { + repo.OpenedPaths.ShouldContain(p => p == correctConfigEntry); + } + } + + [TestCase] + public void Constructor_NonOwnershipError_Throws() + { + // Open() fails with a different error (not ownership) → throws + // without attempting safe.directory workaround. 
+ string requestedPath = @"C:\Repos\MyProject"; + + Assert.Throws(() => + { + MockSafeDirectoryRepo.Create( + requestedPath, + safeDirectoryEntries: new[] { requestedPath }, + openableRepos: new HashSet(StringComparer.Ordinal), + nativeError: "repository not found"); + }); + + MockSafeDirectoryRepo.LastCreatedInstance + .SafeDirectoryCheckAttempted + .ShouldBeFalse("Safe.directory workaround should not be attempted for non-ownership errors"); + } + + [TestCase] + public void Constructor_OpenSucceedsFirstTime_NoWorkaround() + { + // Open() succeeds immediately → no safe.directory logic triggered. + string requestedPath = @"C:\Repos\MyProject"; + + using (MockSafeDirectoryRepo repo = MockSafeDirectoryRepo.Create( + requestedPath, + safeDirectoryEntries: Array.Empty(), + openableRepos: new HashSet(StringComparer.Ordinal) { requestedPath })) + { + // Only one Open call (the initial one), no retry. + repo.OpenedPaths.Count.ShouldEqual(1); + repo.OpenedPaths.ShouldContain(p => p == requestedPath); + } + } + + /// + /// Mock that intercepts all native P/Invoke calls so the public + /// constructor can be exercised without touching libgit2. + /// Uses thread-static config to work around virtual-call-from- + /// constructor ordering (base ctor runs before derived fields init). + /// + private class MockSafeDirectoryRepo : LibGit2Repo + { + [ThreadStatic] + private static MockConfig pendingConfig; + + [ThreadStatic] + private static MockSafeDirectoryRepo lastCreatedInstance; + + private string[] safeDirectoryEntries; + private HashSet openableRepos; + private string nativeError; + + public List OpenedPaths { get; } = new List(); + public bool SafeDirectoryCheckAttempted { get; private set; } + + /// + /// Returns the most recently constructed instance on the current + /// thread, even if the constructor threw an exception. 
+ /// + public static MockSafeDirectoryRepo LastCreatedInstance => lastCreatedInstance; + + private MockSafeDirectoryRepo(ITracer tracer, string repoPath) + : base(tracer, repoPath) + { + // Fields already populated from pendingConfig by the time + // virtual methods are called from base ctor. + } + + public static MockSafeDirectoryRepo Create( + string repoPath, + string[] safeDirectoryEntries, + HashSet openableRepos, + string nativeError = "repository path '/some/path' is not owned by current user") + { + pendingConfig = new MockConfig + { + SafeDirectoryEntries = safeDirectoryEntries, + OpenableRepos = openableRepos, + NativeError = nativeError, + }; + + try + { + return new MockSafeDirectoryRepo(NullTracer.Instance, repoPath); + } + finally + { + pendingConfig = null; + } + } + + protected override void InitNative() + { + // Grab config from thread-static before base ctor proceeds. + this.safeDirectoryEntries = pendingConfig.SafeDirectoryEntries; + this.openableRepos = pendingConfig.OpenableRepos; + this.nativeError = pendingConfig.NativeError; + lastCreatedInstance = this; + } + + protected override void ShutdownNative() + { + } + + protected override string GetLastNativeError() + { + return this.nativeError; + } + + protected override void GetSafeDirectoryConfigEntries(MultiVarConfigCallback callback) + { + this.SafeDirectoryCheckAttempted = true; + foreach (string entry in this.safeDirectoryEntries) + { + callback(entry); + } + } + + protected override Native.ResultCode TryOpenRepo(string path, out IntPtr repoHandle) + { + this.OpenedPaths.Add(path); + repoHandle = IntPtr.Zero; + return this.openableRepos.Contains(path) + ? 
Native.ResultCode.Success + : Native.ResultCode.Failure; + } + + protected override void Dispose(bool disposing) + { + } + + private class MockConfig + { + public string[] SafeDirectoryEntries; + public HashSet OpenableRepos; + public string NativeError; + } + } + } +} diff --git a/GVFS/GVFS.UnitTests/Common/MissingTreeTrackerTests.cs b/GVFS/GVFS.UnitTests/Common/MissingTreeTrackerTests.cs new file mode 100644 index 000000000..a13a34554 --- /dev/null +++ b/GVFS/GVFS.UnitTests/Common/MissingTreeTrackerTests.cs @@ -0,0 +1,517 @@ +using GVFS.Common; +using GVFS.Tests.Should; +using GVFS.UnitTests.Mock.Common; +using NUnit.Framework; + +namespace GVFS.UnitTests.Common +{ + [TestFixture] + public class MissingTreeTrackerTests + { + private static MissingTreeTracker CreateTracker(int treeCapacity) + { + return new MissingTreeTracker(new MockTracer(), treeCapacity); + } + + // ------------------------------------------------------------------------- + // AddMissingRootTree + // ------------------------------------------------------------------------- + + [TestCase] + public void AddMissingRootTree_SingleTreeAndCommit() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + tracker.AddMissingRootTree("tree1", "commit1"); + + tracker.TryGetCommits("tree1", out string[] commits).ShouldEqual(true); + commits.Length.ShouldEqual(1); + commits[0].ShouldEqual("commit1"); + tracker.GetHighestMissingTreeCount(commits, out _).ShouldEqual(1); + } + + [TestCase] + public void AddMissingRootTree_MultipleTreesForSameCommit() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree2", "commit1"); + tracker.AddMissingRootTree("tree3", "commit1"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(3); + + tracker.TryGetCommits("tree1", out string[] c1).ShouldEqual(true); + c1[0].ShouldEqual("commit1"); + + tracker.TryGetCommits("tree2", out string[] 
c2).ShouldEqual(true); + c2[0].ShouldEqual("commit1"); + + tracker.TryGetCommits("tree3", out string[] c3).ShouldEqual(true); + c3[0].ShouldEqual("commit1"); + } + + [TestCase] + public void AddMissingRootTree_SameTreeAddedTwiceToSameCommit() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree1", "commit1"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(1); + } + + [TestCase] + public void AddMissingRootTree_SameTreeAddedToMultipleCommits() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree1", "commit2"); + + // tree1 is now tracked under both commits + tracker.TryGetCommits("tree1", out string[] commits).ShouldEqual(true); + commits.Length.ShouldEqual(2); + + // Both commits each have 1 tree + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(1); + } + + [TestCase] + public void AddMissingRootTree_MultipleTrees_ChecksCount() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(1); + + tracker.AddMissingRootTree("tree2", "commit1"); + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(2); + + tracker.AddMissingRootTree("tree3", "commit1"); + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(3); + + tracker.AddMissingRootTree("tree4", "commit1"); + tracker.AddMissingRootTree("tree5", "commit1"); + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(5); + } + + // ------------------------------------------------------------------------- + // AddMissingSubTrees + // 
------------------------------------------------------------------------- + + [TestCase] + public void AddMissingSubTrees_AddsSubTreesUnderParentsCommits() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + tracker.AddMissingRootTree("rootTree", "commit1"); + tracker.AddMissingSubTrees("rootTree", new[] { "sub1", "sub2" }); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(3); + + tracker.TryGetCommits("sub1", out string[] c1).ShouldEqual(true); + c1[0].ShouldEqual("commit1"); + + tracker.TryGetCommits("sub2", out string[] c2).ShouldEqual(true); + c2[0].ShouldEqual("commit1"); + } + + [TestCase] + public void AddMissingSubTrees_PropagatesAcrossAllSharingCommits() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + // Two commits share the same root tree + tracker.AddMissingRootTree("rootTree", "commit1"); + tracker.AddMissingRootTree("rootTree", "commit2"); + + tracker.AddMissingSubTrees("rootTree", new[] { "sub1" }); + + // sub1 should be tracked under both commits + tracker.TryGetCommits("sub1", out string[] commits).ShouldEqual(true); + commits.Length.ShouldEqual(2); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(2); + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(2); + } + + [TestCase] + public void AddMissingSubTrees_NoOp_WhenParentNotTracked() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + // Should not throw; parent is not tracked + tracker.AddMissingSubTrees("unknownParent", new[] { "sub1" }); + + tracker.TryGetCommits("sub1", out _).ShouldEqual(false); + } + + [TestCase] + public void AddMissingSubTrees_SkipsCommitEvictedDuringLoop() + { + // treeCapacity = 2: rootTree fills slot 1, rootTree2 fills slot 2. + // commit1 and commit2 both share rootTree (1 unique tree so far). + // commit3 holds rootTree2 (2 unique trees, at capacity). 
+ // AddMissingSubTrees(rootTree, [sub1]) must add sub1 to commit1 then commit2. + // Adding sub1 for commit1 fills the 3rd slot, which evicts the LRU commit. + // commit2 is LRU (added to the tracker last among commit1/commit2 and then not used + // again, while commit1 just got used), so it is evicted before we process commit2. + // The loop must skip commit2 rather than crashing. + MissingTreeTracker tracker = CreateTracker(treeCapacity: 2); + + tracker.AddMissingRootTree("rootTree", "commit1"); + tracker.AddMissingRootTree("rootTree", "commit2"); + tracker.AddMissingRootTree("rootTree2", "commit3"); + + // Does not throw, and sub1 ends up under whichever commit survived eviction + tracker.AddMissingSubTrees("rootTree", new[] { "sub1" }); + + // Exactly one of commit1/commit2 was evicted; sub1 exists under the survivor + bool commit1HasSub1 = tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _) == 2; + bool commit2HasSub1 = tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _) == 2; + (commit1HasSub1 || commit2HasSub1).ShouldEqual(true); + (commit1HasSub1 && commit2HasSub1).ShouldEqual(false); + } + + [TestCase] + public void AddMissingSubTrees_DoesNotEvictIfOnlyOneCommit() + { + /* This shouldn't be possible if user has a proper threshold and is marking commits + * as completed, but test to be safe. 
*/ + MissingTreeTracker tracker = CreateTracker(treeCapacity: 2); + tracker.AddMissingRootTree("rootTree", "commit1"); + tracker.AddMissingSubTrees("rootTree", new[] { "sub1" }); + tracker.AddMissingSubTrees("rootTree", new[] { "sub2" }); + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(3); + } + + // ------------------------------------------------------------------------- + // TryGetCommits + // ------------------------------------------------------------------------- + + [TestCase] + public void TryGetCommits_NonExistentTree() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + tracker.TryGetCommits("nonexistent", out string[] commits).ShouldEqual(false); + commits.ShouldBeNull(); + } + + [TestCase] + public void TryGetCommits_MarksAllCommitsAsRecentlyUsed() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 3); + + tracker.AddMissingRootTree("sharedTree", "commit1"); + tracker.AddMissingRootTree("sharedTree", "commit2"); + tracker.AddMissingRootTree("tree2", "commit3"); + tracker.AddMissingRootTree("tree3", "commit4"); + + // Access commit1 and commit2 via TryGetCommits + tracker.TryGetCommits("sharedTree", out _); + + // Adding a fourth tree should evict commit3 (oldest unused) + tracker.AddMissingRootTree("tree4", "commit5"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit3" }, out _).ShouldEqual(0); + tracker.GetHighestMissingTreeCount(new[] { "commit4" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit5" }, out _).ShouldEqual(1); + } + + // ------------------------------------------------------------------------- + // GetHighestMissingTreeCount + // ------------------------------------------------------------------------- + + [TestCase] + public void GetHighestMissingTreeCount_NonExistentCommit() + { + 
MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + tracker.GetHighestMissingTreeCount(new[] { "nonexistent" }, out string highest).ShouldEqual(0); + highest.ShouldBeNull(); + } + + [TestCase] + public void GetHighestMissingTreeCount_ReturnsCommitWithMostTrees() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree2", "commit1"); + tracker.AddMissingRootTree("tree3", "commit2"); + + int count = tracker.GetHighestMissingTreeCount(new[] { "commit1", "commit2" }, out string highest); + count.ShouldEqual(2); + highest.ShouldEqual("commit1"); + } + + [TestCase] + public void GetHighestMissingTreeCount_DoesNotUpdateLru() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 3); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree2", "commit2"); + tracker.AddMissingRootTree("tree3", "commit3"); + + // Query commit1's count (should not update LRU) + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _); + + // Adding a fourth commit should still evict commit1 (oldest) + tracker.AddMissingRootTree("tree4", "commit4"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(0); + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit3" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit4" }, out _).ShouldEqual(1); + } + + // ------------------------------------------------------------------------- + // MarkCommitComplete (cascade removal) + // ------------------------------------------------------------------------- + + [TestCase] + public void MarkCommitComplete_RemovesAllTreesForCommit() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree2", "commit1"); + 
tracker.AddMissingRootTree("tree3", "commit1"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(3); + + tracker.MarkCommitComplete("commit1"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(0); + tracker.TryGetCommits("tree1", out _).ShouldEqual(false); + tracker.TryGetCommits("tree2", out _).ShouldEqual(false); + tracker.TryGetCommits("tree3", out _).ShouldEqual(false); + } + + [TestCase] + public void MarkCommitComplete_NonExistentCommit() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + // Should not throw + tracker.MarkCommitComplete("nonexistent"); + } + + [TestCase] + public void MarkCommitComplete_CascadesSharedTreesToOtherCommits() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + // commit1 and commit2 share tree1; commit2 also has tree2 + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree1", "commit2"); + tracker.AddMissingRootTree("tree2", "commit2"); + + tracker.MarkCommitComplete("commit1"); + + // tree1 was in commit1, so it should be removed from commit2 as well + tracker.TryGetCommits("tree1", out _).ShouldEqual(false); + + // tree2 is unrelated to commit1, so commit2 still has it + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(1); + tracker.TryGetCommits("tree2", out string[] c2).ShouldEqual(true); + c2[0].ShouldEqual("commit2"); + } + + [TestCase] + public void MarkCommitComplete_RemovesOtherCommitWhenItBecomesEmpty() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + // commit2's only tree is shared with commit1 + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree1", "commit2"); + + tracker.MarkCommitComplete("commit1"); + + // commit2 had only tree1, which was cascaded away, so commit2 should be gone too + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(0); + tracker.TryGetCommits("tree1", out 
_).ShouldEqual(false); + } + + [TestCase] + public void MarkCommitComplete_DoesNotAffectUnrelatedCommits() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 10); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree2", "commit2"); + + tracker.MarkCommitComplete("commit1"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(0); + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(1); + tracker.TryGetCommits("tree2", out string[] c).ShouldEqual(true); + c[0].ShouldEqual("commit2"); + } + + // ------------------------------------------------------------------------- + // LRU eviction (no cascade) + // ------------------------------------------------------------------------- + + [TestCase] + public void LruEviction_EvictsOldestCommit() + { + // treeCapacity = 3 trees; one tree per commit + MissingTreeTracker tracker = CreateTracker(treeCapacity: 3); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree2", "commit2"); + tracker.AddMissingRootTree("tree3", "commit3"); + + // Adding a fourth tree exceeds treeCapacity, so commit1 (LRU) is evicted + tracker.AddMissingRootTree("tree4", "commit4"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(0); + tracker.TryGetCommits("tree1", out _).ShouldEqual(false); + + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit3" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit4" }, out _).ShouldEqual(1); + } + + [TestCase] + public void LruEviction_DoesNotCascadeSharedTreesToOtherCommits() + { + // treeCapacity = 3 trees; tree1 is shared so only 2 unique trees + tree3 = 3 total + MissingTreeTracker tracker = CreateTracker(treeCapacity: 3); + + // tree1 is shared between commit1 and commit2 (counts as 1 unique tree) + tracker.AddMissingRootTree("tree1", "commit1"); + 
tracker.AddMissingRootTree("tree2", "commit1"); + tracker.AddMissingRootTree("tree1", "commit2"); + tracker.AddMissingRootTree("tree3", "commit3"); + + // tree4 is the 4th unique tree, exceeding treeCapacity; evicts commit1 (LRU) + // which removes tree2, freeing up capacity. + tracker.AddMissingRootTree("tree4", "commit4"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(0); + + // tree1 is still missing (not yet downloaded), so commit2 retains it + tracker.TryGetCommits("tree1", out string[] commits).ShouldEqual(true); + commits.Length.ShouldEqual(1); + commits[0].ShouldEqual("commit2"); + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(1); + } + + [TestCase] + public void LruEviction_AddingTreeToExistingCommitUpdatesLru() + { + // treeCapacity = 4 trees; tree1, tree2, tree3 fill it, then tree1b re-uses commit1 + MissingTreeTracker tracker = CreateTracker(treeCapacity: 4); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree2", "commit2"); + tracker.AddMissingRootTree("tree3", "commit3"); + + // Adding tree1b to commit1 marks commit1 as recently used (it's a new unique tree) + tracker.AddMissingRootTree("tree1b", "commit1"); + + // tree4 is the 5th unique tree, exceeding treeCapacity; commit2 is now LRU + tracker.AddMissingRootTree("tree4", "commit4"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(2); + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(0); + tracker.GetHighestMissingTreeCount(new[] { "commit3" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit4" }, out _).ShouldEqual(1); + } + + [TestCase] + public void LruEviction_MultipleTreesPerCommit_EvictsEntireCommit() + { + // treeCapacity = 4 trees; commit1 holds 3, commit2 holds 1 + MissingTreeTracker tracker = CreateTracker(treeCapacity: 4); + + tracker.AddMissingRootTree("tree1", "commit1"); + 
tracker.AddMissingRootTree("tree2", "commit1"); + tracker.AddMissingRootTree("tree3", "commit1"); + tracker.AddMissingRootTree("tree4", "commit2"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(3); + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(1); + + // tree5 is the 5th unique tree; evict LRU (commit1) freeing 3 slots, then add tree5 + tracker.AddMissingRootTree("tree5", "commit3"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(0); + tracker.TryGetCommits("tree1", out _).ShouldEqual(false); + tracker.TryGetCommits("tree2", out _).ShouldEqual(false); + tracker.TryGetCommits("tree3", out _).ShouldEqual(false); + + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit3" }, out _).ShouldEqual(1); + } + + [TestCase] + public void LruEviction_CapacityOne() + { + MissingTreeTracker tracker = CreateTracker(treeCapacity: 1); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(1); + + tracker.AddMissingRootTree("tree2", "commit2"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(0); + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(1); + } + + [TestCase] + public void LruEviction_ManyTreesOneCommit_ExceedsCapacity() + { + // treeCapacity = 3 trees; all trees belong to commit1 + // Adding a 4th tree must evict commit1 (the only commit) to make room + MissingTreeTracker tracker = CreateTracker(treeCapacity: 3); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree2", "commit1"); + tracker.AddMissingRootTree("tree3", "commit1"); + + // tree4 exceeds the tree treeCapacity; the LRU commit (commit1) is evicted + // and then commit2 with tree4 is added fresh + tracker.AddMissingRootTree("tree4", "commit2"); + + 
tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(0); + tracker.TryGetCommits("tree1", out _).ShouldEqual(false); + tracker.TryGetCommits("tree2", out _).ShouldEqual(false); + tracker.TryGetCommits("tree3", out _).ShouldEqual(false); + + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(1); + } + + [TestCase] + public void LruEviction_TryGetCommitsUpdatesLru() + { + // treeCapacity = 3 trees, one per commit + MissingTreeTracker tracker = CreateTracker(treeCapacity: 3); + + tracker.AddMissingRootTree("tree1", "commit1"); + tracker.AddMissingRootTree("tree2", "commit2"); + tracker.AddMissingRootTree("tree3", "commit3"); + + // Access commit1 via TryGetCommits (marks it as recently used) + tracker.TryGetCommits("tree1", out _); + + // tree4 exceeds treeCapacity; commit2 is now LRU + tracker.AddMissingRootTree("tree4", "commit4"); + + tracker.GetHighestMissingTreeCount(new[] { "commit1" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit2" }, out _).ShouldEqual(0); + tracker.GetHighestMissingTreeCount(new[] { "commit3" }, out _).ShouldEqual(1); + tracker.GetHighestMissingTreeCount(new[] { "commit4" }, out _).ShouldEqual(1); + } + } +} diff --git a/GVFS/GVFS.UnitTests/Common/RetryWrapperTests.cs b/GVFS/GVFS.UnitTests/Common/RetryWrapperTests.cs index 81c6c023b..528252fa3 100644 --- a/GVFS/GVFS.UnitTests/Common/RetryWrapperTests.cs +++ b/GVFS/GVFS.UnitTests/Common/RetryWrapperTests.cs @@ -12,6 +12,12 @@ namespace GVFS.UnitTests.Common [TestFixture] public class RetryWrapperTests { + [SetUp] + public void SetUp() + { + RetryCircuitBreaker.Reset(); + } + [TestCase] [Category(CategoryConstants.ExceptionExpected)] public void WillRetryOnIOException() @@ -233,5 +239,118 @@ public void WillRetryWhenRequested() actualTries.ShouldEqual(ExpectedTries); actualFailures.ShouldEqual(ExpectedFailures); } + + [TestCase] + [Category(CategoryConstants.ExceptionExpected)] + public void 
CircuitBreakerOpensAfterConsecutiveFailures() + { + const int Threshold = 5; + const int CooldownMs = 5000; + RetryCircuitBreaker.Configure(Threshold, CooldownMs); + + // Generate enough failures to trip the circuit breaker + for (int i = 0; i < Threshold; i++) + { + RetryWrapper wrapper = new RetryWrapper(1, CancellationToken.None, exponentialBackoffBase: 0); + wrapper.Invoke(tryCount => throw new IOException("simulated failure")); + } + + RetryCircuitBreaker.IsOpen.ShouldBeTrue("Circuit breaker should be open after threshold failures"); + + // Next invocation should fail fast without calling the callback + int callbackInvocations = 0; + RetryWrapper dut = new RetryWrapper(5, CancellationToken.None, exponentialBackoffBase: 0); + RetryWrapper.InvocationResult result = dut.Invoke( + tryCount => + { + callbackInvocations++; + return new RetryWrapper.CallbackResult(true); + }); + + result.Succeeded.ShouldEqual(false); + callbackInvocations.ShouldEqual(0); + } + + [TestCase] + public void CircuitBreakerResetsOnSuccess() + { + const int Threshold = 3; + RetryCircuitBreaker.Configure(Threshold, 30_000); + + // Record failures just below threshold + for (int i = 0; i < Threshold - 1; i++) + { + RetryCircuitBreaker.RecordFailure(); + } + + RetryCircuitBreaker.IsOpen.ShouldBeFalse("Circuit should still be closed below threshold"); + + // A successful invocation resets the counter + RetryWrapper dut = new RetryWrapper(1, CancellationToken.None, exponentialBackoffBase: 0); + dut.Invoke(tryCount => new RetryWrapper.CallbackResult(true)); + + RetryCircuitBreaker.ConsecutiveFailures.ShouldEqual(0); + + // Now threshold more failures are needed to trip it again + for (int i = 0; i < Threshold - 1; i++) + { + RetryCircuitBreaker.RecordFailure(); + } + + RetryCircuitBreaker.IsOpen.ShouldBeFalse("Circuit should still be closed after reset"); + } + + [TestCase] + public void CircuitBreakerIgnoresNonRetryableErrors() + { + const int Threshold = 3; + 
RetryCircuitBreaker.Configure(Threshold, 30_000); + + // Generate non-retryable failures (e.g., 404/400) — these should NOT count + for (int i = 0; i < Threshold + 5; i++) + { + RetryWrapper wrapper = new RetryWrapper(1, CancellationToken.None, exponentialBackoffBase: 0); + wrapper.Invoke(tryCount => new RetryWrapper.CallbackResult(new Exception("404 Not Found"), shouldRetry: false)); + } + + RetryCircuitBreaker.IsOpen.ShouldBeFalse("Non-retryable errors should not trip the circuit breaker"); + RetryCircuitBreaker.ConsecutiveFailures.ShouldEqual(0); + } + + [TestCase] + [Category(CategoryConstants.ExceptionExpected)] + public void CircuitBreakerClosesAfterCooldown() + { + const int Threshold = 3; + const int CooldownMs = 100; // Very short cooldown for testing + RetryCircuitBreaker.Configure(Threshold, CooldownMs); + + // Trip the circuit breaker + for (int i = 0; i < Threshold; i++) + { + RetryWrapper wrapper = new RetryWrapper(1, CancellationToken.None, exponentialBackoffBase: 0); + wrapper.Invoke(tryCount => throw new IOException("simulated failure")); + } + + RetryCircuitBreaker.IsOpen.ShouldBeTrue("Circuit should be open"); + + // Wait for cooldown to expire + Thread.Sleep(CooldownMs + 50); + + RetryCircuitBreaker.IsOpen.ShouldBeFalse("Circuit should be closed after cooldown"); + + // Should be able to invoke successfully now + int callbackInvocations = 0; + RetryWrapper dut = new RetryWrapper(1, CancellationToken.None, exponentialBackoffBase: 0); + RetryWrapper.InvocationResult result = dut.Invoke( + tryCount => + { + callbackInvocations++; + return new RetryWrapper.CallbackResult(true); + }); + + result.Succeeded.ShouldEqual(true); + callbackInvocations.ShouldEqual(1); + } } } diff --git a/GVFS/GVFS.UnitTests/Common/WorktreeCommandParserTests.cs b/GVFS/GVFS.UnitTests/Common/WorktreeCommandParserTests.cs new file mode 100644 index 000000000..ccfa0c0a1 --- /dev/null +++ b/GVFS/GVFS.UnitTests/Common/WorktreeCommandParserTests.cs @@ -0,0 +1,176 @@ +using 
GVFS.Common; +using GVFS.Tests.Should; +using NUnit.Framework; + +namespace GVFS.UnitTests.Common +{ + [TestFixture] + public class WorktreeCommandParserTests + { + [TestCase] + public void GetSubcommandReturnsAdd() + { + string[] args = { "post-command", "worktree", "add", "-b", "branch", @"C:\wt" }; + WorktreeCommandParser.GetSubcommand(args).ShouldEqual("add"); + } + + [TestCase] + public void GetSubcommandReturnsRemove() + { + string[] args = { "pre-command", "worktree", "remove", @"C:\wt" }; + WorktreeCommandParser.GetSubcommand(args).ShouldEqual("remove"); + } + + [TestCase] + public void GetSubcommandSkipsLeadingDoubleHyphenArgs() + { + string[] args = { "post-command", "worktree", "--git-pid=1234", "add", @"C:\wt" }; + WorktreeCommandParser.GetSubcommand(args).ShouldEqual("add"); + } + + [TestCase] + public void GetSubcommandReturnsNullWhenNoSubcommand() + { + string[] args = { "post-command", "worktree" }; + WorktreeCommandParser.GetSubcommand(args).ShouldBeNull(); + } + + [TestCase] + public void GetSubcommandNormalizesToLowercase() + { + string[] args = { "post-command", "worktree", "Add" }; + WorktreeCommandParser.GetSubcommand(args).ShouldEqual("add"); + } + + [TestCase] + public void GetPathArgExtractsPathFromAddWithBranch() + { + // git worktree add -b branch C:\worktree + string[] args = { "post-command", "worktree", "add", "-b", "my-branch", @"C:\repos\wt", "--git-pid=123", "--exit_code=0" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\wt"); + } + + [TestCase] + public void GetPathArgExtractsPathFromAddWithoutBranch() + { + // git worktree add C:\worktree + string[] args = { "post-command", "worktree", "add", @"C:\repos\wt", "--git-pid=123", "--exit_code=0" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\wt"); + } + + [TestCase] + public void GetPathArgExtractsPathFromRemove() + { + string[] args = { "pre-command", "worktree", "remove", @"C:\repos\wt", "--git-pid=456" }; + 
WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\wt"); + } + + [TestCase] + public void GetPathArgExtractsPathFromRemoveWithForce() + { + string[] args = { "pre-command", "worktree", "remove", "--force", @"C:\repos\wt" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\wt"); + } + + [TestCase] + public void GetPathArgSkipsBranchNameAfterDashB() + { + // -b takes a value — the path is the arg AFTER the branch name + string[] args = { "post-command", "worktree", "add", "-b", "feature", @"C:\repos\feature" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\feature"); + } + + [TestCase] + public void GetPathArgSkipsBranchNameAfterDashCapitalB() + { + string[] args = { "post-command", "worktree", "add", "-B", "feature", @"C:\repos\feature" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\feature"); + } + + [TestCase] + public void GetPathArgSkipsAllOptionFlags() + { + // -f, -d, -q, --detach, --checkout, --lock, --no-checkout + string[] args = { "post-command", "worktree", "add", "-f", "--no-checkout", "--lock", "--reason", "testing", @"C:\repos\wt" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\wt"); + } + + [TestCase] + public void GetPathArgHandlesSeparator() + { + // After --, everything is positional + string[] args = { "post-command", "worktree", "add", "--", @"C:\repos\wt" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\wt"); + } + + [TestCase] + public void GetPathArgSkipsGitPidAndExitCode() + { + string[] args = { "post-command", "worktree", "add", @"C:\wt", "--git-pid=99", "--exit_code=0" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\wt"); + } + + [TestCase] + public void GetPathArgReturnsNullWhenNoPath() + { + string[] args = { "post-command", "worktree", "list" }; + WorktreeCommandParser.GetPathArg(args).ShouldBeNull(); + } + + [TestCase] + public void GetPositionalArgReturnsSecondPositional() + { + // git worktree move + string[] args = { 
"post-command", "worktree", "move", @"C:\old", @"C:\new" }; + WorktreeCommandParser.GetPositionalArg(args, 0).ShouldEqual(@"C:\old"); + WorktreeCommandParser.GetPositionalArg(args, 1).ShouldEqual(@"C:\new"); + } + + [TestCase] + public void GetPositionalArgReturnsNullForOutOfRangeIndex() + { + string[] args = { "post-command", "worktree", "remove", @"C:\wt" }; + WorktreeCommandParser.GetPositionalArg(args, 1).ShouldBeNull(); + } + + [TestCase] + public void GetPathArgHandlesShortArgs() + { + // Ensure single-char flags without values are skipped + string[] args = { "post-command", "worktree", "add", "-f", "-q", @"C:\repos\wt" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\wt"); + } + + [TestCase] + public void GetPathArgHandlesCombinedShortFlags() + { + // -fd = --force --detach combined into one arg + string[] args = { "post-command", "worktree", "add", "-fd", @"C:\repos\wt", "HEAD" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\wt"); + } + + [TestCase] + public void GetPathArgHandlesCombinedFlagWithBranch() + { + // -fb = --force + -b, next arg is the branch name + string[] args = { "post-command", "worktree", "add", "-fb", "my-branch", @"C:\repos\wt" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\wt"); + } + + [TestCase] + public void GetPathArgHandlesBranchValueBakedIn() + { + // -bfd = -b with value "fd" baked in, no next-arg consumption + string[] args = { "post-command", "worktree", "add", "-bfd", @"C:\repos\wt" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\wt"); + } + + [TestCase] + public void GetPathArgHandlesTwoValueOptionsFirstConsumes() + { + // -Bb = -B with value "b" baked in, no next-arg consumption + string[] args = { "post-command", "worktree", "add", "-Bb", @"C:\repos\wt" }; + WorktreeCommandParser.GetPathArg(args).ShouldEqual(@"C:\repos\wt"); + } + } +} diff --git a/GVFS/GVFS.UnitTests/Common/WorktreeEnlistmentTests.cs 
b/GVFS/GVFS.UnitTests/Common/WorktreeEnlistmentTests.cs new file mode 100644 index 000000000..2541e56ac --- /dev/null +++ b/GVFS/GVFS.UnitTests/Common/WorktreeEnlistmentTests.cs @@ -0,0 +1,158 @@ +using GVFS.Common; +using GVFS.Tests.Should; +using NUnit.Framework; +using System.IO; + +namespace GVFS.UnitTests.Common +{ + [TestFixture] + public class WorktreeEnlistmentTests + { + private string testRoot; + private string primaryRoot; + private string sharedGitDir; + private string worktreePath; + private string worktreeGitDir; + + [SetUp] + public void SetUp() + { + this.testRoot = Path.Combine(Path.GetTempPath(), "GVFSWTEnlTests_" + Path.GetRandomFileName()); + this.primaryRoot = Path.Combine(this.testRoot, "enlistment"); + string primarySrc = Path.Combine(this.primaryRoot, "src"); + this.sharedGitDir = Path.Combine(primarySrc, ".git"); + this.worktreePath = Path.Combine(this.testRoot, "agent-wt-1"); + this.worktreeGitDir = Path.Combine(this.sharedGitDir, "worktrees", "agent-wt-1"); + + Directory.CreateDirectory(this.sharedGitDir); + Directory.CreateDirectory(this.worktreeGitDir); + Directory.CreateDirectory(this.worktreePath); + Directory.CreateDirectory(Path.Combine(this.primaryRoot, ".gvfs")); + + File.WriteAllText( + Path.Combine(this.sharedGitDir, "config"), + "[core]\n\trepositoryformatversion = 0\n[remote \"origin\"]\n\turl = https://mock/repo\n"); + File.WriteAllText( + Path.Combine(this.sharedGitDir, "HEAD"), + "ref: refs/heads/main\n"); + File.WriteAllText( + Path.Combine(this.worktreePath, ".git"), + "gitdir: " + this.worktreeGitDir); + File.WriteAllText( + Path.Combine(this.worktreeGitDir, "commondir"), + "../.."); + File.WriteAllText( + Path.Combine(this.worktreeGitDir, "HEAD"), + "ref: refs/heads/agent-wt-1\n"); + File.WriteAllText( + Path.Combine(this.worktreeGitDir, "gitdir"), + Path.Combine(this.worktreePath, ".git")); + } + + [TearDown] + public void TearDown() + { + if (Directory.Exists(this.testRoot)) + { + Directory.Delete(this.testRoot, 
recursive: true); + } + } + + private GVFSEnlistment CreateWorktreeEnlistment() + { + string gitBinPath = GVFSPlatform.Instance.GitInstallation.GetInstalledGitBinPath() + ?? @"C:\Program Files\Git\cmd\git.exe"; + return GVFSEnlistment.CreateForWorktree( + this.primaryRoot, gitBinPath, authentication: null, + GVFSEnlistment.TryGetWorktreeInfo(this.worktreePath), + repoUrl: "https://mock/repo"); + } + + [TestCase] + public void IsWorktreeReturnsTrueForWorktreeEnlistment() + { + GVFSEnlistment enlistment = this.CreateWorktreeEnlistment(); + enlistment.IsWorktree.ShouldBeTrue(); + } + + [TestCase] + public void WorktreeInfoIsPopulated() + { + GVFSEnlistment enlistment = this.CreateWorktreeEnlistment(); + enlistment.Worktree.ShouldNotBeNull(); + enlistment.Worktree.Name.ShouldEqual("agent-wt-1"); + enlistment.Worktree.WorktreePath.ShouldEqual(this.worktreePath); + } + + [TestCase] + public void DotGitRootPointsToSharedGitDir() + { + GVFSEnlistment enlistment = this.CreateWorktreeEnlistment(); + enlistment.DotGitRoot.ShouldEqual(this.sharedGitDir); + } + + [TestCase] + public void WorkingDirectoryRootIsWorktreePath() + { + GVFSEnlistment enlistment = this.CreateWorktreeEnlistment(); + enlistment.WorkingDirectoryRoot.ShouldEqual(this.worktreePath); + } + + [TestCase] + public void LocalObjectsRootIsSharedGitObjects() + { + GVFSEnlistment enlistment = this.CreateWorktreeEnlistment(); + enlistment.LocalObjectsRoot.ShouldEqual( + Path.Combine(this.sharedGitDir, "objects")); + } + + [TestCase] + public void LocalObjectsRootDoesNotDoubleGitPath() + { + GVFSEnlistment enlistment = this.CreateWorktreeEnlistment(); + Assert.IsFalse( + enlistment.LocalObjectsRoot.Contains(Path.Combine(".git", ".git")), + "LocalObjectsRoot should not have doubled .git path"); + } + + [TestCase] + public void GitIndexPathUsesWorktreeGitDir() + { + GVFSEnlistment enlistment = this.CreateWorktreeEnlistment(); + enlistment.GitIndexPath.ShouldEqual( + Path.Combine(this.worktreeGitDir, "index")); + } + + 
[TestCase] + public void NamedPipeNameIncludesWorktreeSuffix() + { + GVFSEnlistment enlistment = this.CreateWorktreeEnlistment(); + Assert.IsTrue( + enlistment.NamedPipeName.Contains("_WT_AGENT-WT-1"), + "NamedPipeName should contain worktree suffix"); + } + + [TestCase] + public void DotGVFSRootIsInWorktreeGitDir() + { + GVFSEnlistment enlistment = this.CreateWorktreeEnlistment(); + Assert.IsTrue( + enlistment.DotGVFSRoot.Contains(this.worktreeGitDir), + "DotGVFSRoot should be inside worktree git dir"); + } + + [TestCase] + public void EnlistmentRootIsPrimaryRoot() + { + GVFSEnlistment enlistment = this.CreateWorktreeEnlistment(); + enlistment.EnlistmentRoot.ShouldEqual(this.primaryRoot); + } + + [TestCase] + public void RepoUrlIsReadFromSharedConfig() + { + GVFSEnlistment enlistment = this.CreateWorktreeEnlistment(); + enlistment.RepoUrl.ShouldEqual("https://mock/repo"); + } + } +} diff --git a/GVFS/GVFS.UnitTests/Common/WorktreeInfoTests.cs b/GVFS/GVFS.UnitTests/Common/WorktreeInfoTests.cs new file mode 100644 index 000000000..9ebe56963 --- /dev/null +++ b/GVFS/GVFS.UnitTests/Common/WorktreeInfoTests.cs @@ -0,0 +1,187 @@ +using GVFS.Common; +using GVFS.Tests.Should; +using NUnit.Framework; +using System.IO; + +namespace GVFS.UnitTests.Common +{ + [TestFixture] + public class WorktreeInfoTests + { + private string testRoot; + + [SetUp] + public void SetUp() + { + this.testRoot = Path.Combine(Path.GetTempPath(), "GVFSWorktreeTests_" + Path.GetRandomFileName()); + Directory.CreateDirectory(this.testRoot); + } + + [TearDown] + public void TearDown() + { + if (Directory.Exists(this.testRoot)) + { + Directory.Delete(this.testRoot, recursive: true); + } + } + + [TestCase] + public void ReturnsNullForNonWorktreeDirectory() + { + // A directory without a .git file is not a worktree + GVFSEnlistment.WorktreeInfo info = GVFSEnlistment.TryGetWorktreeInfo(this.testRoot); + info.ShouldBeNull(); + } + + [TestCase] + public void ReturnsNullWhenDotGitIsDirectory() + { + // A 
.git directory (not file) means primary enlistment, not a worktree + Directory.CreateDirectory(Path.Combine(this.testRoot, ".git")); + GVFSEnlistment.WorktreeInfo info = GVFSEnlistment.TryGetWorktreeInfo(this.testRoot); + info.ShouldBeNull(); + } + + [TestCase] + public void ReturnsNullWhenDotGitFileHasNoGitdirPrefix() + { + File.WriteAllText(Path.Combine(this.testRoot, ".git"), "not a gitdir line"); + GVFSEnlistment.WorktreeInfo info = GVFSEnlistment.TryGetWorktreeInfo(this.testRoot); + info.ShouldBeNull(); + } + + [TestCase] + public void DetectsWorktreeFromAbsoluteGitdir() + { + // Simulate a worktree: .git file pointing to .git/worktrees/ + string primaryGitDir = Path.Combine(this.testRoot, "primary", ".git"); + string worktreeGitDir = Path.Combine(primaryGitDir, "worktrees", "agent-1"); + Directory.CreateDirectory(worktreeGitDir); + + // Create commondir file pointing back to shared .git + File.WriteAllText(Path.Combine(worktreeGitDir, "commondir"), "../.."); + + // Create the worktree directory with a .git file + string worktreeDir = Path.Combine(this.testRoot, "wt"); + Directory.CreateDirectory(worktreeDir); + File.WriteAllText(Path.Combine(worktreeDir, ".git"), "gitdir: " + worktreeGitDir); + + GVFSEnlistment.WorktreeInfo info = GVFSEnlistment.TryGetWorktreeInfo(worktreeDir); + info.ShouldNotBeNull(); + info.Name.ShouldEqual("agent-1"); + info.WorktreePath.ShouldEqual(worktreeDir); + info.WorktreeGitDir.ShouldEqual(worktreeGitDir); + info.SharedGitDir.ShouldEqual(primaryGitDir); + info.PipeSuffix.ShouldEqual("_WT_AGENT-1"); + } + + [TestCase] + public void DetectsWorktreeFromRelativeGitdir() + { + // Simulate worktree with relative gitdir path + string primaryGitDir = Path.Combine(this.testRoot, "primary", ".git"); + string worktreeGitDir = Path.Combine(primaryGitDir, "worktrees", "feature-branch"); + Directory.CreateDirectory(worktreeGitDir); + + File.WriteAllText(Path.Combine(worktreeGitDir, "commondir"), "../.."); + + // Worktree as sibling of primary + 
string worktreeDir = Path.Combine(this.testRoot, "feature-branch"); + Directory.CreateDirectory(worktreeDir); + + // Use a relative path: ../primary/.git/worktrees/feature-branch + string relativePath = "../primary/.git/worktrees/feature-branch"; + File.WriteAllText(Path.Combine(worktreeDir, ".git"), "gitdir: " + relativePath); + + GVFSEnlistment.WorktreeInfo info = GVFSEnlistment.TryGetWorktreeInfo(worktreeDir); + info.ShouldNotBeNull(); + info.Name.ShouldEqual("feature-branch"); + info.PipeSuffix.ShouldEqual("_WT_FEATURE-BRANCH"); + } + + [TestCase] + public void ReturnsNullWithoutCommondirFile() + { + // Worktree git dir without a commondir file is invalid + string worktreeGitDir = Path.Combine(this.testRoot, "primary", ".git", "worktrees", "no-common"); + Directory.CreateDirectory(worktreeGitDir); + + string worktreeDir = Path.Combine(this.testRoot, "no-common"); + Directory.CreateDirectory(worktreeDir); + File.WriteAllText(Path.Combine(worktreeDir, ".git"), "gitdir: " + worktreeGitDir); + + GVFSEnlistment.WorktreeInfo info = GVFSEnlistment.TryGetWorktreeInfo(worktreeDir); + info.ShouldBeNull(); + } + + [TestCase] + public void PipeSuffixReturnsNullForNonWorktree() + { + string suffix = GVFSEnlistment.GetWorktreePipeSuffix(this.testRoot); + suffix.ShouldBeNull(); + } + + [TestCase] + public void PipeSuffixReturnsCorrectValueForWorktree() + { + string worktreeGitDir = Path.Combine(this.testRoot, "primary", ".git", "worktrees", "my-wt"); + Directory.CreateDirectory(worktreeGitDir); + File.WriteAllText(Path.Combine(worktreeGitDir, "commondir"), "../.."); + + string worktreeDir = Path.Combine(this.testRoot, "my-wt"); + Directory.CreateDirectory(worktreeDir); + File.WriteAllText(Path.Combine(worktreeDir, ".git"), "gitdir: " + worktreeGitDir); + + string suffix = GVFSEnlistment.GetWorktreePipeSuffix(worktreeDir); + suffix.ShouldEqual("_WT_MY-WT"); + } + + [TestCase] + public void ReturnsNullForNonexistentDirectory() + { + string nonexistent = 
Path.Combine(this.testRoot, "does-not-exist"); + GVFSEnlistment.WorktreeInfo info = GVFSEnlistment.TryGetWorktreeInfo(nonexistent); + info.ShouldBeNull(); + } + + [TestCase] + public void DetectsWorktreeFromSubdirectory() + { + // Set up a worktree at testRoot/wt-sub with .git file + string primaryGitDir = Path.Combine(this.testRoot, "primary", ".git"); + string worktreeGitDir = Path.Combine(primaryGitDir, "worktrees", "wt-sub"); + Directory.CreateDirectory(worktreeGitDir); + File.WriteAllText(Path.Combine(worktreeGitDir, "commondir"), "../.."); + + string worktreeDir = Path.Combine(this.testRoot, "wt-sub"); + Directory.CreateDirectory(worktreeDir); + File.WriteAllText(Path.Combine(worktreeDir, ".git"), "gitdir: " + worktreeGitDir); + + // Create a subdirectory inside the worktree + string subDir = Path.Combine(worktreeDir, "a", "b", "c"); + Directory.CreateDirectory(subDir); + + // TryGetWorktreeInfo should walk up and find the worktree root + GVFSEnlistment.WorktreeInfo info = GVFSEnlistment.TryGetWorktreeInfo(subDir); + info.ShouldNotBeNull(); + info.Name.ShouldEqual("wt-sub"); + info.WorktreePath.ShouldEqual(worktreeDir); + } + + [TestCase] + public void ReturnsNullForPrimaryFromSubdirectory() + { + // Set up a primary repo with a real .git directory + string primaryDir = Path.Combine(this.testRoot, "primary-repo"); + Directory.CreateDirectory(Path.Combine(primaryDir, ".git")); + + // Walking up from a subdirectory should find the .git dir and return null + string subDir = Path.Combine(primaryDir, "src", "folder"); + Directory.CreateDirectory(subDir); + + GVFSEnlistment.WorktreeInfo info = GVFSEnlistment.TryGetWorktreeInfo(subDir); + info.ShouldBeNull(); + } + } +} diff --git a/GVFS/GVFS.UnitTests/Common/WorktreeNestedPathTests.cs b/GVFS/GVFS.UnitTests/Common/WorktreeNestedPathTests.cs new file mode 100644 index 000000000..76d10a3b8 --- /dev/null +++ b/GVFS/GVFS.UnitTests/Common/WorktreeNestedPathTests.cs @@ -0,0 +1,126 @@ +using GVFS.Common; +using 
NUnit.Framework; +using System.IO; + +namespace GVFS.UnitTests.Common +{ + [TestFixture] + public class WorktreeNestedPathTests + { + // Basic containment + [TestCase(@"C:\repo\src\subfolder", @"C:\repo\src", true, Description = "Child path is inside directory")] + [TestCase(@"C:\repo\src", @"C:\repo\src", true, Description = "Equal path is inside directory")] + [TestCase(@"C:\repo\src\a\b\c\d", @"C:\repo\src", true, Description = "Deeply nested path is inside")] + [TestCase(@"C:\repo\src.worktrees\wt1", @"C:\repo\src", false, Description = "Path with prefix overlap is outside")] + [TestCase(@"C:\repo\src2", @"C:\repo\src", false, Description = "Sibling path is outside")] + + // Path traversal normalization + [TestCase(@"C:\repo\src\..\..\..\evil", @"C:\repo\src", false, Description = "Traversal escaping directory is outside")] + [TestCase(@"C:\repo\src\..", @"C:\repo\src", false, Description = "Traversal to parent is outside")] + [TestCase(@"C:\repo\src\..\other", @"C:\repo\src", false, Description = "Traversal to sibling is outside")] + [TestCase(@"C:\repo\src\sub\..\other", @"C:\repo\src", true, Description = "Traversal staying inside directory")] + [TestCase(@"C:\repo\src\.\subfolder", @"C:\repo\src", true, Description = "Dot segment resolves to same path")] + [TestCase(@"C:\repo\src\subfolder", @"C:\repo\.\src", true, Description = "Dot segment in directory")] + + // Trailing separators + [TestCase(@"C:\repo\src\subfolder", @"C:\repo\src\", true, Description = "Trailing slash on directory")] + [TestCase(@"C:\repo\src\subfolder\", @"C:\repo\src", true, Description = "Trailing slash on path")] + + // Case sensitivity + [TestCase(@"C:\Repo\SRC\subfolder", @"C:\repo\src", true, Description = "Case-insensitive child path")] + [TestCase(@"C:\REPO\SRC", @"C:\repo\src", true, Description = "Case-insensitive equal path")] + [TestCase(@"c:\repo\src\subfolder", @"C:\REPO\SRC", true, Description = "Lower drive letter vs upper")] + [TestCase(@"C:\Repo\Src2", 
@"C:\repo\src", false, Description = "Case-insensitive sibling is outside")] + + // Mixed forward and backward slashes + [TestCase(@"C:\repo\src/subfolder", @"C:\repo\src", true, Description = "Forward slash in child path")] + [TestCase("C:/repo/src/subfolder", @"C:\repo\src", true, Description = "All forward slashes in path")] + [TestCase(@"C:\repo\src\subfolder", "C:/repo/src", true, Description = "All forward slashes in directory")] + [TestCase("C:/repo/src", "C:/repo/src", true, Description = "Both paths with forward slashes")] + [TestCase("C:/repo/src/../other", @"C:\repo\src", false, Description = "Forward slashes with traversal")] + public void IsPathInsideDirectory(string path, string directory, bool expected) + { + Assert.AreEqual(expected, GVFSEnlistment.IsPathInsideDirectory(path, directory)); + } + + private string testDir; + + [SetUp] + public void SetUp() + { + this.testDir = Path.Combine(Path.GetTempPath(), "WorktreeNestedPathTests_" + Path.GetRandomFileName()); + Directory.CreateDirectory(this.testDir); + } + + [TearDown] + public void TearDown() + { + if (Directory.Exists(this.testDir)) + { + Directory.Delete(this.testDir, recursive: true); + } + } + + [TestCase] + public void GetKnownWorktreePathsReturnsEmptyWhenNoWorktreesDir() + { + string[] paths = GVFSEnlistment.GetKnownWorktreePaths(this.testDir); + Assert.AreEqual(0, paths.Length); + } + + [TestCase] + public void GetKnownWorktreePathsReturnsEmptyWhenWorktreesDirIsEmpty() + { + Directory.CreateDirectory(Path.Combine(this.testDir, "worktrees")); + + string[] paths = GVFSEnlistment.GetKnownWorktreePaths(this.testDir); + Assert.AreEqual(0, paths.Length); + } + + [TestCase] + public void GetKnownWorktreePathsReadsGitdirFiles() + { + string wt1Dir = Path.Combine(this.testDir, "worktrees", "wt1"); + string wt2Dir = Path.Combine(this.testDir, "worktrees", "wt2"); + Directory.CreateDirectory(wt1Dir); + Directory.CreateDirectory(wt2Dir); + + File.WriteAllText(Path.Combine(wt1Dir, "gitdir"), 
@"C:\worktrees\wt1\.git" + "\n"); + File.WriteAllText(Path.Combine(wt2Dir, "gitdir"), @"C:\worktrees\wt2\.git" + "\n"); + + string[] paths = GVFSEnlistment.GetKnownWorktreePaths(this.testDir); + Assert.AreEqual(2, paths.Length); + Assert.That(paths, Has.Member(@"C:\worktrees\wt1")); + Assert.That(paths, Has.Member(@"C:\worktrees\wt2")); + } + + [TestCase] + public void GetKnownWorktreePathsSkipsEntriesWithoutGitdirFile() + { + string wt1Dir = Path.Combine(this.testDir, "worktrees", "wt1"); + string wt2Dir = Path.Combine(this.testDir, "worktrees", "wt2"); + Directory.CreateDirectory(wt1Dir); + Directory.CreateDirectory(wt2Dir); + + File.WriteAllText(Path.Combine(wt1Dir, "gitdir"), @"C:\worktrees\wt1\.git" + "\n"); + // wt2 has no gitdir file + + string[] paths = GVFSEnlistment.GetKnownWorktreePaths(this.testDir); + Assert.AreEqual(1, paths.Length); + Assert.AreEqual(@"C:\worktrees\wt1", paths[0]); + } + + [TestCase] + public void GetKnownWorktreePathsNormalizesForwardSlashes() + { + string wtDir = Path.Combine(this.testDir, "worktrees", "wt1"); + Directory.CreateDirectory(wtDir); + + File.WriteAllText(Path.Combine(wtDir, "gitdir"), "C:/worktrees/wt1/.git\n"); + + string[] paths = GVFSEnlistment.GetKnownWorktreePaths(this.testDir); + Assert.AreEqual(1, paths.Length); + Assert.AreEqual(@"C:\worktrees\wt1", paths[0]); + } + } +} diff --git a/GVFS/GVFS.UnitTests/GVFS.UnitTests.csproj b/GVFS/GVFS.UnitTests/GVFS.UnitTests.csproj index 890714857..8c3669baa 100644 --- a/GVFS/GVFS.UnitTests/GVFS.UnitTests.csproj +++ b/GVFS/GVFS.UnitTests/GVFS.UnitTests.csproj @@ -37,6 +37,12 @@ + + + Hooks\UnstageCommandParser.cs + + + Always diff --git a/GVFS/GVFS.UnitTests/Git/GVFSGitObjectsTests.cs b/GVFS/GVFS.UnitTests/Git/GVFSGitObjectsTests.cs index e99e63dd7..caa23da24 100644 --- a/GVFS/GVFS.UnitTests/Git/GVFSGitObjectsTests.cs +++ b/GVFS/GVFS.UnitTests/Git/GVFSGitObjectsTests.cs @@ -118,6 +118,405 @@ public void FailsNullBytePackDownloads() gitObjects => 
gitObjects.TryDownloadCommit("object0")); } + [TestCase] + public void CoalescesMultipleConcurrentRequestsForSameObject() + { + ManualResetEventSlim downloadStarted = new ManualResetEventSlim(false); + ManualResetEventSlim downloadGate = new ManualResetEventSlim(false); + int downloadCount = 0; + + CoalescingTestHttpGitObjects httpObjects = new CoalescingTestHttpGitObjects( + this.validTestObjectFileContents, + onDownloadStarting: () => + { + Interlocked.Increment(ref downloadCount); + downloadStarted.Set(); + downloadGate.Wait(); + }); + + MockFileSystemWithCallbacks fileSystem = new MockFileSystemWithCallbacks(); + fileSystem.OnFileExists = (path) => false; + fileSystem.OnMoveFile = (source, target) => { }; + fileSystem.OnOpenFileStream = (path, mode, access) => + { + if (access == FileAccess.Read) + { + return new MemoryStream(this.validTestObjectFileContents); + } + + return new MemoryStream(); + }; + + GVFSGitObjects dut = this.CreateTestableGVFSGitObjects(httpObjects, fileSystem); + + const int threadCount = 10; + GitObjects.DownloadAndSaveObjectResult[] results = new GitObjects.DownloadAndSaveObjectResult[threadCount]; + Thread[] threads = new Thread[threadCount]; + CountdownEvent allReady = new CountdownEvent(threadCount); + ManualResetEventSlim go = new ManualResetEventSlim(false); + + for (int i = 0; i < threadCount; i++) + { + int idx = i; + threads[i] = new Thread(() => + { + allReady.Signal(); + go.Wait(); + results[idx] = dut.TryDownloadAndSaveObject( + ValidTestObjectFileSha1, + GVFSGitObjects.RequestSource.NamedPipeMessage); + }); + threads[i].Start(); + } + + // Release all threads simultaneously + allReady.Wait(); + go.Set(); + + // Wait for the first download to start (proves one thread entered the factory) + downloadStarted.Wait(TimeSpan.FromSeconds(5)).ShouldBeTrue("Download should have started"); + + // Give other threads time to pile up on the Lazy + Thread.Sleep(200); + + // Release the download + downloadGate.Set(); + + // Wait for all 
threads + foreach (Thread t in threads) + { + t.Join(TimeSpan.FromSeconds(10)).ShouldBeTrue("Thread should complete"); + } + + // Only one download should have occurred + downloadCount.ShouldEqual(1); + + // All threads should have gotten Success + foreach (GitObjects.DownloadAndSaveObjectResult result in results) + { + result.ShouldEqual(GitObjects.DownloadAndSaveObjectResult.Success); + } + } + + [TestCase] + public void DifferentObjectsAreNotCoalesced() + { + string secondSha = "b376885ac8452b6cbf9ced81b1080bfd570d9b91"; + int downloadCount = 0; + + CoalescingTestHttpGitObjects httpObjects = new CoalescingTestHttpGitObjects( + this.validTestObjectFileContents, + onDownloadStarting: () => Interlocked.Increment(ref downloadCount)); + + MockFileSystemWithCallbacks fileSystem = new MockFileSystemWithCallbacks(); + fileSystem.OnFileExists = (path) => false; + fileSystem.OnMoveFile = (source, target) => { }; + fileSystem.OnOpenFileStream = (path, mode, access) => + { + if (access == FileAccess.Read) + { + return new MemoryStream(this.validTestObjectFileContents); + } + + return new MemoryStream(); + }; + + GVFSGitObjects dut = this.CreateTestableGVFSGitObjects(httpObjects, fileSystem); + + dut.TryDownloadAndSaveObject(ValidTestObjectFileSha1, GVFSGitObjects.RequestSource.NamedPipeMessage) + .ShouldEqual(GitObjects.DownloadAndSaveObjectResult.Success); + + dut.TryDownloadAndSaveObject(secondSha, GVFSGitObjects.RequestSource.NamedPipeMessage) + .ShouldEqual(GitObjects.DownloadAndSaveObjectResult.Success); + + downloadCount.ShouldEqual(2); + } + + [TestCase] + public void FailedDownloadAllowsSubsequentRetry() + { + int downloadCount = 0; + + CoalescingTestHttpGitObjects httpObjects = new CoalescingTestHttpGitObjects( + this.validTestObjectFileContents, + onDownloadStarting: () => Interlocked.Increment(ref downloadCount), + failUntilAttempt: 2); + + MockFileSystemWithCallbacks fileSystem = new MockFileSystemWithCallbacks(); + fileSystem.OnFileExists = (path) => false; + 
fileSystem.OnMoveFile = (source, target) => { }; + fileSystem.OnOpenFileStream = (path, mode, access) => + { + if (access == FileAccess.Read) + { + return new MemoryStream(this.validTestObjectFileContents); + } + + return new MemoryStream(); + }; + + GVFSGitObjects dut = this.CreateTestableGVFSGitObjects(httpObjects, fileSystem); + + // First attempt fails + dut.TryDownloadAndSaveObject(ValidTestObjectFileSha1, GVFSGitObjects.RequestSource.NamedPipeMessage) + .ShouldEqual(GitObjects.DownloadAndSaveObjectResult.Error); + + // Second attempt should start a new download (not reuse cached failure) + dut.TryDownloadAndSaveObject(ValidTestObjectFileSha1, GVFSGitObjects.RequestSource.NamedPipeMessage) + .ShouldEqual(GitObjects.DownloadAndSaveObjectResult.Success); + + // Two separate downloads should have occurred + downloadCount.ShouldEqual(2); + } + + [TestCase] + public void ConcurrentFailedDownloadAllowsSubsequentRetry() + { + ManualResetEventSlim downloadStarted = new ManualResetEventSlim(false); + ManualResetEventSlim downloadGate = new ManualResetEventSlim(false); + int downloadCount = 0; + + CoalescingTestHttpGitObjects httpObjects = new CoalescingTestHttpGitObjects( + this.validTestObjectFileContents, + onDownloadStarting: () => + { + int count = Interlocked.Increment(ref downloadCount); + if (count == 1) + { + downloadStarted.Set(); + downloadGate.Wait(); + } + }, + failUntilAttempt: 2); + + MockFileSystemWithCallbacks fileSystem = new MockFileSystemWithCallbacks(); + fileSystem.OnFileExists = (path) => false; + fileSystem.OnMoveFile = (source, target) => { }; + fileSystem.OnOpenFileStream = (path, mode, access) => + { + if (access == FileAccess.Read) + { + return new MemoryStream(this.validTestObjectFileContents); + } + + return new MemoryStream(); + }; + + GVFSGitObjects dut = this.CreateTestableGVFSGitObjects(httpObjects, fileSystem); + + const int threadCount = 5; + GitObjects.DownloadAndSaveObjectResult[] results = new 
GitObjects.DownloadAndSaveObjectResult[threadCount]; + Thread[] threads = new Thread[threadCount]; + CountdownEvent allReady = new CountdownEvent(threadCount); + ManualResetEventSlim go = new ManualResetEventSlim(false); + + for (int i = 0; i < threadCount; i++) + { + int idx = i; + threads[i] = new Thread(() => + { + allReady.Signal(); + go.Wait(); + results[idx] = dut.TryDownloadAndSaveObject( + ValidTestObjectFileSha1, + GVFSGitObjects.RequestSource.NamedPipeMessage); + }); + threads[i].Start(); + } + + allReady.Wait(); + go.Set(); + + downloadStarted.Wait(TimeSpan.FromSeconds(5)).ShouldBeTrue("Download should have started"); + Thread.Sleep(200); + downloadGate.Set(); + + foreach (Thread t in threads) + { + t.Join(TimeSpan.FromSeconds(10)).ShouldBeTrue("Thread should complete"); + } + + // All coalesced threads should have gotten Error + foreach (GitObjects.DownloadAndSaveObjectResult result in results) + { + result.ShouldEqual(GitObjects.DownloadAndSaveObjectResult.Error); + } + + // Subsequent request should succeed (new download, not cached failure) + dut.TryDownloadAndSaveObject(ValidTestObjectFileSha1, GVFSGitObjects.RequestSource.NamedPipeMessage) + .ShouldEqual(GitObjects.DownloadAndSaveObjectResult.Success); + + downloadCount.ShouldEqual(2); + } + + [TestCase] + [Category(CategoryConstants.ExceptionExpected)] + public void ExceptionInDownloadFactoryAllowsRetry() + { + ManualResetEventSlim downloadStarted = new ManualResetEventSlim(false); + ManualResetEventSlim downloadGate = new ManualResetEventSlim(false); + int downloadCount = 0; + + CoalescingTestHttpGitObjects httpObjects = new CoalescingTestHttpGitObjects( + this.validTestObjectFileContents, + onDownloadStarting: () => + { + int count = Interlocked.Increment(ref downloadCount); + if (count == 1) + { + downloadStarted.Set(); + downloadGate.Wait(); + } + }, + throwUntilAttempt: 2); + + MockFileSystemWithCallbacks fileSystem = new MockFileSystemWithCallbacks(); + fileSystem.OnFileExists = (path) => 
false; + fileSystem.OnMoveFile = (source, target) => { }; + fileSystem.OnOpenFileStream = (path, mode, access) => + { + if (access == FileAccess.Read) + { + return new MemoryStream(this.validTestObjectFileContents); + } + + return new MemoryStream(); + }; + + GVFSGitObjects dut = this.CreateTestableGVFSGitObjects(httpObjects, fileSystem); + + const int threadCount = 5; + Exception[] exceptions = new Exception[threadCount]; + Thread[] threads = new Thread[threadCount]; + CountdownEvent allReady = new CountdownEvent(threadCount); + ManualResetEventSlim go = new ManualResetEventSlim(false); + + for (int i = 0; i < threadCount; i++) + { + int idx = i; + threads[i] = new Thread(() => + { + allReady.Signal(); + go.Wait(); + try + { + dut.TryDownloadAndSaveObject( + ValidTestObjectFileSha1, + GVFSGitObjects.RequestSource.NamedPipeMessage); + } + catch (Exception ex) + { + exceptions[idx] = ex; + } + }); + threads[i].Start(); + } + + allReady.Wait(); + go.Set(); + + downloadStarted.Wait(TimeSpan.FromSeconds(5)).ShouldBeTrue("Download should have started"); + Thread.Sleep(200); + downloadGate.Set(); + + foreach (Thread t in threads) + { + t.Join(TimeSpan.FromSeconds(10)).ShouldBeTrue("Thread should complete"); + } + + // All coalesced threads should have caught the exception + foreach (Exception ex in exceptions) + { + Assert.IsNotNull(ex, "Each coalesced caller should receive the exception"); + Assert.IsInstanceOf(ex); + } + + // Subsequent retry should succeed (inflight entry was cleaned up) + dut.TryDownloadAndSaveObject(ValidTestObjectFileSha1, GVFSGitObjects.RequestSource.NamedPipeMessage) + .ShouldEqual(GitObjects.DownloadAndSaveObjectResult.Success); + + downloadCount.ShouldEqual(2); + } + + [TestCase] + public void StragglingFinallyDoesNotRemoveNewInflightDownload() + { + // Deterministically reproduce the ABA race against the real inflightDownloads + // dictionary: a straggling wave-1 thread's TryRemoveInflightDownload must not + // remove a wave-2 Lazy that was 
added for the same key. + ManualResetEventSlim wave2Started = new ManualResetEventSlim(false); + ManualResetEventSlim wave2Gate = new ManualResetEventSlim(false); + int downloadCount = 0; + + CoalescingTestHttpGitObjects httpObjects = new CoalescingTestHttpGitObjects( + this.validTestObjectFileContents, + onDownloadStarting: () => + { + int count = Interlocked.Increment(ref downloadCount); + if (count == 2) + { + // Wave 2's download: signal that it's in-flight, then block + wave2Started.Set(); + wave2Gate.Wait(); + } + }); + + MockFileSystemWithCallbacks fileSystem = new MockFileSystemWithCallbacks(); + fileSystem.OnFileExists = (path) => false; + fileSystem.OnMoveFile = (source, target) => { }; + fileSystem.OnOpenFileStream = (path, mode, access) => + { + if (access == FileAccess.Read) + { + return new MemoryStream(this.validTestObjectFileContents); + } + + return new MemoryStream(); + }; + + GVFSGitObjects dut = this.CreateTestableGVFSGitObjects(httpObjects, fileSystem); + + // Wave 1: single download completes immediately (downloadCount becomes 1) + dut.TryDownloadAndSaveObject(ValidTestObjectFileSha1, GVFSGitObjects.RequestSource.NamedPipeMessage) + .ShouldEqual(GitObjects.DownloadAndSaveObjectResult.Success); + + // After wave 1, the inflight entry should be cleaned up + dut.inflightDownloads.ContainsKey(ValidTestObjectFileSha1).ShouldBeFalse("Wave 1 should have cleaned up"); + + // Wave 2: start a new download that blocks inside its factory (downloadCount becomes 2) + Thread wave2Thread = new Thread(() => + { + dut.TryDownloadAndSaveObject(ValidTestObjectFileSha1, GVFSGitObjects.RequestSource.NamedPipeMessage) + .ShouldEqual(GitObjects.DownloadAndSaveObjectResult.Success); + }); + wave2Thread.Start(); + + // Wait until wave 2's download factory is executing (Lazy is in the dictionary) + wave2Started.Wait(TimeSpan.FromSeconds(5)).ShouldBeTrue("Wave 2 download should have started"); + + // Capture wave 2's Lazy from the dictionary + Lazy wave2Lazy; + 
dut.inflightDownloads.TryGetValue(ValidTestObjectFileSha1, out wave2Lazy).ShouldBeTrue("Wave 2 Lazy should be in dictionary"); + + // Simulate a straggling wave-1 thread: create a different Lazy and try to remove it. + // With value-aware removal, this must NOT remove wave 2's Lazy. + Lazy staleLazy = + new Lazy(() => GitObjects.DownloadAndSaveObjectResult.Success); + bool staleRemoved = ((ICollection>>)dut.inflightDownloads) + .Remove(new KeyValuePair>(ValidTestObjectFileSha1, staleLazy)); + + staleRemoved.ShouldBeFalse("Straggling finally must not remove wave 2's Lazy"); + dut.inflightDownloads.ContainsKey(ValidTestObjectFileSha1).ShouldBeTrue("Wave 2 Lazy must survive"); + ReferenceEquals(dut.inflightDownloads[ValidTestObjectFileSha1], wave2Lazy).ShouldBeTrue("The entry should still be wave 2's Lazy"); + + // Release wave 2 and verify it completes + wave2Gate.Set(); + wave2Thread.Join(TimeSpan.FromSeconds(10)).ShouldBeTrue("Wave 2 thread should complete"); + + // Both waves should have triggered separate downloads + downloadCount.ShouldEqual(2); + } + private void AssertRetryableExceptionOnDownload( MemoryStream inputStream, string mediaType, @@ -140,7 +539,7 @@ private void AssertRetryableExceptionOnDownload( } } - private GVFSGitObjects CreateTestableGVFSGitObjects(MockHttpGitObjects httpObjects, MockFileSystemWithCallbacks fileSystem) + private GVFSGitObjects CreateTestableGVFSGitObjects(GitObjectsHttpRequestor httpObjects, MockFileSystemWithCallbacks fileSystem) { MockTracer tracer = new MockTracer(); GVFSEnlistment enlistment = new GVFSEnlistment(TestEnlistmentRoot, "https://fakeRepoUrl", "fakeGitBinPath", authentication: null); @@ -224,5 +623,78 @@ public UnsafeGVFSGitObjects(GVFSContext context, GitObjectsHttpRequestor objectR this.checkData = false; } } + + private class CoalescingTestHttpGitObjects : GitObjectsHttpRequestor + { + private readonly byte[] objectContents; + private readonly Action onDownloadStarting; + private readonly int 
failUntilAttempt; + private readonly int throwUntilAttempt; + private int attemptCount; + + public CoalescingTestHttpGitObjects(byte[] objectContents, Action onDownloadStarting, int failUntilAttempt = 0, int throwUntilAttempt = 0) + : this(new MockGVFSEnlistment(), objectContents, onDownloadStarting, failUntilAttempt, throwUntilAttempt) + { + } + + private CoalescingTestHttpGitObjects(MockGVFSEnlistment enlistment, byte[] objectContents, Action onDownloadStarting, int failUntilAttempt, int throwUntilAttempt) + : base(new MockTracer(), enlistment, new MockCacheServerInfo(), new RetryConfig(maxRetries: 1)) + { + this.objectContents = objectContents; + this.onDownloadStarting = onDownloadStarting; + this.failUntilAttempt = failUntilAttempt; + this.throwUntilAttempt = throwUntilAttempt; + } + + public override RetryWrapper.InvocationResult TryDownloadLooseObject( + string objectId, + bool retryOnFailure, + CancellationToken cancellationToken, + string requestSource, + Func.CallbackResult> onSuccess) + { + this.onDownloadStarting?.Invoke(); + + int attempt = Interlocked.Increment(ref this.attemptCount); + if (attempt < this.throwUntilAttempt) + { + throw new IOException("Simulated download exception"); + } + + if (attempt < this.failUntilAttempt) + { + GitObjectTaskResult failResult = new GitObjectTaskResult(false); + return new RetryWrapper.InvocationResult(0, false, failResult); + } + + using (MemoryStream stream = new MemoryStream(this.objectContents)) + using (GitEndPointResponseData response = new GitEndPointResponseData( + HttpStatusCode.OK, + GVFSConstants.MediaTypes.LooseObjectMediaType, + stream, + message: null, + onResponseDisposed: null)) + { + onSuccess(0, response); + } + + GitObjectTaskResult result = new GitObjectTaskResult(true); + return new RetryWrapper.InvocationResult(0, true, result); + } + + public override RetryWrapper.InvocationResult TryDownloadObjects( + IEnumerable objectIds, + Func.CallbackResult> onSuccess, + Action.ErrorEventArgs> 
onFailure, + bool preferBatchedLooseObjects) + { + throw new NotImplementedException(); + } + + public override List QueryForFileSizes(IEnumerable objectIds, CancellationToken cancellationToken) + { + throw new NotImplementedException(); + } + } } } \ No newline at end of file diff --git a/GVFS/GVFS.UnitTests/Git/GitProcessTests.cs b/GVFS/GVFS.UnitTests/Git/GitProcessTests.cs index 2ef875245..74ddde8ee 100644 --- a/GVFS/GVFS.UnitTests/Git/GitProcessTests.cs +++ b/GVFS/GVFS.UnitTests/Git/GitProcessTests.cs @@ -2,6 +2,7 @@ using GVFS.Tests.Should; using GVFS.UnitTests.Mock.Common; using NUnit.Framework; +using System.Diagnostics; namespace GVFS.UnitTests.Git { @@ -253,5 +254,71 @@ public void ConfigResult_TryParseAsInt_ParsesWhenOutputIncludesWhitespace() result.TryParseAsInt(1, -1, out int value, out string error).ShouldBeTrue(); value.ShouldEqual(32); } + + [TestCase("dir/file.txt", "\"dir/file.txt\"")] + [TestCase("my dir/my file.txt", "\"my dir/my file.txt\"")] + [TestCase("dir/file\"name.txt", "\"dir/file\\\"name.txt\"")] + [TestCase("\"quoted\"", "\"\\\"quoted\\\"\"")] + [TestCase("dir\\subdir\\file.txt", "\"dir\\subdir\\file.txt\"")] // Backslashes as path separators left as-is + [TestCase("", "\"\"")] + [TestCase("dir\\\"file.txt", "\"dir\\\\\\\"file.txt\"")] // Backslash before quote: doubled, then quote escaped + [TestCase("dir\\subdir\\", "\"dir\\subdir\\\\\"")] // Trailing backslash doubled + public void QuoteGitPath(string input, string expected) + { + GitProcess.QuoteGitPath(input).ShouldEqual(expected); + } + + [TestCase] + [Description("Integration test: verify QuoteGitPath produces arguments that git actually receives correctly")] + public void QuoteGitPath_RoundTripThroughProcess() + { + // Test that paths with special characters survive the + // ProcessStartInfo.Arguments → Windows CRT argument parsing → git round-trip. + // We use "git rev-parse --sq-quote " which echoes the path back + // in shell-quoted form, proving git received it correctly. 
+ string[] testPaths = new[] + { + "simple/path.txt", + "path with spaces/file name.txt", + "path\\with\\backslashes\\file.txt", + }; + + string gitPath = "C:\\Program Files\\Git\\cmd\\git.exe"; + if (!System.IO.File.Exists(gitPath)) + { + Assert.Ignore("Git not found at expected path — skipping integration test"); + } + + foreach (string testPath in testPaths) + { + string quoted = GitProcess.QuoteGitPath(testPath); + ProcessStartInfo psi = new ProcessStartInfo(gitPath) + { + Arguments = "rev-parse --sq-quote " + quoted, + UseShellExecute = false, + RedirectStandardOutput = true, + RedirectStandardError = true, + CreateNoWindow = true, + }; + + using (Process proc = Process.Start(psi)) + { + string output = proc.StandardOutput.ReadToEnd().Trim(); + proc.WaitForExit(); + + // git sq-quote wraps in single quotes and escapes single quotes + // For a simple path "foo/bar.txt" → output is "'foo/bar.txt'" + // Strip the outer single quotes to get the raw path back + if (output.StartsWith("'") && output.EndsWith("'")) + { + output = output.Substring(1, output.Length - 2); + } + + output.ShouldEqual( + testPath, + $"Path round-trip failed for: {testPath} (quoted as: {quoted})"); + } + } + } } } diff --git a/GVFS/GVFS.UnitTests/Hooks/UnstageTests.cs b/GVFS/GVFS.UnitTests/Hooks/UnstageTests.cs new file mode 100644 index 000000000..2341c15be --- /dev/null +++ b/GVFS/GVFS.UnitTests/Hooks/UnstageTests.cs @@ -0,0 +1,286 @@ +using GVFS.Hooks; +using GVFS.Tests.Should; +using NUnit.Framework; + +namespace GVFS.UnitTests.Hooks +{ + [TestFixture] + public class UnstageTests + { + // ── IsUnstageOperation ────────────────────────────────────────── + + [TestCase] + public void IsUnstageOperation_RestoreStaged() + { + UnstageCommandParser.IsUnstageOperation( + "restore", + new[] { "pre-command", "restore", "--staged", "." 
}) + .ShouldBeTrue(); + } + + [TestCase] + public void IsUnstageOperation_RestoreShortFlag() + { + UnstageCommandParser.IsUnstageOperation( + "restore", + new[] { "pre-command", "restore", "-S", "file.txt" }) + .ShouldBeTrue(); + } + + [TestCase] + public void IsUnstageOperation_RestoreCombinedShortFlags() + { + // -WS means --worktree --staged + UnstageCommandParser.IsUnstageOperation( + "restore", + new[] { "pre-command", "restore", "-WS", "file.txt" }) + .ShouldBeTrue(); + } + + [TestCase] + public void IsUnstageOperation_RestoreLowerS_NotStaged() + { + // -s means --source, not --staged + UnstageCommandParser.IsUnstageOperation( + "restore", + new[] { "pre-command", "restore", "-s", "HEAD~1", "file.txt" }) + .ShouldBeFalse(); + } + + [TestCase] + public void IsUnstageOperation_RestoreWithoutStaged() + { + UnstageCommandParser.IsUnstageOperation( + "restore", + new[] { "pre-command", "restore", "file.txt" }) + .ShouldBeFalse(); + } + + [TestCase] + public void IsUnstageOperation_CheckoutHeadDashDash() + { + UnstageCommandParser.IsUnstageOperation( + "checkout", + new[] { "pre-command", "checkout", "HEAD", "--", "file.txt" }) + .ShouldBeTrue(); + } + + [TestCase] + public void IsUnstageOperation_CheckoutNoDashDash() + { + UnstageCommandParser.IsUnstageOperation( + "checkout", + new[] { "pre-command", "checkout", "HEAD", "file.txt" }) + .ShouldBeFalse(); + } + + [TestCase] + public void IsUnstageOperation_CheckoutBranchName() + { + UnstageCommandParser.IsUnstageOperation( + "checkout", + new[] { "pre-command", "checkout", "my-branch" }) + .ShouldBeFalse(); + } + + [TestCase] + public void IsUnstageOperation_OtherCommand() + { + UnstageCommandParser.IsUnstageOperation( + "status", + new[] { "pre-command", "status" }) + .ShouldBeFalse(); + } + + // ── GetRestorePathspec: inline pathspecs ──────────────────────── + + [TestCase] + public void GetRestorePathspec_RestoreStagedAllFiles() + { + UnstageCommandParser.PathspecResult result = 
UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "." }); + result.Failed.ShouldBeFalse(); + result.InlinePathspecs.ShouldEqual("."); + result.PathspecFromFile.ShouldBeNull(); + } + + [TestCase] + public void GetRestorePathspec_RestoreStagedSpecificFiles() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "a.txt", "b.txt" }); + result.Failed.ShouldBeFalse(); + result.InlinePathspecs.ShouldEqual("a.txt\0b.txt"); + } + + [TestCase] + public void GetRestorePathspec_RestoreStagedNoPathspec() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged" }); + result.Failed.ShouldBeFalse(); + result.InlinePathspecs.ShouldEqual(string.Empty); + result.PathspecFromFile.ShouldBeNull(); + } + + [TestCase] + public void GetRestorePathspec_RestoreSkipsSourceFlag() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "--source", "HEAD~1", "file.txt" }); + result.InlinePathspecs.ShouldEqual("file.txt"); + } + + [TestCase] + public void GetRestorePathspec_RestoreSkipsSourceEqualsFlag() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "--source=HEAD~1", "file.txt" }); + result.InlinePathspecs.ShouldEqual("file.txt"); + } + + [TestCase] + public void GetRestorePathspec_RestoreSkipsShortSourceFlag() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "-s", "HEAD~1", "file.txt" }); + result.InlinePathspecs.ShouldEqual("file.txt"); + } + + [TestCase] + public void GetRestorePathspec_RestorePathsAfterDashDash() + { + 
UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "--", "a.txt", "b.txt" }); + result.InlinePathspecs.ShouldEqual("a.txt\0b.txt"); + } + + [TestCase] + public void GetRestorePathspec_RestoreSkipsGitPid() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "--git-pid=1234", "file.txt" }); + result.InlinePathspecs.ShouldEqual("file.txt"); + } + + // ── Checkout tree-ish stripping ──────────────────────────────── + + [TestCase] + public void GetRestorePathspec_CheckoutStripsTreeish() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "checkout", + new[] { "pre-command", "checkout", "HEAD", "--", "foo.txt" }); + result.InlinePathspecs.ShouldEqual("foo.txt"); + } + + [TestCase] + public void GetRestorePathspec_CheckoutStripsTreeishMultiplePaths() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "checkout", + new[] { "pre-command", "checkout", "HEAD", "--", "a.txt", "b.txt" }); + result.InlinePathspecs.ShouldEqual("a.txt\0b.txt"); + } + + [TestCase] + public void GetRestorePathspec_CheckoutNoPaths() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "checkout", + new[] { "pre-command", "checkout", "HEAD", "--" }); + result.InlinePathspecs.ShouldEqual(string.Empty); + } + + [TestCase] + public void GetRestorePathspec_CheckoutTreeishNotIncludedAsPaths() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "checkout", + new[] { "pre-command", "checkout", "HEAD", "--", "file.txt" }); + result.InlinePathspecs.ShouldNotContain(false, "HEAD"); + } + + // ── --pathspec-from-file forwarding ─────────────────────────── + + [TestCase] + public void GetRestorePathspec_PathspecFromFileEqualsForm() + { + 
UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "--pathspec-from-file=list.txt" }); + result.Failed.ShouldBeFalse(); + result.PathspecFromFile.ShouldEqual("list.txt"); + result.PathspecFileNul.ShouldBeFalse(); + } + + [TestCase] + public void GetRestorePathspec_PathspecFromFileSeparateArg() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "--pathspec-from-file", "list.txt" }); + result.Failed.ShouldBeFalse(); + result.PathspecFromFile.ShouldEqual("list.txt"); + } + + [TestCase] + public void GetRestorePathspec_PathspecFileNulSetsFlag() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "--pathspec-from-file=list.txt", "--pathspec-file-nul" }); + result.Failed.ShouldBeFalse(); + result.PathspecFromFile.ShouldEqual("list.txt"); + result.PathspecFileNul.ShouldBeTrue(); + } + + [TestCase] + public void GetRestorePathspec_PathspecFromFileStdinFails() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "--pathspec-from-file=-" }); + result.Failed.ShouldBeTrue(); + } + + [TestCase] + public void GetRestorePathspec_CheckoutPathspecFromFile() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "checkout", + new[] { "pre-command", "checkout", "HEAD", "--pathspec-from-file=list.txt", "--" }); + result.Failed.ShouldBeFalse(); + result.PathspecFromFile.ShouldEqual("list.txt"); + } + + [TestCase] + public void GetRestorePathspec_PathspecFileNulAloneIsIgnored() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "--pathspec-file-nul", 
"file.txt" }); + result.InlinePathspecs.ShouldEqual("file.txt"); + result.PathspecFromFile.ShouldBeNull(); + } + + [TestCase] + public void GetRestorePathspec_PathspecFromFileWithInlinePaths() + { + UnstageCommandParser.PathspecResult result = UnstageCommandParser.GetRestorePathspec( + "restore", + new[] { "pre-command", "restore", "--staged", "--pathspec-from-file=list.txt", "extra.txt" }); + result.Failed.ShouldBeFalse(); + result.PathspecFromFile.ShouldEqual("list.txt"); + result.InlinePathspecs.ShouldEqual("extra.txt"); + } + } +} diff --git a/GVFS/GVFS.UnitTests/Mock/Common/MockFileBasedLock.cs b/GVFS/GVFS.UnitTests/Mock/Common/MockFileBasedLock.cs index c18c707b4..ba821d1d5 100644 --- a/GVFS/GVFS.UnitTests/Mock/Common/MockFileBasedLock.cs +++ b/GVFS/GVFS.UnitTests/Mock/Common/MockFileBasedLock.cs @@ -1,6 +1,7 @@ using GVFS.Common; using GVFS.Common.FileSystem; using GVFS.Common.Tracing; +using System; namespace GVFS.UnitTests.Mock.Common { @@ -14,8 +15,9 @@ public MockFileBasedLock( { } - public override bool TryAcquireLock() + public override bool TryAcquireLock(out Exception lockException) { + lockException = null; return true; } diff --git a/GVFS/GVFS.UnitTests/Mock/Git/MockGVFSGitObjects.cs b/GVFS/GVFS.UnitTests/Mock/Git/MockGVFSGitObjects.cs index 47c30d35e..b95984ecc 100644 --- a/GVFS/GVFS.UnitTests/Mock/Git/MockGVFSGitObjects.cs +++ b/GVFS/GVFS.UnitTests/Mock/Git/MockGVFSGitObjects.cs @@ -71,7 +71,7 @@ public override void DeleteStaleTempPrefetchPackAndIdxs() { } - public override bool TryDownloadPrefetchPacks(GitProcess gitProcess, long latestTimestamp, out List packIndexes) + public override bool TryDownloadPrefetchPacks(GitProcess gitProcess, long latestTimestamp, bool trustPackIndexes, out List packIndexes) { packIndexes = new List(); return true; diff --git a/GVFS/GVFS.Virtualization/Background/BackgroundFileSystemTaskRunner.cs b/GVFS/GVFS.Virtualization/Background/BackgroundFileSystemTaskRunner.cs index 0b6be8994..88ea89709 100644 --- 
a/GVFS/GVFS.Virtualization/Background/BackgroundFileSystemTaskRunner.cs +++ b/GVFS/GVFS.Virtualization/Background/BackgroundFileSystemTaskRunner.cs @@ -131,6 +131,11 @@ protected void Dispose(bool disposing) this.backgroundThread.Dispose(); this.backgroundThread = null; } + if (this.backgroundTasks != null) + { + this.backgroundTasks.Dispose(); + this.backgroundTasks = null; + } } } diff --git a/GVFS/GVFS.Virtualization/FileSystemCallbacks.cs b/GVFS/GVFS.Virtualization/FileSystemCallbacks.cs index e7737ccba..078b403f5 100644 --- a/GVFS/GVFS.Virtualization/FileSystemCallbacks.cs +++ b/GVFS/GVFS.Virtualization/FileSystemCallbacks.cs @@ -115,8 +115,10 @@ public FileSystemCallbacks( // If the status cache is not enabled, create a dummy GitStatusCache that will never be initialized // This lets us from having to add null checks to callsites into GitStatusCache. this.gitStatusCache = gitStatusCache ?? new GitStatusCache(context, TimeSpan.Zero); + this.gitStatusCache.SetProjectedFolderCountProvider( + () => this.GitIndexProjection.GetProjectedFolderCount()); - this.logsHeadPath = Path.Combine(this.context.Enlistment.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Logs.Head); + this.logsHeadPath = Path.Combine(this.context.Enlistment.DotGitRoot, GVFSConstants.DotGit.Logs.HeadRelativePath); EventMetadata metadata = new EventMetadata(); metadata.Add("placeholders.Count", this.placeholderDatabase.GetCount()); @@ -227,12 +229,6 @@ public void Dispose() this.backgroundFileSystemTaskRunner.Dispose(); this.backgroundFileSystemTaskRunner = null; } - - if (this.context != null) - { - this.context.Dispose(); - this.context = null; - } } public bool IsReadyForExternalAcquireLockRequests(NamedPipeMessages.LockData requester, out string denyMessage) @@ -371,6 +367,169 @@ public IEnumerable GetAllModifiedPaths() return this.modifiedPaths.GetAllModifiedPaths(); } + /// + /// Checks whether the given folder path, or any of its parent folders, + /// is in the ModifiedPaths database. 
Used to determine if git/user has + /// taken ownership of a directory tree. + /// + public bool IsPathOrParentInModifiedPaths(string path, bool isFolder) + { + return this.modifiedPaths.Contains(path, isFolder) || + this.modifiedPaths.ContainsParentFolder(path, out _); + } + + /// + /// Finds index entries that are staged (differ from HEAD) matching the given + /// pathspec, and adds them to ModifiedPaths. This prepares for an unstage operation + /// (e.g., restore --staged) by ensuring git will clear skip-worktree for these + /// entries so it can detect their working tree state correctly. + /// Files that were added (not in HEAD) are also written to disk from the git object + /// store as full files, so they persist after projection changes. + /// + /// + /// IPC message body. Formats: + /// null/empty — all staged files + /// "path1\0path2" — inline pathspecs (null-separated) + /// "\nF\n{filepath}" — --pathspec-from-file (forwarded to git) + /// "\nFZ\n{filepath}" — --pathspec-from-file with --pathspec-file-nul + /// File-reference bodies may include inline pathspecs after a 4th \n field. + /// + /// Number of paths added to ModifiedPaths. + /// True if all operations succeeded, false if any failed. + public bool AddStagedFilesToModifiedPaths(string messageBody, out int addedCount) + { + addedCount = 0; + bool success = true; + + // Use a dedicated GitProcess instance to avoid serialization with other + // concurrent pipe message handlers that may also be running git commands. 
+ GitProcess gitProcess = new GitProcess(this.context.Enlistment); + + // Parse message body to extract pathspec arguments for git diff --cached + string[] pathspecs = null; + string pathspecFromFile = null; + bool pathspecFileNul = false; + + if (!string.IsNullOrEmpty(messageBody)) + { + if (messageBody.StartsWith("\n")) + { + // File-reference format: "\n{F|FZ}\n[\n]" + string[] fields = messageBody.Split(new[] { '\n' }, 4, StringSplitOptions.None); + if (fields.Length >= 3) + { + pathspecFileNul = fields[1] == "FZ"; + pathspecFromFile = fields[2]; + + if (fields.Length >= 4 && !string.IsNullOrEmpty(fields[3])) + { + pathspecs = fields[3].Split('\0'); + } + } + } + else + { + pathspecs = messageBody.Split('\0'); + } + } + + // Query all staged files in one call using --name-status -z. + // Output format: "A\0path1\0M\0path2\0D\0path3\0" + GitProcess.Result result = gitProcess.DiffCachedNameStatus(pathspecs, pathspecFromFile, pathspecFileNul); + if (result.ExitCodeIsSuccess && !string.IsNullOrEmpty(result.Output)) + { + string[] parts = result.Output.Split(new[] { '\0' }, StringSplitOptions.RemoveEmptyEntries); + List addedFilePaths = new List(); + + // Parts alternate: status, path, status, path, ... + for (int i = 0; i + 1 < parts.Length; i += 2) + { + string status = parts[i]; + string gitPath = parts[i + 1]; + + if (string.IsNullOrEmpty(gitPath)) + { + continue; + } + + string platformPath = gitPath.Replace(GVFSConstants.GitPathSeparator, Path.DirectorySeparatorChar); + if (this.modifiedPaths.TryAdd(platformPath, isFolder: false, isRetryable: out _)) + { + addedCount++; + } + + // Added files (in index but not in HEAD) are ProjFS placeholders that + // would vanish when the projection reverts to HEAD. Collect them for + // hydration below. + if (status.StartsWith("A")) + { + addedFilePaths.Add(gitPath); + } + } + + // Write added files from the git object store to disk as full files + // so they persist across projection changes. 
Batched into as few git + // process invocations as possible. + if (addedFilePaths.Count > 0) + { + if (!this.WriteStagedFilesToWorkingDirectory(gitProcess, addedFilePaths)) + { + success = false; + } + } + } + else if (!result.ExitCodeIsSuccess) + { + EventMetadata metadata = new EventMetadata(); + metadata.Add("ExitCode", result.ExitCode); + metadata.Add("Errors", result.Errors ?? string.Empty); + this.context.Tracer.RelatedError( + metadata, + nameof(this.AddStagedFilesToModifiedPaths) + ": git diff --cached failed"); + success = false; + } + + return success; + } + + /// + /// Writes the staged (index) versions of files to the working directory as + /// full files, bypassing ProjFS. Uses "git checkout-index --force" with + /// batched paths to minimize process invocations. + /// Returns true if all batches succeeded, false if any failed. + /// + private bool WriteStagedFilesToWorkingDirectory(GitProcess gitProcess, List gitPaths) + { + bool allSucceeded = true; + try + { + List results = gitProcess.CheckoutIndexForFiles(gitPaths); + foreach (GitProcess.Result result in results) + { + if (!result.ExitCodeIsSuccess) + { + allSucceeded = false; + EventMetadata metadata = new EventMetadata(); + metadata.Add("pathCount", gitPaths.Count); + metadata.Add("error", result.Errors); + this.context.Tracer.RelatedWarning( + metadata, + nameof(this.WriteStagedFilesToWorkingDirectory) + ": git checkout-index failed"); + } + } + } + catch (Exception e) + { + allSucceeded = false; + EventMetadata metadata = new EventMetadata(); + metadata.Add("pathCount", gitPaths.Count); + metadata.Add("Exception", e.ToString()); + this.context.Tracer.RelatedWarning(metadata, nameof(this.WriteStagedFilesToWorkingDirectory) + ": Failed to write files"); + } + + return allSucceeded; + } + public virtual void OnIndexFileChange() { string lockedGitCommand = this.context.Repository.GVFSLock.GetLockedGitCommand(); @@ -403,6 +562,16 @@ public void InvalidateGitStatusCache() } } + public 
EnlistmentHydrationSummary GetCachedHydrationSummary() + { + return this.gitStatusCache.GetCachedHydrationSummary(); + } + + public int GetProjectedFolderCount() + { + return this.GitIndexProjection.GetProjectedFolderCount(); + } + public virtual void OnLogsHeadChange() { // Don't open the .git\logs\HEAD file here to check its attributes as we're in a callback for the .git folder diff --git a/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.FolderData.cs b/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.FolderData.cs index 4777cabbc..bf56444a8 100644 --- a/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.FolderData.cs +++ b/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.FolderData.cs @@ -21,6 +21,29 @@ internal class FolderData : FolderEntryData public bool ChildrenHaveSizes { get; private set; } public bool IsIncluded { get; set; } = true; + public int GetRecursiveFolderCount() + { + int count = 0; + Stack stack = new Stack(); + stack.Push(this); + + while (stack.Count > 0) + { + FolderData current = stack.Pop(); + for (int i = 0; i < current.ChildEntries.Count; i++) + { + FolderData childFolder = current.ChildEntries[i] as FolderData; + if (childFolder != null) + { + count++; + stack.Push(childFolder); + } + } + } + + return count; + } + public void ResetData(LazyUTF8String name, bool isIncluded) { this.Name = name; diff --git a/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.GitIndexParser.cs b/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.GitIndexParser.cs index c05348e2b..382a05945 100644 --- a/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.GitIndexParser.cs +++ b/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.GitIndexParser.cs @@ -6,6 +6,7 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Text; namespace GVFS.Virtualization.Projection { @@ -60,6 +61,53 @@ public static void ValidateIndex(ITracer tracer, Stream indexStream) } } + /// + /// Count unique directories 
in the index by scanning entry paths for separators. + /// Uses the existing index parser to read entries, avoiding a custom index parser. + /// + public static int CountIndexFolders(ITracer tracer, Stream indexStream) + { + HashSet dirs = new HashSet(StringComparer.OrdinalIgnoreCase); + GitIndexParser indexParser = new GitIndexParser(null); + + FileSystemTaskResult result = indexParser.ParseIndex( + tracer, + indexStream, + indexParser.resuableProjectionBuildingIndexEntry, + entry => + { + // Match the same filter as AddIndexEntryToProjection so the + // fallback folder count agrees with the mounted projection. + if (!((entry.MergeState != MergeStage.CommonAncestor && entry.SkipWorktree) || entry.MergeState == MergeStage.Yours)) + { + return FileSystemTaskResult.Success; + } + + // Extract unique parent directories from the raw path buffer + string path = Encoding.UTF8.GetString(entry.PathBuffer, 0, entry.PathLength); + int lastSlash = path.LastIndexOf('/'); + while (lastSlash > 0) + { + string dir = path.Substring(0, lastSlash); + if (!dirs.Add(dir)) + { + break; + } + + lastSlash = dir.LastIndexOf('/'); + } + + return FileSystemTaskResult.Success; + }); + + if (result != FileSystemTaskResult.Success) + { + throw new InvalidOperationException($"{nameof(CountIndexFolders)} failed: {result}"); + } + + return dirs.Count; + } + public void RebuildProjection(ITracer tracer, Stream indexStream) { if (this.projection == null) diff --git a/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.cs b/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.cs index 10fd7b573..3b8301b12 100644 --- a/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.cs +++ b/GVFS/GVFS.Virtualization/Projection/GitIndexProjection.cs @@ -109,7 +109,7 @@ public GitIndexProjection( this.projectionParseComplete = new ManualResetEventSlim(initialState: false); this.wakeUpIndexParsingThread = new AutoResetEvent(initialState: false); this.projectionIndexBackupPath = 
Path.Combine(this.context.Enlistment.DotGVFSRoot, ProjectionIndexBackupName); - this.indexPath = Path.Combine(this.context.Enlistment.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Index); + this.indexPath = this.context.Enlistment.GitIndexPath; this.placeholderDatabase = placeholderDatabase; this.sparseCollection = sparseCollection; this.modifiedPaths = modifiedPaths; @@ -309,6 +309,45 @@ public virtual bool IsProjectionParseComplete() return this.projectionParseComplete.IsSet; } + /// + /// Get the total number of directories in the projection. + /// This is computed from the in-memory tree built during index parsing, + /// so it is essentially free (no I/O, no process spawn). + /// + public virtual int GetProjectedFolderCount() + { + this.projectionReadWriteLock.EnterReadLock(); + try + { + return this.rootFolderData.GetRecursiveFolderCount(); + } + finally + { + this.projectionReadWriteLock.ExitReadLock(); + } + } + + /// + /// Count unique directories by parsing the index file directly. + /// This is a fallback for when the in-memory projection is not available + /// (e.g., when running gvfs health --status without a mount process). + /// + public static int CountIndexFolders(ITracer tracer, string indexPath) + { + using (FileStream indexStream = new FileStream(indexPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)) + { + return CountIndexFolders(tracer, indexStream); + } + } + + /// + /// Count unique directories by parsing an index stream. 
+ /// + public static int CountIndexFolders(ITracer tracer, Stream indexStream) + { + return GitIndexParser.CountIndexFolders(tracer, indexStream); + } + public virtual void InvalidateProjection() { this.context.Tracer.RelatedEvent(EventLevel.Informational, "InvalidateProjection", null); diff --git a/GVFS/GVFS/CommandLine/CacheServerVerb.cs b/GVFS/GVFS/CommandLine/CacheServerVerb.cs index 86754ae67..9fedad0b0 100644 --- a/GVFS/GVFS/CommandLine/CacheServerVerb.cs +++ b/GVFS/GVFS/CommandLine/CacheServerVerb.cs @@ -42,12 +42,6 @@ protected override void Execute(GVFSEnlistment enlistment) using (ITracer tracer = new JsonTracer(GVFSConstants.GVFSEtwProviderName, "CacheVerb")) { - string authErrorMessage; - if (!this.TryAuthenticate(tracer, enlistment, out authErrorMessage)) - { - this.ReportErrorAndExit(tracer, "Authentication failed: " + authErrorMessage); - } - CacheServerResolver cacheServerResolver = new CacheServerResolver(tracer, enlistment); ServerGVFSConfig serverGVFSConfig = null; string error = null; @@ -55,8 +49,12 @@ protected override void Execute(GVFSEnlistment enlistment) // Handle the three operation types: list, set, and get (default) if (this.ListCacheServers) { - // For listing, require config endpoint to succeed - serverGVFSConfig = this.QueryGVFSConfig(tracer, enlistment, retryConfig); + // For listing, require config endpoint to succeed (no fallback) + if (!this.TryAuthenticateAndQueryGVFSConfig( + tracer, enlistment, retryConfig, out serverGVFSConfig, out error)) + { + this.ReportErrorAndExit(tracer, "Unable to query /gvfs/config" + Environment.NewLine + error); + } List cacheServers = serverGVFSConfig.CacheServers.ToList(); @@ -80,11 +78,12 @@ protected override void Execute(GVFSEnlistment enlistment) CacheServerInfo cacheServer = cacheServerResolver.ParseUrlOrFriendlyName(this.CacheToSet); // For set operation, allow fallback if config endpoint fails but cache server URL is valid - serverGVFSConfig = this.QueryGVFSConfigWithFallbackCacheServer( 
- tracer, - enlistment, - retryConfig, - cacheServer); + if (!this.TryAuthenticateAndQueryGVFSConfig( + tracer, enlistment, retryConfig, out serverGVFSConfig, out error, + fallbackCacheServer: cacheServer)) + { + this.ReportErrorAndExit(tracer, "Authentication failed: " + error); + } cacheServer = this.ResolveCacheServer(tracer, cacheServer, cacheServerResolver, serverGVFSConfig); @@ -101,11 +100,12 @@ protected override void Execute(GVFSEnlistment enlistment) CacheServerInfo cacheServer = CacheServerResolver.GetCacheServerFromConfig(enlistment); // For get operation, allow fallback if config endpoint fails but cache server URL is valid - serverGVFSConfig =this.QueryGVFSConfigWithFallbackCacheServer( - tracer, - enlistment, - retryConfig, - cacheServer); + if (!this.TryAuthenticateAndQueryGVFSConfig( + tracer, enlistment, retryConfig, out serverGVFSConfig, out error, + fallbackCacheServer: cacheServer)) + { + this.ReportErrorAndExit(tracer, "Authentication failed: " + error); + } CacheServerInfo resolvedCacheServer = cacheServerResolver.ResolveNameFromRemote(cacheServer.Url, serverGVFSConfig); diff --git a/GVFS/GVFS/CommandLine/CacheVerb.cs b/GVFS/GVFS/CommandLine/CacheVerb.cs new file mode 100644 index 000000000..70c8a65fd --- /dev/null +++ b/GVFS/GVFS/CommandLine/CacheVerb.cs @@ -0,0 +1,233 @@ +using CommandLine; +using GVFS.Common; +using GVFS.Common.FileSystem; +using GVFS.Common.Tracing; +using System; +using System.Globalization; +using System.IO; + +namespace GVFS.CommandLine +{ + [Verb(CacheVerb.CacheVerbName, HelpText = "Display information about the GVFS shared object cache")] + public class CacheVerb : GVFSVerb.ForExistingEnlistment + { + private const string CacheVerbName = "cache"; + + public CacheVerb() + { + } + + protected override string VerbName + { + get { return CacheVerbName; } + } + + protected override void Execute(GVFSEnlistment enlistment) + { + using (ITracer tracer = new JsonTracer(GVFSConstants.GVFSEtwProviderName, "CacheVerb")) + { + 
string localCacheRoot; + string gitObjectsRoot; + this.GetLocalCachePaths(tracer, enlistment, out localCacheRoot, out gitObjectsRoot); + + if (string.IsNullOrWhiteSpace(gitObjectsRoot)) + { + this.ReportErrorAndExit("Could not determine git objects root. Is this a GVFS enlistment with a shared cache?"); + } + + this.Output.WriteLine("Repo URL: " + enlistment.RepoUrl); + this.Output.WriteLine("Cache root: " + (localCacheRoot ?? "(unknown)")); + this.Output.WriteLine("Git objects: " + gitObjectsRoot); + + string packRoot = Path.Combine(gitObjectsRoot, GVFSConstants.DotGit.Objects.Pack.Name); + if (!Directory.Exists(packRoot)) + { + this.Output.WriteLine(); + this.Output.WriteLine("Pack directory not found: " + packRoot); + tracer.RelatedError("Pack directory not found: " + packRoot); + return; + } + + int prefetchPackCount; + long prefetchPackSize; + int otherPackCount; + long otherPackSize; + long latestPrefetchTimestamp; + this.GetPackSummary(packRoot, out prefetchPackCount, out prefetchPackSize, out otherPackCount, out otherPackSize, out latestPrefetchTimestamp); + + int looseObjectCount = this.CountLooseObjects(gitObjectsRoot); + + long totalSize = prefetchPackSize + otherPackSize; + this.Output.WriteLine(); + this.Output.WriteLine("Total pack size: " + this.FormatSizeForUserDisplay(totalSize)); + this.Output.WriteLine("Prefetch packs: " + prefetchPackCount + " (" + this.FormatSizeForUserDisplay(prefetchPackSize) + ")"); + this.Output.WriteLine("Other packs: " + otherPackCount + " (" + this.FormatSizeForUserDisplay(otherPackSize) + ")"); + + if (latestPrefetchTimestamp > 0) + { + try + { + DateTimeOffset latestTime = DateTimeOffset.FromUnixTimeSeconds(latestPrefetchTimestamp).ToLocalTime(); + this.Output.WriteLine("Latest prefetch: " + latestTime.ToString("yyyy-MM-dd HH:mm:ss zzz")); + } + catch (ArgumentOutOfRangeException) + { + tracer.RelatedWarning("Prefetch timestamp out of range: " + latestPrefetchTimestamp); + } + } + + this.Output.WriteLine("Loose 
objects: " + looseObjectCount.ToString("N0")); + + EventMetadata metadata = new EventMetadata(); + metadata.Add("repoUrl", enlistment.RepoUrl); + metadata.Add("localCacheRoot", localCacheRoot); + metadata.Add("gitObjectsRoot", gitObjectsRoot); + metadata.Add("prefetchPackCount", prefetchPackCount); + metadata.Add("prefetchPackSize", prefetchPackSize); + metadata.Add("otherPackCount", otherPackCount); + metadata.Add("otherPackSize", otherPackSize); + metadata.Add("latestPrefetchTimestamp", latestPrefetchTimestamp); + metadata.Add("looseObjectCount", looseObjectCount); + tracer.RelatedEvent(EventLevel.Informational, "CacheInfo", metadata, Keywords.Telemetry); + } + } + + internal void GetPackSummary( + string packRoot, + out int prefetchPackCount, + out long prefetchPackSize, + out int otherPackCount, + out long otherPackSize, + out long latestPrefetchTimestamp) + { + prefetchPackCount = 0; + prefetchPackSize = 0; + otherPackCount = 0; + otherPackSize = 0; + latestPrefetchTimestamp = 0; + + string[] packFiles = Directory.GetFiles(packRoot, "*.pack"); + + foreach (string packFile in packFiles) + { + long length; + try + { + length = new FileInfo(packFile).Length; + } + catch (IOException) + { + continue; + } + + string fileName = Path.GetFileName(packFile); + + if (fileName.StartsWith(GVFSConstants.PrefetchPackPrefix, StringComparison.OrdinalIgnoreCase)) + { + prefetchPackCount++; + prefetchPackSize += length; + + long? 
timestamp = this.TryGetPrefetchTimestamp(packFile); + if (timestamp.HasValue && timestamp.Value > latestPrefetchTimestamp) + { + latestPrefetchTimestamp = timestamp.Value; + } + } + else + { + otherPackCount++; + otherPackSize += length; + } + } + } + + internal int CountLooseObjects(string gitObjectsRoot) + { + int looseObjectCount = 0; + + for (int i = 0; i < 256; i++) + { + string hexDir = Path.Combine(gitObjectsRoot, i.ToString("x2")); + if (Directory.Exists(hexDir)) + { + try + { + looseObjectCount += Directory.GetFiles(hexDir).Length; + } + catch (IOException) + { + } + } + } + + return looseObjectCount; + } + + private long? TryGetPrefetchTimestamp(string packPath) + { + string filename = Path.GetFileName(packPath); + string[] parts = filename.Split('-'); + if (parts.Length > 1 && long.TryParse(parts[1], out long timestamp)) + { + return timestamp; + } + + return null; + } + + internal string FormatSizeForUserDisplay(long bytes) + { + if (bytes >= 1L << 30) + { + return string.Format(CultureInfo.CurrentCulture, "{0:F1} GB", bytes / (double)(1L << 30)); + } + + if (bytes >= 1L << 20) + { + return string.Format(CultureInfo.CurrentCulture, "{0:F1} MB", bytes / (double)(1L << 20)); + } + + if (bytes >= 1L << 10) + { + return string.Format(CultureInfo.CurrentCulture, "{0:F1} KB", bytes / (double)(1L << 10)); + } + + return bytes + " bytes"; + } + + private void GetLocalCachePaths(ITracer tracer, GVFSEnlistment enlistment, out string localCacheRoot, out string gitObjectsRoot) + { + localCacheRoot = null; + gitObjectsRoot = null; + + try + { + string error; + if (RepoMetadata.TryInitialize(tracer, Path.Combine(enlistment.EnlistmentRoot, GVFSPlatform.Instance.Constants.DotGVFSRoot), out error)) + { + if (!RepoMetadata.Instance.TryGetLocalCacheRoot(out localCacheRoot, out error)) + { + tracer.RelatedWarning("Failed to read local cache root: " + error); + } + + if (!RepoMetadata.Instance.TryGetGitObjectsRoot(out gitObjectsRoot, out error)) + { + 
tracer.RelatedWarning("Failed to read git objects root: " + error); + } + } + else + { + this.ReportErrorAndExit("Failed to read repo metadata: " + error); + } + } + catch (Exception e) + { + this.ReportErrorAndExit("Failed to read repo metadata: " + e.Message); + } + finally + { + RepoMetadata.Shutdown(); + } + } + } +} diff --git a/GVFS/GVFS/CommandLine/CloneVerb.cs b/GVFS/GVFS/CommandLine/CloneVerb.cs index e0d858360..bd37c7d4b 100644 --- a/GVFS/GVFS/CommandLine/CloneVerb.cs +++ b/GVFS/GVFS/CommandLine/CloneVerb.cs @@ -121,6 +121,7 @@ public override void Execute() CacheServerInfo cacheServer = null; ServerGVFSConfig serverGVFSConfig = null; + bool trustPackIndexes; using (JsonTracer tracer = new JsonTracer(GVFSConstants.GVFSEtwProviderName, "GVFSClone")) { @@ -184,19 +185,19 @@ public override void Execute() this.Output.WriteLine(" Local Cache: " + resolvedLocalCacheRoot); this.Output.WriteLine(" Destination: " + enlistment.EnlistmentRoot); - string authErrorMessage; - if (!this.TryAuthenticate(tracer, enlistment, out authErrorMessage)) - { - this.ReportErrorAndExit(tracer, "Cannot clone because authentication failed: " + authErrorMessage); - } - RetryConfig retryConfig = this.GetRetryConfig(tracer, enlistment, TimeSpan.FromMinutes(RetryConfig.FetchAndCloneTimeoutMinutes)); - serverGVFSConfig = this.QueryGVFSConfigWithFallbackCacheServer( + string authErrorMessage; + if (!this.TryAuthenticateAndQueryGVFSConfig( tracer, enlistment, retryConfig, - cacheServer); + out serverGVFSConfig, + out authErrorMessage, + fallbackCacheServer: cacheServer)) + { + this.ReportErrorAndExit(tracer, "Cannot clone because authentication failed: " + authErrorMessage); + } cacheServer = this.ResolveCacheServer(tracer, cacheServer, cacheServerResolver, serverGVFSConfig); @@ -216,13 +217,17 @@ public override void Execute() { tracer.RelatedError(cloneResult.ErrorMessage); } + + using (var repo = new LibGit2RepoInvoker(tracer, enlistment.WorkingDirectoryBackingRoot)) + { + 
trustPackIndexes = repo.GetConfigBoolOrDefault(GVFSConstants.GitConfig.TrustPackIndexes, GVFSConstants.GitConfig.TrustPackIndexesDefault); + } } if (cloneResult.Success) { if (!this.NoPrefetch) { - bool trustPackIndexes = enlistment.GetTrustPackIndexesConfig(); /* If pack indexes are not trusted, the prefetch can take a long time. * We will run the prefetch command in the background. */ diff --git a/GVFS/GVFS/CommandLine/DehydrateVerb.cs b/GVFS/GVFS/CommandLine/DehydrateVerb.cs index b739b9b2d..5f9702239 100644 --- a/GVFS/GVFS/CommandLine/DehydrateVerb.cs +++ b/GVFS/GVFS/CommandLine/DehydrateVerb.cs @@ -43,9 +43,19 @@ public class DehydrateVerb : GVFSVerb.ForExistingEnlistment "folders", Default = "", Required = false, - HelpText = "A semicolon (" + FolderListSeparator + ") delimited list of folders to dehydrate. Each folder must be relative to the repository root.")] + HelpText = "A semicolon (" + FolderListSeparator + ") delimited list of folders to dehydrate. " + + "Each folder must be relative to the repository root. " + + "When omitted (without --full), all root-level folders are dehydrated.")] public string Folders { get; set; } + [Option( + "full", + Default = false, + Required = false, + HelpText = "Perform a full dehydration that unmounts, backs up the entire src folder, and re-creates the virtualization root from scratch. 
" + + "Without this flag, the default behavior dehydrates individual folders which is faster and does not require a full unmount.")] + public bool Full { get; set; } + public string RunningVerbName { get; set; } = DehydrateVerbName; public string ActionName { get; set; } = DehydrateVerbName; @@ -75,6 +85,7 @@ protected override void Execute(GVFSEnlistment enlistment) { { "Confirmed", this.Confirmed }, { "NoStatus", this.NoStatus }, + { "Full", this.Full }, { "NamedPipeName", enlistment.NamedPipeName }, { "Folders", this.Folders }, { nameof(this.EnlistmentRootPathParameter), this.EnlistmentRootPathParameter }, @@ -112,42 +123,57 @@ protected override void Execute(GVFSEnlistment enlistment) } } - bool fullDehydrate = string.IsNullOrEmpty(this.Folders); + bool fullDehydrate = this.Full; + bool hasFoldersList = !string.IsNullOrEmpty(this.Folders); + + if (fullDehydrate && hasFoldersList) + { + this.ReportErrorAndExit("Cannot combine --full with --folders."); + } if (!this.Confirmed && fullDehydrate) { this.Output.WriteLine( $@"WARNING: THIS IS AN EXPERIMENTAL FEATURE -Dehydrate will back up your src folder, and then create a new, empty src folder -with a fresh virtualization of the repo. All of your downloaded objects, branches, -and siblings of the src folder will be preserved. Your modified working directory -files will be moved to the backup, and your new working directory will not have +Dehydrate --full will back up your src folder, and then create a new, empty src folder +with a fresh virtualization of the repo. All of your downloaded objects, branches, +and siblings of the src folder will be preserved. Your modified working directory +files will be moved to the backup, and your new working directory will not have any of your uncommitted changes. -Before you dehydrate, make sure you have committed any working directory changes -you want to keep. 
If you choose not to, you can still find your uncommitted changes -in the backup folder, but it will be harder to find them because 'git status' +Before you dehydrate, make sure you have committed any working directory changes +you want to keep. If you choose not to, you can still find your uncommitted changes +in the backup folder, but it will be harder to find them because 'git status' will not work in the backup. -To actually execute the dehydrate, run 'gvfs dehydrate --confirm' from {enlistment.EnlistmentRoot}. +To actually execute the dehydrate, run 'gvfs dehydrate --confirm --full' from {enlistment.EnlistmentRoot}. "); return; } else if (!this.Confirmed) { + string folderDescription = hasFoldersList + ? "the folders specified" + : "all root-level folders"; + + string confirmCommand = hasFoldersList + ? $"'gvfs dehydrate --confirm --folders '" + : $"'gvfs dehydrate --confirm'"; + this.Output.WriteLine( -@"WARNING: THIS IS AN EXPERIMENTAL FEATURE +$@"WARNING: THIS IS AN EXPERIMENTAL FEATURE All of your downloaded objects, branches, and siblings of the src folder -will be preserved. This will remove the folders specified and any working directory +will be preserved. This will remove {folderDescription} and any working directory files and folders even if ignored by git similar to 'git clean -xdf '. -Before you dehydrate, you will have to commit any working directory changes -you want to keep and have a clean 'git status'. +Before you dehydrate, you will have to commit any working directory changes +you want to keep and have a clean 'git status', or run with --no-status to +undo any uncommitted changes. -To actually execute the dehydrate, run 'gvfs dehydrate --confirm --folders ' +To actually execute the dehydrate, run {confirmCommand} from a parent of the folders list. "); @@ -157,13 +183,7 @@ from a parent of the folders list. 
if (fullDehydrate && Environment.CurrentDirectory.StartsWith(enlistment.WorkingDirectoryBackingRoot)) { /* If running from /src, the dehydrate would fail because of the handle we are holding on it. */ - this.Output.WriteLine($"Dehydrate must be run from {enlistment.EnlistmentRoot}"); - return; - } - - if (this.NoStatus && !fullDehydrate) - { - this.ReportErrorAndExit(tracer, "Dehydrate --no-status not valid with --folders"); + this.Output.WriteLine($"Dehydrate --full must be run from {enlistment.EnlistmentRoot}"); return; } @@ -176,21 +196,25 @@ from a parent of the folders list. { this.WriteMessage(tracer, $"Starting {this.RunningVerbName}. All of your existing files will be backed up in " + backupRoot); } + else + { + this.WriteMessage(tracer, $"Starting {this.RunningVerbName}. Selected folders will be backed up in " + backupRoot); + } this.WriteMessage(tracer, $"WARNING: If you abort the {this.RunningVerbName} after this point, the repo may become corrupt"); this.Output.WriteLine(); - this.Unmount(tracer); - - string error; - if (!DiskLayoutUpgrade.TryCheckDiskLayoutVersion(tracer, enlistment.EnlistmentRoot, out error)) - { - this.ReportErrorAndExit(tracer, error); - } - if (fullDehydrate) { + this.Unmount(tracer); + + string error; + if (!DiskLayoutUpgrade.TryCheckDiskLayoutVersion(tracer, enlistment.EnlistmentRoot, out error)) + { + this.ReportErrorAndExit(tracer, error); + } + RetryConfig retryConfig; if (!RetryConfig.TryLoadFromGitConfig(tracer, enlistment, out retryConfig, out error)) { @@ -210,13 +234,21 @@ from a parent of the folders list. 
} else { - string[] folders = this.Folders.Split(new[] { FolderListSeparator }, StringSplitOptions.RemoveEmptyEntries); + string[] folders; + if (hasFoldersList) + { + folders = this.Folders.Split(new[] { FolderListSeparator }, StringSplitOptions.RemoveEmptyEntries); + } + else + { + folders = this.GetRootLevelFolders(enlistment); + } if (folders.Length > 0) { if (cleanStatus) { - this.DehydrateFolders(tracer, enlistment, folders); + this.DehydrateFolders(tracer, enlistment, folders, backupRoot); } else { @@ -231,8 +263,13 @@ from a parent of the folders list. } } - private void DehydrateFolders(JsonTracer tracer, GVFSEnlistment enlistment, string[] folders) + private void DehydrateFolders(JsonTracer tracer, GVFSEnlistment enlistment, string[] folders, string backupRoot) { + if (!this.TryBackupNonSrcFiles(tracer, enlistment, backupRoot)) + { + return; + } + List foldersToDehydrate = new List(); List folderErrors = new List(); @@ -241,7 +278,7 @@ private void DehydrateFolders(JsonTracer tracer, GVFSEnlistment enlistment, stri { if (!ModifiedPathsDatabase.TryLoadOrCreate( tracer, - Path.Combine(enlistment.DotGVFSRoot, GVFSConstants.DotGVFS.Databases.ModifiedPaths), + Path.Combine(GetBackupDatabasesPath(backupRoot), GVFSConstants.DotGVFS.Databases.ModifiedPaths), this.fileSystem, out ModifiedPathsDatabase modifiedPaths, out string error)) @@ -271,29 +308,7 @@ private void DehydrateFolders(JsonTracer tracer, GVFSEnlistment enlistment, stri else { string fullPath = Path.Combine(enlistment.WorkingDirectoryBackingRoot, folder); - if (this.fileSystem.DirectoryExists(fullPath)) - { - // Since directories are deleted last and will be empty at that point we can skip errors - // while trying to delete it and leave the empty directory and continue to dehydrate - if (!this.TryIO(tracer, () => this.fileSystem.DeleteDirectory(fullPath, ignoreDirectoryDeleteExceptions: true), $"Deleting '{fullPath}'", out ioError)) - { - this.WriteMessage(tracer, $"Cannot {this.ActionName} folder 
'{folder}': removing '{folder}' failed."); - this.WriteMessage(tracer, "Ensure no applications are accessing the folder and retry."); - this.WriteMessage(tracer, $"More details: {ioError}"); - folderErrors.Add($"{folder}\0{ioError}"); - } - else - { - foldersToDehydrate.Add(folder); - } - } - else - { - this.WriteMessage(tracer, $"Cannot {this.ActionName} folder '{folder}': '{folder}' does not exist."); - - // Still add to foldersToDehydrate so that any placeholders or modified paths get cleaned up - foldersToDehydrate.Add(folder); - } + foldersToDehydrate.Add(folder); } } } @@ -306,16 +321,10 @@ private void DehydrateFolders(JsonTracer tracer, GVFSEnlistment enlistment, stri this.ReportErrorAndExit(tracer, $"{this.ActionName} for folders failed."); } - // We can skip the version check because dehydrating folders requires that a git status - // be run first, and running git status requires that the repo already be mounted (meaning - // we don't need to perform another version check again) - this.Mount( - tracer, - skipVersionCheck: true); - if (foldersToDehydrate.Count > 0) { - this.SendDehydrateMessage(tracer, enlistment, folderErrors, foldersToDehydrate); + string backupSrc = GetBackupSrcPath(backupRoot); + this.SendDehydrateMessage(tracer, enlistment, folderErrors, foldersToDehydrate, backupSrc); } if (folderErrors.Count > 0) @@ -329,6 +338,43 @@ private void DehydrateFolders(JsonTracer tracer, GVFSEnlistment enlistment, stri } } + private static string GetBackupSrcPath(string backupRoot) + { + return Path.Combine(backupRoot, "src"); + } + + private string[] GetRootLevelFolders(GVFSEnlistment enlistment) + { + HashSet rootFolders = new HashSet(GVFSPlatform.Instance.Constants.PathComparer); + GitProcess git = new GitProcess(enlistment); + GitProcess.Result result = git.LsTree( + GVFSConstants.DotGit.HeadName, + line => + { + // ls-tree output format: " \t" + int tabIndex = line.IndexOf('\t'); + if (tabIndex >= 0) + { + string path = line.Substring(tabIndex + 1); 
+ int separatorIndex = path.IndexOf('/'); + string rootFolder = separatorIndex >= 0 ? path.Substring(0, separatorIndex) : path; + if (!rootFolder.Equals(GVFSConstants.DotGit.Root, StringComparison.OrdinalIgnoreCase)) + { + rootFolders.Add(rootFolder); + } + } + }, + recursive: false, + showDirectories: true); + + if (result.ExitCodeIsFailure) + { + this.ReportErrorAndExit($"Failed to enumerate root-level folders from HEAD: {result.Errors}"); + } + + return rootFolders.ToArray(); + } + private bool IsFolderValid(string folderPath) { if (folderPath == GVFSConstants.DotGit.Root || @@ -343,7 +389,12 @@ private bool IsFolderValid(string folderPath) return true; } - private void SendDehydrateMessage(ITracer tracer, GVFSEnlistment enlistment, List folderErrors, List folders) + private void SendDehydrateMessage( + ITracer tracer, + GVFSEnlistment enlistment, + List folderErrors, + List folders, + string backupFolder) { NamedPipeMessages.DehydrateFolders.Response response = null; @@ -353,10 +404,17 @@ private void SendDehydrateMessage(ITracer tracer, GVFSEnlistment enlistment, Lis { if (!pipeClient.Connect()) { - this.ReportErrorAndExit("Unable to connect to GVFS. Try running 'gvfs mount'"); + this.Output.WriteLine("Mounting..."); + this.Mount(tracer, skipVersionCheck: false); + if (!pipeClient.Connect()) + { + this.ReportErrorAndExit("Unable to connect to GVFS. 
Try running 'gvfs mount'"); + } } - NamedPipeMessages.DehydrateFolders.Request request = new NamedPipeMessages.DehydrateFolders.Request(string.Join(FolderListSeparator, folders)); + NamedPipeMessages.DehydrateFolders.Request request = new NamedPipeMessages.DehydrateFolders.Request( + folders: string.Join(";", folders), + backupFolderPath: backupFolder); pipeClient.SendRequest(request.CreateMessage()); response = NamedPipeMessages.DehydrateFolders.Response.FromMessage(NamedPipeMessages.Message.FromString(pipeClient.ReadRawResponse())); } @@ -438,83 +496,83 @@ private void Mount(ITracer tracer, bool skipVersionCheck) private bool CheckGitStatus(ITracer tracer, GVFSEnlistment enlistment, bool fullDehydrate) { - if (!this.NoStatus) + if (this.NoStatus) { - this.WriteMessage(tracer, $"Running git status before {this.ActionName} to make sure you don't have any pending changes."); - if (fullDehydrate) - { - this.WriteMessage(tracer, $"If this takes too long, you can abort and run {this.RunningVerbName} with --no-status to skip this safety check."); - } + return true; + } - this.Output.WriteLine(); + this.WriteMessage(tracer, $"Running git status before {this.ActionName} to make sure you don't have any pending changes."); + if (fullDehydrate) + { + this.WriteMessage(tracer, $"If this takes too long, you can abort and run {this.RunningVerbName} with --no-status to skip this safety check."); + } - bool isMounted = false; - GitProcess.Result statusResult = null; - if (!this.ShowStatusWhileRunning( - () => + this.Output.WriteLine(); + + bool isMounted = false; + GitProcess.Result statusResult = null; + if (!this.ShowStatusWhileRunning( + () => + { + if (this.ExecuteGVFSVerb(tracer) != ReturnCode.Success) { - if (this.ExecuteGVFSVerb(tracer) != ReturnCode.Success) - { - return false; - } + return false; + } - isMounted = true; + isMounted = true; - GitProcess git = new GitProcess(enlistment); - statusResult = git.Status(allowObjectDownloads: false, useStatusCache: false, 
showUntracked: true); - if (statusResult.ExitCodeIsFailure) - { - return false; - } + GitProcess git = new GitProcess(enlistment); + statusResult = git.Status(allowObjectDownloads: false, useStatusCache: false, showUntracked: true); + if (statusResult.ExitCodeIsFailure) + { + return false; + } - if (!statusResult.Output.Contains("nothing to commit, working tree clean")) - { - return false; - } + if (!statusResult.Output.Contains("nothing to commit, working tree clean")) + { + return false; + } - return true; - }, - "Running git status", - suppressGvfsLogMessage: true)) - { - this.Output.WriteLine(); + return true; + }, + "Running git status", + suppressGvfsLogMessage: true)) + { + this.Output.WriteLine(); - if (!isMounted) + if (!isMounted) + { + this.WriteMessage(tracer, "Failed to run git status because the repo is not mounted"); + if (fullDehydrate) { - this.WriteMessage(tracer, "Failed to run git status because the repo is not mounted"); - if (fullDehydrate) - { - this.WriteMessage(tracer, "Either mount first, or run with --no-status"); - } + this.WriteMessage(tracer, "Either mount first, or run with --no-status"); } - else if (statusResult.ExitCodeIsFailure) + } + else if (statusResult.ExitCodeIsFailure) + { + this.WriteMessage(tracer, "Failed to run git status: " + statusResult.Errors); + } + else + { + this.WriteMessage(tracer, statusResult.Output); + this.WriteMessage(tracer, "git status reported that you have dirty files"); + if (fullDehydrate) { - this.WriteMessage(tracer, "Failed to run git status: " + statusResult.Errors); + this.WriteMessage(tracer, $"Either commit your changes or run {this.RunningVerbName} with --no-status"); } else { - this.WriteMessage(tracer, statusResult.Output); - this.WriteMessage(tracer, "git status reported that you have dirty files"); - if (fullDehydrate) - { - this.WriteMessage(tracer, $"Either commit your changes or run {this.RunningVerbName} with --no-status"); - } - else - { - this.WriteMessage(tracer, "Either commit your 
changes or reset and clean your working directory."); - } + this.WriteMessage(tracer, "Either commit your changes or reset and clean your working directory."); } - - this.ReportErrorAndExit(tracer, $"Aborted {this.ActionName}"); - return false; } - else - { - return true; - } - } - return false; + this.ReportErrorAndExit(tracer, $"Aborted {this.ActionName}"); + return false; + } + else + { + return true; + } } private void PrepareSrcFolder(ITracer tracer, GVFSEnlistment enlistment) @@ -535,12 +593,84 @@ private void PrepareSrcFolder(ITracer tracer, GVFSEnlistment enlistment) } } + private bool TryBackupNonSrcFiles(ITracer tracer, GVFSEnlistment enlistment, string backupRoot) + { + string backupSrc = GetBackupSrcPath(backupRoot); + string backupGit = Path.Combine(backupRoot, ".git"); + string backupGvfs = Path.Combine(backupRoot, GVFSPlatform.Instance.Constants.DotGVFSRoot); + string backupDatabases = GetBackupDatabasesPath(backupGvfs); + + string errorMessage = string.Empty; + if (!this.ShowStatusWhileRunning( + () => + { + string ioError; + if (!this.TryIO(tracer, () => Directory.CreateDirectory(backupRoot), "Create backup directory", out ioError) || + !this.TryIO(tracer, () => Directory.CreateDirectory(backupGit), "Create backup .git directory", out ioError) || + !this.TryIO(tracer, () => Directory.CreateDirectory(backupGvfs), "Create backup .gvfs directory", out ioError) || + !this.TryIO(tracer, () => Directory.CreateDirectory(backupDatabases), "Create backup .gvfs databases directory", out ioError)) + { + errorMessage = "Failed to create backup folders at " + backupRoot + ": " + ioError; + return false; + } + + // ... backup the .gvfs hydration-related data structures... + string databasesFolder = Path.Combine(enlistment.DotGVFSRoot, GVFSConstants.DotGVFS.Databases.Name); + if (!this.TryCopyFilesInFolder(tracer, databasesFolder, backupDatabases, searchPattern: "*", filenamesToSkip: "RepoMetadata.dat")) + { + return false; + } + + // ... 
backup everything related to the .git\index... + if (!this.TryIO( + tracer, + () => File.Copy( + Path.Combine(enlistment.DotGitRoot, GVFSConstants.DotGit.IndexName), + Path.Combine(backupGit, GVFSConstants.DotGit.IndexName)), + "Backup the git index", + out errorMessage) || + !this.TryIO( + tracer, + () => File.Copy( + Path.Combine(enlistment.DotGVFSRoot, GitIndexProjection.ProjectionIndexBackupName), + Path.Combine(backupGvfs, GitIndexProjection.ProjectionIndexBackupName)), + "Backup GVFS_projection", + out errorMessage)) + { + return false; + } + + // ... backup all .git\*.lock files + if (!this.TryCopyFilesInFolder(tracer, enlistment.DotGitRoot, backupGit, searchPattern: "*.lock")) + { + errorMessage = "Failed to backup .git lock files."; + return false; + } + + return true; + }, + "Backing up your files")) + { + this.Output.WriteLine(); + this.WriteMessage(tracer, "ERROR: " + errorMessage); + + return false; + } + + return true; + } + + private static string GetBackupDatabasesPath(string backupGvfs) + { + return Path.Combine(backupGvfs, GVFSConstants.DotGVFS.Databases.Name); + } + private bool TryBackupFiles(ITracer tracer, GVFSEnlistment enlistment, string backupRoot) { - string backupSrc = Path.Combine(backupRoot, "src"); + string backupSrc = GetBackupSrcPath(backupRoot); string backupGit = Path.Combine(backupRoot, ".git"); string backupGvfs = Path.Combine(backupRoot, GVFSPlatform.Instance.Constants.DotGVFSRoot); - string backupDatabases = Path.Combine(backupGvfs, GVFSConstants.DotGVFS.Databases.Name); + string backupDatabases = GetBackupDatabasesPath(backupGvfs); string errorMessage = string.Empty; if (!this.ShowStatusWhileRunning( @@ -638,6 +768,28 @@ private bool TryBackupFilesInFolder(ITracer tracer, string folderPath, string ba return true; } + private bool TryCopyFilesInFolder(ITracer tracer, string folderPath, string backupPath, string searchPattern, params string[] filenamesToSkip) + { + string errorMessage; + foreach (string file in 
Directory.GetFiles(folderPath, searchPattern)) + { + string fileName = Path.GetFileName(file); + if (!filenamesToSkip.Any(x => x.Equals(fileName, GVFSPlatform.Instance.Constants.PathComparison))) + { + if (!this.TryIO( + tracer, + () => File.Copy(file, file.Replace(folderPath, backupPath)), + $"Backing up {Path.GetFileName(file)}", + out errorMessage)) + { + return false; + } + } + } + + return true; + } + private bool TryDownloadGitObjects(ITracer tracer, GVFSEnlistment enlistment, RetryConfig retryConfig) { string errorMessage = null; diff --git a/GVFS/GVFS/CommandLine/GVFSVerb.cs b/GVFS/GVFS/CommandLine/GVFSVerb.cs index fe0731a00..c254a92d1 100644 --- a/GVFS/GVFS/CommandLine/GVFSVerb.cs +++ b/GVFS/GVFS/CommandLine/GVFSVerb.cs @@ -36,60 +36,6 @@ public GVFSVerb(bool validateOrigin = true) this.InitializeDefaultParameterValues(); } - [Flags] - private enum GitCoreGVFSFlags - { - // GVFS_SKIP_SHA_ON_INDEX - // Disables the calculation of the sha when writing the index - SkipShaOnIndex = 1 << 0, - - // GVFS_BLOCK_COMMANDS - // Blocks git commands that are not allowed in a GVFS/Scalar repo - BlockCommands = 1 << 1, - - // GVFS_MISSING_OK - // Normally git write-tree ensures that the objects referenced by the - // directory exist in the object database.This option disables this check. - MissingOk = 1 << 2, - - // GVFS_NO_DELETE_OUTSIDE_SPARSECHECKOUT - // When marking entries to remove from the index and the working - // directory this option will take into account what the - // skip-worktree bit was set to so that if the entry has the - // skip-worktree bit set it will not be removed from the working - // directory. This will allow virtualized working directories to - // detect the change to HEAD and use the new commit tree to show - // the files that are in the working directory. 
- NoDeleteOutsideSparseCheckout = 1 << 3, - - // GVFS_FETCH_SKIP_REACHABILITY_AND_UPLOADPACK - // While performing a fetch with a virtual file system we know - // that there will be missing objects and we don't want to download - // them just because of the reachability of the commits. We also - // don't want to download a pack file with commits, trees, and blobs - // since these will be downloaded on demand. This flag will skip the - // checks on the reachability of objects during a fetch as well as - // the upload pack so that extraneous objects don't get downloaded. - FetchSkipReachabilityAndUploadPack = 1 << 4, - - // 1 << 5 has been deprecated - - // GVFS_BLOCK_FILTERS_AND_EOL_CONVERSIONS - // With a virtual file system we only know the file size before any - // CRLF or smudge/clean filters processing is done on the client. - // To prevent file corruption due to truncation or expansion with - // garbage at the end, these filters must not run when the file - // is first accessed and brought down to the client. Git.exe can't - // currently tell the first access vs subsequent accesses so this - // flag just blocks them from occurring at all. - BlockFiltersAndEolConversions = 1 << 6, - - // GVFS_PREFETCH_DURING_FETCH - // While performing a `git fetch` command, use the gvfs-helper to - // perform a "prefetch" of commits and trees. 
- PrefetchDuringFetch = 1 << 7, - } - public abstract string EnlistmentRootPathParameter { get; set; } [Option( @@ -157,162 +103,7 @@ public string ServicePipeName public static bool TrySetRequiredGitConfigSettings(Enlistment enlistment) { - string expectedHooksPath = Path.Combine(enlistment.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Hooks.Root); - expectedHooksPath = Paths.ConvertPathToGitFormat(expectedHooksPath); - - string gitStatusCachePath = null; - if (!GVFSEnlistment.IsUnattended(tracer: null) && GVFSPlatform.Instance.IsGitStatusCacheSupported()) - { - gitStatusCachePath = Path.Combine( - enlistment.EnlistmentRoot, - GVFSPlatform.Instance.Constants.DotGVFSRoot, - GVFSConstants.DotGVFS.GitStatusCache.CachePath); - - gitStatusCachePath = Paths.ConvertPathToGitFormat(gitStatusCachePath); - } - - string coreGVFSFlags = Convert.ToInt32( - GitCoreGVFSFlags.SkipShaOnIndex | - GitCoreGVFSFlags.BlockCommands | - GitCoreGVFSFlags.MissingOk | - GitCoreGVFSFlags.NoDeleteOutsideSparseCheckout | - GitCoreGVFSFlags.FetchSkipReachabilityAndUploadPack | - GitCoreGVFSFlags.BlockFiltersAndEolConversions) - .ToString(); - - // These settings are required for normal GVFS functionality. - // They will override any existing local configuration values. - // - // IMPORTANT! These must parallel the settings in ControlGitRepo:Initialize - // - Dictionary requiredSettings = new Dictionary - { - // When running 'git am' it will remove the CRs from the patch file by default. This causes the patch to fail to apply because the - // file that is getting the patch applied will still have the CRs. There is a --keep-cr option that you can pass the 'git am' command - // but since we always want to keep CRs it is better to just set the config setting to always keep them so the user doesn't have to - // remember to pass the flag. 
- { "am.keepcr", "true" }, - - // Update git settings to enable optimizations in git 2.20 - // Set 'checkout.optimizeNewBranch=true' to enable optimized 'checkout -b' - { "checkout.optimizenewbranch", "true" }, - - // We don't support line ending conversions - automatic conversion of LF to Crlf by git would cause un-necessary hydration. Disabling it. - { "core.autocrlf", "false" }, - - // Enable commit graph. https://devblogs.microsoft.com/devops/supercharging-the-git-commit-graph/ - { "core.commitGraph", "true" }, - - // Perf - Git for Windows uses this to bulk-read and cache lstat data of entire directories (instead of doing lstat file by file). - { "core.fscache", "true" }, - - // Turns on all special gvfs logic. https://github.com/microsoft/git/blob/be5e0bb969495c428e219091e6976b52fb33b301/gvfs.h - { "core.gvfs", coreGVFSFlags }, - - // Use 'multi-pack-index' builtin instead of 'midx' to match upstream implementation - { "core.multiPackIndex", "true" }, - - // Perf - Enable parallel index preload for operations like git diff - { "core.preloadIndex", "true" }, - - // VFS4G never wants git to adjust line endings (causes un-necessary hydration of files)- explicitly setting core.safecrlf to false. - { "core.safecrlf", "false" }, - - // Possibly cause hydration while creating untrackedCache. - { "core.untrackedCache", "false" }, - - // This is to match what git init does. - { "core.repositoryformatversion", "0" }, - - // Turn on support for file modes on Mac & Linux. - { "core.filemode", GVFSPlatform.Instance.FileSystem.SupportsFileMode ? "true" : "false" }, - - // For consistency with git init. - { "core.bare", "false" }, - - // For consistency with git init. - { "core.logallrefupdates", "true" }, - - // Git to download objects on demand. 
- { GitConfigSetting.CoreVirtualizeObjectsName, "true" }, - - // Configure hook that git calls to get the paths git needs to consider for changes or untracked files - { GitConfigSetting.CoreVirtualFileSystemName, Paths.ConvertPathToGitFormat(GVFSConstants.DotGit.Hooks.VirtualFileSystemPath) }, - - // Ensure hooks path is configured correctly. - { "core.hookspath", expectedHooksPath }, - - // Hostname is no longer sufficent for VSTS authentication. VSTS now requires dev.azure.com/account to determine the tenant. - // By setting useHttpPath, credential managers will get the path which contains the account as the first parameter. They can then use this information for auth appropriately. - { GitConfigSetting.CredentialUseHttpPath, "true" }, - - // Turn off credential validation(https://github.com/microsoft/Git-Credential-Manager-for-Windows/blob/master/Docs/Configuration.md#validate). - // We already have logic to call git credential if we get back a 401, so there's no need to validate the PAT each time we ask for it. - { "credential.validate", "false" }, - - // This setting is not needed anymore, because current version of gvfs does not use index.lock. - // (This change was introduced initially to prevent `git diff` from acquiring index.lock file.) - // Explicitly setting this to true (which also is the default value) because the repo could have been - // cloned in the past when autoRefreshIndex used to be set to false. - { "diff.autoRefreshIndex", "true" }, - - // In Git 2.24.0, some new config settings were created. Disable them locally in VFS for Git repos in case a user has set them globally. - // https://github.com/microsoft/VFSForGit/pull/1594 - // This applies to feature.manyFiles, feature.experimental and fetch.writeCommitGraph settings. - { "feature.manyFiles", "false" }, - { "feature.experimental", "false" }, - { "fetch.writeCommitGraph", "false" }, - - // Turn off of git garbage collection. Git garbage collection does not work with virtualized object. 
- // We do run maintenance jobs now that do the packing of loose objects so in theory we shouldn't need - // this - but it is not hurting anything and it will prevent a gc from getting kicked off if for some - // reason the maintenance jobs have not been running and there are too many loose objects - { "gc.auto", "0" }, - - // Prevent git GUI from displaying GC warnings. - { "gui.gcwarning", "false" }, - - // Update git settings to enable optimizations in git 2.20 - // Set 'index.threads=true' to enable multi-threaded index reads - { "index.threads", "true" }, - - // index parsing code in VFSForGit currently only supports version 4. - { "index.version", "4" }, - - // Perf - avoid un-necessary blob downloads during a merge. - { "merge.stat", "false" }, - - // Perf - avoid un-necessary blob downloads while git tries to search and find renamed files. - { "merge.renames", "false" }, - - // Don't use bitmaps to determine pack file contents, because we use MIDX for this. - { "pack.useBitmaps", "false" }, - - // Update Git to include sparse push algorithm - { "pack.useSparse", "true" }, - - // Stop automatic git GC - { "receive.autogc", "false" }, - - // Update git settings to enable optimizations in git 2.20 - // Set 'reset.quiet=true' to speed up 'git reset " - { "reset.quiet", "true" }, - - // Configure git to use our serialize status file - make git use the serialized status file rather than compute the status by - // parsing the index file and going through the files to determine changes. - { "status.deserializePath", gitStatusCachePath }, - - // The GVFS Protocol forbids submodules, so prevent a user's - // global config of "status.submoduleSummary=true" from causing - // extreme slowness in "git status" - { "status.submoduleSummary", "false" }, - - // Generation number v2 isn't ready for full use. Wait for v3. - { "commitGraph.generationVersion", "1" }, - - // Disable the builtin FS Monitor in case it was enabled globally. 
- { "core.useBuiltinFSMonitor", "false" }, - }; + Dictionary requiredSettings = RequiredGitConfig.GetRequiredSettings(enlistment); if (!TrySetConfig(enlistment, requiredSettings, isRequired: true)) { @@ -429,14 +220,50 @@ protected bool ShowStatusWhileRunning( protected bool TryAuthenticate(ITracer tracer, GVFSEnlistment enlistment, out string authErrorMessage) { - string authError = null; + return this.TryAuthenticateAndQueryGVFSConfig(tracer, enlistment, null, out _, out authErrorMessage); + } + + /// + /// Combines authentication and GVFS config query into a single operation, + /// eliminating a redundant HTTP round-trip. If + /// is null, a default RetryConfig is used. + /// If the config query fails but a valid + /// URL is available, auth succeeds but + /// will be null (caller should handle this gracefully). + /// + protected bool TryAuthenticateAndQueryGVFSConfig( + ITracer tracer, + GVFSEnlistment enlistment, + RetryConfig retryConfig, + out ServerGVFSConfig serverGVFSConfig, + out string errorMessage, + CacheServerInfo fallbackCacheServer = null) + { + ServerGVFSConfig config = null; + string error = null; bool result = this.ShowStatusWhileRunning( - () => enlistment.Authentication.TryInitialize(tracer, enlistment, out authError), + () => enlistment.Authentication.TryInitializeAndQueryGVFSConfig( + tracer, + enlistment, + retryConfig ?? new RetryConfig(), + out config, + out error), "Authenticating", enlistment.EnlistmentRoot); - authErrorMessage = authError; + if (!result && fallbackCacheServer != null && !string.IsNullOrWhiteSpace(fallbackCacheServer.Url)) + { + // Auth/config query failed, but we have a fallback cache server. + // Allow auth to succeed so mount/clone can proceed; config will be null. 
+ tracer.RelatedWarning("Config query failed but continuing with fallback cache server: " + error); + serverGVFSConfig = null; + errorMessage = null; + return true; + } + + serverGVFSConfig = config; + errorMessage = error; return result; } @@ -493,50 +320,7 @@ protected RetryConfig GetRetryConfig(ITracer tracer, GVFSEnlistment enlistment, return retryConfig; } - /// - /// Attempts to query the GVFS config endpoint. If successful, returns the config. - /// If the query fails but a valid fallback cache server URL is available, returns null and continues. - /// (A warning will be logged later.) - /// If the query fails and no valid fallback is available, reports an error and exits. - /// - protected ServerGVFSConfig QueryGVFSConfigWithFallbackCacheServer( - ITracer tracer, - GVFSEnlistment enlistment, - RetryConfig retryConfig, - CacheServerInfo fallbackCacheServer) - { - ServerGVFSConfig serverGVFSConfig = null; - string errorMessage = null; - bool configSuccess = this.ShowStatusWhileRunning( - () => - { - using (ConfigHttpRequestor configRequestor = new ConfigHttpRequestor(tracer, enlistment, retryConfig)) - { - const bool LogErrors = true; - return configRequestor.TryQueryGVFSConfig(LogErrors, out serverGVFSConfig, out _, out errorMessage); - } - }, - "Querying remote for config", - suppressGvfsLogMessage: true); - - if (!configSuccess) - { - // If a valid cache server URL is available, warn and continue - if (fallbackCacheServer != null && !string.IsNullOrWhiteSpace(fallbackCacheServer.Url)) - { - // Continue without config - // Warning will be logged/displayed when version check is run - return null; - } - else - { - this.ReportErrorAndExit(tracer, "Unable to query /gvfs/config" + Environment.NewLine + errorMessage); - } - } - return serverGVFSConfig; - } - - // Restore original QueryGVFSConfig for other callers + // QueryGVFSConfig for callers that require config to succeed (no fallback) protected ServerGVFSConfig QueryGVFSConfig(ITracer tracer, GVFSEnlistment 
enlistment, RetryConfig retryConfig) { ServerGVFSConfig serverGVFSConfig = null; @@ -886,7 +670,9 @@ private static bool TrySetConfig(Enlistment enlistment, Dictionary folderCountProvider = () => + GVFS.Virtualization.Projection.GitIndexProjection.CountIndexFolders(tracer, enlistment.GitIndexPath); + EnlistmentHydrationSummary summary = EnlistmentHydrationSummary.CreateSummary( + enlistment, this.FileSystem, tracer, folderCountProvider); this.Output.WriteLine(summary.ToMessage()); } + /// + /// Try to get the cached hydration summary from the mount process via named pipe. + /// Returns null if unavailable (GVFS not mounted, no cached value, parse error, timeout). + /// + private string TryGetCachedHydrationMessage(GVFSEnlistment enlistment) + { + const int ConnectTimeoutMs = 500; + const int TotalTimeoutMs = 1000; + + try + { + Task task = Task.Run(() => + { + using (NamedPipeClient pipeClient = new NamedPipeClient(enlistment.NamedPipeName)) + { + if (!pipeClient.Connect(timeoutMilliseconds: ConnectTimeoutMs)) + { + return null; + } + + pipeClient.SendRequest(new NamedPipeMessages.Message(NamedPipeMessages.HydrationStatus.Request, null)); + NamedPipeMessages.Message response = pipeClient.ReadResponse(); + + if (response.Header != NamedPipeMessages.HydrationStatus.SuccessResult + || !NamedPipeMessages.HydrationStatus.Response.TryParse(response.Body, out NamedPipeMessages.HydrationStatus.Response status)) + { + return null; + } + + return status.ToDisplayMessage(); + } + }); + + if (task.Wait(TotalTimeoutMs) && task.Status == TaskStatus.RanToCompletion) + { + return task.Result; + } + + return null; + } + catch (Exception) + { + return null; + } + } + private void PrintOutput(EnlistmentHealthData enlistmentHealthData) { string trackedFilesCountFormatted = enlistmentHealthData.GitTrackedItemsCount.ToString("N0"); diff --git a/GVFS/GVFS/CommandLine/MountVerb.cs b/GVFS/GVFS/CommandLine/MountVerb.cs index 5183ec434..2fa730a8e 100644 --- 
a/GVFS/GVFS/CommandLine/MountVerb.cs +++ b/GVFS/GVFS/CommandLine/MountVerb.cs @@ -1,15 +1,12 @@ using CommandLine; using GVFS.Common; -using GVFS.Common.FileSystem; -using GVFS.Common.Git; using GVFS.Common.Http; using GVFS.Common.NamedPipes; using GVFS.Common.Tracing; using GVFS.DiskLayoutUpgrades; -using GVFS.Virtualization.Projection; using System; using System.IO; -using System.Security.Principal; +using System.Threading; namespace GVFS.CommandLine { @@ -55,16 +52,58 @@ protected override void PreCreateEnlistment() { string errorMessage; string enlistmentRoot; - if (!GVFSPlatform.Instance.TryGetGVFSEnlistmentRoot(this.EnlistmentRootPathParameter, out enlistmentRoot, out errorMessage)) + + // Always check if the given path is a worktree first, before + // falling back to the standard .gvfs/ walk-up. A worktree dir + // may be under the enlistment tree, so TryGetGVFSEnlistmentRoot + // can succeed by walking up — but we still need worktree-specific handling. + string pathToCheck = string.IsNullOrEmpty(this.EnlistmentRootPathParameter) + ? Environment.CurrentDirectory + : this.EnlistmentRootPathParameter; + + string worktreeError; + GVFSEnlistment.WorktreeInfo wtInfo = GVFSEnlistment.TryGetWorktreeInfo(pathToCheck, out worktreeError); + if (worktreeError != null) { - this.ReportErrorAndExit("Error: '{0}' is not a valid GVFS enlistment", this.EnlistmentRootPathParameter); + this.ReportErrorAndExit("Error: failed to check worktree status for '{0}': {1}", pathToCheck, worktreeError); } - if (!this.SkipMountedCheck) + if (wtInfo?.SharedGitDir != null) + { + // This is a worktree mount request. Find the primary enlistment root. 
+ enlistmentRoot = wtInfo.GetEnlistmentRoot(); + + if (enlistmentRoot == null) + { + this.ReportErrorAndExit("Error: could not determine enlistment root for worktree '{0}'", pathToCheck); + } + + // Check the worktree-specific pipe, not the primary + if (!this.SkipMountedCheck) + { + string worktreePipeName = GVFSPlatform.Instance.GetNamedPipeName(enlistmentRoot) + wtInfo.PipeSuffix; + using (NamedPipeClient pipeClient = new NamedPipeClient(worktreePipeName)) + { + if (pipeClient.Connect(500)) + { + this.ReportErrorAndExit(tracer: null, exitCode: ReturnCode.Success, error: $"The worktree at '{wtInfo.WorktreePath}' is already mounted."); + } + } + } + } + else if (!GVFSPlatform.Instance.TryGetGVFSEnlistmentRoot(this.EnlistmentRootPathParameter, out enlistmentRoot, out errorMessage)) + { + this.ReportErrorAndExit("Error: '{0}' is not a valid GVFS enlistment", this.EnlistmentRootPathParameter); + } + else { - if (this.IsExistingPipeListening(enlistmentRoot)) + // Primary enlistment — check primary pipe as before + if (!this.SkipMountedCheck) { - this.ReportErrorAndExit(tracer: null, exitCode: ReturnCode.Success, error: $"The repo at '{enlistmentRoot}' is already mounted."); + if (this.IsExistingPipeListening(enlistmentRoot)) + { + this.ReportErrorAndExit(tracer: null, exitCode: ReturnCode.Success, error: $"The repo at '{enlistmentRoot}' is already mounted."); + } } } @@ -86,17 +125,11 @@ protected override void Execute(GVFSEnlistment enlistment) string mountExecutableLocation = null; using (JsonTracer tracer = new JsonTracer(GVFSConstants.GVFSEtwProviderName, "ExecuteMount")) { - PhysicalFileSystem fileSystem = new PhysicalFileSystem(); - GitRepo gitRepo = new GitRepo(tracer, enlistment, fileSystem); - GVFSContext context = new GVFSContext(tracer, fileSystem, gitRepo, enlistment); + // Validate these before handing them to the background process + // which cannot tell the user when they are bad + this.ValidateEnumArgs(); - if (!this.SkipInstallHooks && 
!HooksInstaller.InstallHooks(context, out errorMessage)) - { - this.ReportErrorAndExit("Error installing hooks: " + errorMessage); - } - - var resolvedCacheServer = this.ResolvedCacheServer; - var cacheServerFromConfig = resolvedCacheServer ?? CacheServerResolver.GetCacheServerFromConfig(enlistment); + CacheServerInfo cacheServerFromConfig = CacheServerResolver.GetCacheServerFromConfig(enlistment); tracer.AddLogFileEventListener( GVFSEnlistment.GetNewGVFSLogFileName(enlistment.GVFSLogsRoot, GVFSConstants.LogFileTypes.MountVerb), @@ -133,65 +166,11 @@ protected override void Execute(GVFSEnlistment enlistment) } } - RetryConfig retryConfig = null; - ServerGVFSConfig serverGVFSConfig = this.DownloadedGVFSConfig; - /* If resolved cache server was passed in, we've already checked server config and version check in previous operation. */ - if (resolvedCacheServer == null) + // Verify mount executable exists before launching + mountExecutableLocation = Path.Combine(ProcessHelper.GetCurrentProcessLocation(), GVFSPlatform.Instance.Constants.MountExecutableName); + if (!File.Exists(mountExecutableLocation)) { - string authErrorMessage; - if (!this.TryAuthenticate(tracer, enlistment, out authErrorMessage)) - { - this.Output.WriteLine(" WARNING: " + authErrorMessage); - this.Output.WriteLine(" Mount will proceed, but new files cannot be accessed until GVFS can authenticate."); - } - - if (serverGVFSConfig == null) - { - if (retryConfig == null) - { - retryConfig = this.GetRetryConfig(tracer, enlistment); - } - - serverGVFSConfig = this.QueryGVFSConfigWithFallbackCacheServer( - tracer, - enlistment, - retryConfig, - cacheServerFromConfig); - } - - this.ValidateClientVersions(tracer, enlistment, serverGVFSConfig, showWarnings: true); - - CacheServerResolver cacheServerResolver = new CacheServerResolver(tracer, enlistment); - resolvedCacheServer = cacheServerResolver.ResolveNameFromRemote(cacheServerFromConfig.Url, serverGVFSConfig); - this.Output.WriteLine("Configured cache 
server: " + cacheServerFromConfig); - } - - this.InitializeLocalCacheAndObjectsPaths(tracer, enlistment, retryConfig, serverGVFSConfig, resolvedCacheServer); - - if (!this.ShowStatusWhileRunning( - () => { return this.PerformPreMountValidation(tracer, enlistment, out mountExecutableLocation, out errorMessage); }, - "Validating repo")) - { - this.ReportErrorAndExit(tracer, errorMessage); - } - - if (!this.SkipVersionCheck) - { - string error; - if (!RepoMetadata.TryInitialize(tracer, enlistment.DotGVFSRoot, out error)) - { - this.ReportErrorAndExit(tracer, error); - } - - try - { - GitProcess git = new GitProcess(enlistment); - this.LogEnlistmentInfoAndSetConfigValues(tracer, git, enlistment); - } - finally - { - RepoMetadata.Shutdown(); - } + this.ReportErrorAndExit(tracer, $"Could not find {GVFSPlatform.Instance.Constants.MountExecutableName}. You may need to reinstall GVFS."); } if (!this.ShowStatusWhileRunning( @@ -219,73 +198,23 @@ protected override void Execute(GVFSEnlistment enlistment) } } } - - private bool PerformPreMountValidation(ITracer tracer, GVFSEnlistment enlistment, out string mountExecutableLocation, out string errorMessage) - { - errorMessage = string.Empty; - mountExecutableLocation = string.Empty; - - // We have to parse these parameters here to make sure they are valid before - // handing them to the background process which cannot tell the user when they are bad - EventLevel verbosity; - Keywords keywords; - this.ParseEnumArgs(out verbosity, out keywords); - - mountExecutableLocation = Path.Combine(ProcessHelper.GetCurrentProcessLocation(), GVFSPlatform.Instance.Constants.MountExecutableName); - if (!File.Exists(mountExecutableLocation)) - { - errorMessage = $"Could not find {GVFSPlatform.Instance.Constants.MountExecutableName}. 
You may need to reinstall GVFS."; - return false; - } - - GitProcess git = new GitProcess(enlistment); - if (!git.IsValidRepo()) - { - errorMessage = "The .git folder is missing or has invalid contents"; - return false; - } - - try - { - GitIndexProjection.ReadIndex(tracer, Path.Combine(enlistment.WorkingDirectoryBackingRoot, GVFSConstants.DotGit.Index)); - } - catch (Exception e) - { - EventMetadata metadata = new EventMetadata(); - metadata.Add("Exception", e.ToString()); - tracer.RelatedError(metadata, "Index validation failed"); - errorMessage = "Index validation failed, run 'gvfs repair' to repair index."; - - return false; - } - - if (!GVFSPlatform.Instance.FileSystem.IsFileSystemSupported(enlistment.EnlistmentRoot, out string error)) - { - errorMessage = $"FileSystem unsupported: {error}"; - return false; - } - - return true; - } - private bool TryMount(ITracer tracer, GVFSEnlistment enlistment, string mountExecutableLocation, out string errorMessage) { - if (!GVFSVerb.TrySetRequiredGitConfigSettings(enlistment)) - { - errorMessage = "Unable to configure git repo"; - return false; - } - const string ParamPrefix = "--"; - tracer.RelatedInfo($"{nameof(this.TryMount)}: Launching background process('{mountExecutableLocation}') for {enlistment.EnlistmentRoot}"); + // For worktrees, pass the worktree path so GVFS.Mount.exe creates the right enlistment + string mountPath = enlistment.IsWorktree + ? 
enlistment.WorkingDirectoryRoot + : enlistment.EnlistmentRoot; + + tracer.RelatedInfo($"{nameof(this.TryMount)}: Launching background process('{mountExecutableLocation}') for {mountPath}"); GVFSPlatform.Instance.StartBackgroundVFS4GProcess( tracer, mountExecutableLocation, new[] { - enlistment.EnlistmentRoot, + mountPath, ParamPrefix + GVFSConstants.VerbParameters.Mount.Verbosity, this.Verbosity, ParamPrefix + GVFSConstants.VerbParameters.Mount.Keywords, @@ -297,7 +226,8 @@ private bool TryMount(ITracer tracer, GVFSEnlistment enlistment, string mountExe }); tracer.RelatedInfo($"{nameof(this.TryMount)}: Waiting for repo to be mounted"); - return GVFSEnlistment.WaitUntilMounted(tracer, enlistment.EnlistmentRoot, this.Unattended, out errorMessage); + + return GVFSEnlistment.WaitUntilMounted(tracer, enlistment.NamedPipeName, enlistment.EnlistmentRoot, this.Unattended, out errorMessage); } private bool RegisterMount(GVFSEnlistment enlistment, out string errorMessage) @@ -305,7 +235,12 @@ private bool RegisterMount(GVFSEnlistment enlistment, out string errorMessage) errorMessage = string.Empty; NamedPipeMessages.RegisterRepoRequest request = new NamedPipeMessages.RegisterRepoRequest(); - request.EnlistmentRoot = enlistment.EnlistmentRoot; + + // Worktree mounts register with their worktree path so they can be + // listed and unregistered independently of the primary enlistment. + request.EnlistmentRoot = enlistment.IsWorktree + ? 
enlistment.WorkingDirectoryRoot + : enlistment.EnlistmentRoot; request.OwnerSID = GVFSPlatform.Instance.GetCurrentUser(); @@ -355,14 +290,14 @@ private bool RegisterMount(GVFSEnlistment enlistment, out string errorMessage) } } - private void ParseEnumArgs(out EventLevel verbosity, out Keywords keywords) + private void ValidateEnumArgs() { - if (!Enum.TryParse(this.KeywordsCsv, out keywords)) + if (!Enum.TryParse(this.KeywordsCsv, out Keywords _)) { this.ReportErrorAndExit("Error: Invalid logging filter keywords: " + this.KeywordsCsv); } - if (!Enum.TryParse(this.Verbosity, out verbosity)) + if (!Enum.TryParse(this.Verbosity, out EventLevel _)) { this.ReportErrorAndExit("Error: Invalid logging verbosity: " + this.Verbosity); } diff --git a/GVFS/GVFS/CommandLine/PrefetchVerb.cs b/GVFS/GVFS/CommandLine/PrefetchVerb.cs index ab72b5e9f..1dd31b3b1 100644 --- a/GVFS/GVFS/CommandLine/PrefetchVerb.cs +++ b/GVFS/GVFS/CommandLine/PrefetchVerb.cs @@ -243,23 +243,23 @@ private void InitializeServerConnection( // If ResolvedCacheServer is set, then we have already tried querying the server config and checking versions. 
if (resolvedCacheServer == null) { - string authErrorMessage; - if (!this.TryAuthenticate(tracer, enlistment, out authErrorMessage)) - { - this.ReportErrorAndExit(tracer, "Unable to prefetch because authentication failed: " + authErrorMessage); - } - - CacheServerResolver cacheServerResolver = new CacheServerResolver(tracer, enlistment); - if (serverGVFSConfig == null) { - serverGVFSConfig = this.QueryGVFSConfigWithFallbackCacheServer( + string authErrorMessage; + if (!this.TryAuthenticateAndQueryGVFSConfig( tracer, enlistment, retryConfig, - cacheServerFromConfig); + out serverGVFSConfig, + out authErrorMessage, + fallbackCacheServer: cacheServerFromConfig)) + { + this.ReportErrorAndExit(tracer, "Unable to prefetch because authentication failed: " + authErrorMessage); + } } + CacheServerResolver cacheServerResolver = new CacheServerResolver(tracer, enlistment); + resolvedCacheServer = cacheServerResolver.ResolveNameFromRemote(cacheServerFromConfig.Url, serverGVFSConfig); if (!this.SkipVersionCheck) diff --git a/GVFS/GVFS/CommandLine/UnmountVerb.cs b/GVFS/GVFS/CommandLine/UnmountVerb.cs index 84d7fe3d9..eebb4a3b1 100644 --- a/GVFS/GVFS/CommandLine/UnmountVerb.cs +++ b/GVFS/GVFS/CommandLine/UnmountVerb.cs @@ -36,22 +36,58 @@ public override void Execute() { this.ValidatePathParameter(this.EnlistmentRootPathParameter); - string errorMessage; + string errorMessage = null; string root; - if (!GVFSPlatform.Instance.TryGetGVFSEnlistmentRoot(this.EnlistmentRootPathParameter, out root, out errorMessage)) + string pipeName; + + // Check for worktree first — a worktree path will walk up + // to find the primary .gvfs/ but needs its own pipe name. + string pathToCheck = string.IsNullOrEmpty(this.EnlistmentRootPathParameter) + ? 
System.Environment.CurrentDirectory + : this.EnlistmentRootPathParameter; + + string registrationPath; + string worktreeError; + GVFSEnlistment.WorktreeInfo wtInfo = GVFSEnlistment.TryGetWorktreeInfo(pathToCheck, out worktreeError); + if (worktreeError != null) + { + this.ReportErrorAndExit("Error: failed to check worktree status for '{0}': {1}", pathToCheck, worktreeError); + } + + if (wtInfo?.SharedGitDir != null) + { + root = wtInfo.GetEnlistmentRoot(); + if (root == null) + { + this.ReportErrorAndExit("Error: could not determine enlistment root for worktree '{0}'", pathToCheck); + } + + pipeName = GVFSPlatform.Instance.GetNamedPipeName(root) + wtInfo.PipeSuffix; + + // Worktree mounts register with their worktree path, + // so unregister with the same path — not the primary root. + registrationPath = wtInfo.WorktreePath; + } + else if (!GVFSPlatform.Instance.TryGetGVFSEnlistmentRoot(this.EnlistmentRootPathParameter, out root, out errorMessage)) { this.ReportErrorAndExit( "Error: '{0}' is not a valid GVFS enlistment", this.EnlistmentRootPathParameter); + return; + } + else + { + pipeName = GVFSPlatform.Instance.GetNamedPipeName(root); + registrationPath = root; } if (!this.SkipLock) { - this.AcquireLock(root); + this.AcquireLock(pipeName, root); } if (!this.ShowStatusWhileRunning( - () => { return this.Unmount(root, out errorMessage); }, + () => { return this.Unmount(pipeName, out errorMessage); }, "Unmounting")) { this.ReportErrorAndExit(errorMessage); @@ -60,7 +96,7 @@ public override void Execute() if (!this.Unattended && !this.SkipUnregister) { if (!this.ShowStatusWhileRunning( - () => { return this.UnregisterRepo(root, out errorMessage); }, + () => { return this.UnregisterRepo(registrationPath, out errorMessage); }, "Unregistering automount")) { this.Output.WriteLine(" WARNING: " + errorMessage); @@ -68,11 +104,9 @@ public override void Execute() } } - private bool Unmount(string enlistmentRoot, out string errorMessage) + private bool Unmount(string 
pipeName, out string errorMessage) { errorMessage = string.Empty; - - string pipeName = GVFSPlatform.Instance.GetNamedPipeName(enlistmentRoot); string rawGetStatusResponse = string.Empty; try @@ -197,9 +231,8 @@ private bool UnregisterRepo(string rootPath, out string errorMessage) } } - private void AcquireLock(string enlistmentRoot) + private void AcquireLock(string pipeName, string enlistmentRoot) { - string pipeName = GVFSPlatform.Instance.GetNamedPipeName(enlistmentRoot); using (NamedPipeClient pipeClient = new NamedPipeClient(pipeName)) { try diff --git a/GVFS/GVFS/Program.cs b/GVFS/GVFS/Program.cs index c8fba0235..81d712d52 100644 --- a/GVFS/GVFS/Program.cs +++ b/GVFS/GVFS/Program.cs @@ -22,6 +22,7 @@ public static void Main(string[] args) Type[] verbTypes = new Type[] { typeof(CacheServerVerb), + typeof(CacheVerb), typeof(CloneVerb), typeof(ConfigVerb), typeof(DehydrateVerb), diff --git a/scripts/RunFunctionalTests-Dev.ps1 b/scripts/RunFunctionalTests-Dev.ps1 new file mode 100644 index 000000000..3fe4ba540 --- /dev/null +++ b/scripts/RunFunctionalTests-Dev.ps1 @@ -0,0 +1,125 @@ +<# +.SYNOPSIS + Runs GVFS functional tests in dev mode (no admin, no install required). + +.DESCRIPTION + Runs GVFS.FunctionalTests.exe using build output from out\ instead of + requiring a system-wide GVFS installation. The test harness launches a + test service as a console process (not a Windows service), so no admin + privileges are required. + + After the test process exits, any GVFS.Service.exe child processes it + spawned are killed by PID. This is safe for concurrent runs — each + invocation only cleans up its own child processes. + +.PARAMETER Configuration + Build configuration: Debug (default) or Release. + +.PARAMETER ExtraArgs + Additional arguments passed through to GVFS.FunctionalTests.exe + (e.g. 
--test=GVFS.FunctionalTests.Tests.GVFSVerbTests.UnknownVerb) + +.EXAMPLE + .\RunFunctionalTests-Dev.ps1 + .\RunFunctionalTests-Dev.ps1 -Configuration Release + .\RunFunctionalTests-Dev.ps1 -ExtraArgs "--test=GVFS.FunctionalTests.Tests.GVFSVerbTests.UnknownVerb" + .\RunFunctionalTests-Dev.ps1 Debug --test=GVFS.FunctionalTests.Tests.EnlistmentPerFixture.WorktreeTests +#> +param( + [string]$Configuration = "Debug", + [Parameter(ValueFromRemainingArguments)] + [string[]]$ExtraArgs +) + +$ErrorActionPreference = "Stop" + +# Resolve paths (mirrors InitializeEnvironment.bat) +$scriptsDir = $PSScriptRoot +$srcDir = Split-Path $scriptsDir -Parent +$enlistmentDir = Split-Path $srcDir -Parent +$outDir = Join-Path $enlistmentDir "out" + +# Dev mode environment +$env:GVFS_FUNCTIONAL_TEST_DEV_MODE = "1" +$env:GVFS_DEV_OUT_DIR = $outDir +$env:GVFS_DEV_CONFIGURATION = $Configuration + +# Derive a unique service name from the enlistment path so concurrent runs +# from different working directories don't collide on the named pipe. 
+$hash = [System.BitConverter]::ToString( + [System.Security.Cryptography.SHA256]::Create().ComputeHash( + [System.Text.Encoding]::UTF8.GetBytes($enlistmentDir.ToLowerInvariant()) + ) +).Replace("-","").Substring(0,8) +$env:GVFS_TEST_SERVICE_NAME = "Test.GVFS.Service.$hash.$PID" + +# Isolate test data per enlistment and run +$env:GVFS_TEST_DATA = Join-Path $env:TEMP "GVFS-FunctionalTest-$hash.$PID" +$env:GVFS_COMMON_APPDATA_ROOT = Join-Path $env:GVFS_TEST_DATA "AppData" +$env:GVFS_SECURE_DATA_ROOT = Join-Path $env:GVFS_TEST_DATA "ProgramData" + +# Put build output gvfs.exe on PATH +$payloadDir = Join-Path $outDir "GVFS.Payload\bin\$Configuration\win-x64" +$env:PATH = "$payloadDir;C:\Program Files\Git\cmd;$env:PATH" + +Write-Host "============================================" +Write-Host "GVFS Functional Tests - Dev Mode (no admin)" +Write-Host "============================================" +Write-Host "Configuration: $Configuration" +Write-Host "Build output: $outDir" +Write-Host "Test service: $env:GVFS_TEST_SERVICE_NAME" +Write-Host "Test data: $env:GVFS_TEST_DATA" +Write-Host "" + +# Validate prerequisites +$gvfsPath = Get-Command gvfs -ErrorAction SilentlyContinue +if (-not $gvfsPath) { + Write-Error "Unable to locate gvfs on the PATH. Has the solution been built?" + exit 1 +} +Write-Host "gvfs location: $($gvfsPath.Source)" + +$gitPath = Get-Command git -ErrorAction SilentlyContinue +if (-not $gitPath) { + Write-Error "Unable to locate git on the PATH." + exit 1 +} +Write-Host "git location: $($gitPath.Source)" +Write-Host "" + +# Build test exe path +$testExe = Join-Path $outDir "GVFS.FunctionalTests\bin\$Configuration\net471\win-x64\GVFS.FunctionalTests.exe" +if (-not (Test-Path $testExe)) { + Write-Error "Test executable not found: $testExe`nRun Build.bat first." 
+ exit 1 +} + +# Build arguments +$testArgs = @("/result:$(Join-Path $enlistmentDir 'TestResult.xml')") +if ($ExtraArgs) { $testArgs += $ExtraArgs } + +Write-Host "Running: $testExe" +Write-Host " Args: $($testArgs -join ' ')" +Write-Host "" + +# Start the test process and track its PID +$testProc = Start-Process -FilePath $testExe -ArgumentList $testArgs ` + -NoNewWindow -PassThru + +try { + $testProc.WaitForExit() +} +finally { + # Kill any GVFS.Service.exe that was spawned by our test process. + # ParentProcessId is set at creation time and doesn't change when the + # parent exits, so this works even after GVFS.FunctionalTests.exe is gone. + $orphans = Get-CimInstance Win32_Process -Filter ` + "Name = 'GVFS.Service.exe' AND ParentProcessId = $($testProc.Id)" ` + -ErrorAction SilentlyContinue + foreach ($orphan in $orphans) { + Write-Host "Cleaning up test service process (PID $($orphan.ProcessId))..." + Stop-Process -Id $orphan.ProcessId -Force -ErrorAction SilentlyContinue + } +} + +exit $testProc.ExitCode