From 236ebf363c68e2a4369f73e90480f2341e53e3a7 Mon Sep 17 00:00:00 2001 From: Adit Chandra Date: Wed, 13 May 2026 02:15:29 -0400 Subject: [PATCH 1/4] feat(core): resolve exact source sides for expansion Attach per-file source fetchers from explicit Git refs, index, worktree, file-pair, and untracked endpoints so expanded context never guesses from the wrong revision. --- src/core/fileSource.test.ts | 193 +++++++++++++++++++++++++ src/core/fileSource.ts | 153 ++++++++++++++++++++ src/core/git.test.ts | 229 +++++++++++++++++++++++++++++- src/core/git.ts | 201 +++++++++++++++++++++++++- src/core/loaders.test.ts | 275 ++++++++++++++++++++++++++++++++++++ src/core/loaders.ts | 129 ++++++++++++++++- src/core/types.ts | 4 + 7 files changed, 1178 insertions(+), 6 deletions(-) create mode 100644 src/core/fileSource.test.ts create mode 100644 src/core/fileSource.ts diff --git a/src/core/fileSource.test.ts b/src/core/fileSource.test.ts new file mode 100644 index 00000000..48523415 --- /dev/null +++ b/src/core/fileSource.test.ts @@ -0,0 +1,193 @@ +import { afterEach, describe, expect, test } from "bun:test"; +import { mkdtempSync, rmSync, writeFileSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { createFileSourceFetcher } from "./fileSource"; + +const tempDirs: string[] = []; + +function createTempDir(prefix: string) { + const dir = mkdtempSync(join(tmpdir(), prefix)); + tempDirs.push(dir); + return dir; +} + +function git(cwd: string, ...cmd: string[]) { + const proc = Bun.spawnSync(["git", ...cmd], { + cwd, + stdout: "pipe", + stderr: "pipe", + stdin: "ignore", + }); + + if (proc.exitCode !== 0) { + const stderr = Buffer.from(proc.stderr).toString("utf8"); + throw new Error(stderr.trim() || `git ${cmd.join(" ")} failed`); + } + + return Buffer.from(proc.stdout).toString("utf8"); +} + +function createTempRepo(prefix: string) { + const dir = createTempDir(prefix); + git(dir, "init"); + git(dir, "config", "user.name", "Test 
User");
+  git(dir, "config", "user.email", "test@example.com");
+  git(dir, "config", "commit.gpgSign", "false");
+  return dir;
+}
+
+/** Capture console.error calls while exercising diagnostic paths. */
+async function captureConsoleErrors(fn: () => Promise<void>) {
+  const originalConsoleError = console.error;
+  const loggedErrors: unknown[][] = [];
+  console.error = (...args: unknown[]) => {
+    loggedErrors.push(args);
+  };
+
+  try {
+    await fn();
+  } finally {
+    console.error = originalConsoleError;
+  }
+
+  return loggedErrors;
+}
+
+afterEach(() => {
+  while (tempDirs.length > 0) {
+    const dir = tempDirs.pop();
+    if (dir) {
+      rmSync(dir, { recursive: true, force: true });
+    }
+  }
+});
+
+describe("createFileSourceFetcher", () => {
+  test("reads fs paths for old and new sides", async () => {
+    const dir = createTempDir("hunk-source-fs-");
+    const left = join(dir, "before.txt");
+    const right = join(dir, "after.txt");
+    writeFileSync(left, "old contents\n");
+    writeFileSync(right, "new contents\n");
+
+    const fetcher = createFileSourceFetcher({
+      old: { kind: "fs", absolutePath: left },
+      new: { kind: "fs", absolutePath: right },
+    });
+
+    expect(await fetcher.getFullText("old")).toBe("old contents\n");
+    expect(await fetcher.getFullText("new")).toBe("new contents\n");
+  });
+
+  test("returns null for `none` specs", async () => {
+    const fetcher = createFileSourceFetcher({
+      old: { kind: "none" },
+      new: { kind: "none" },
+    });
+
+    expect(await fetcher.getFullText("old")).toBeNull();
+    expect(await fetcher.getFullText("new")).toBeNull();
+  });
+
+  test("returns null when an fs path cannot be read", async () => {
+    const dir = createTempDir("hunk-source-fs-missing-");
+    const fetcher = createFileSourceFetcher({
+      old: { kind: "fs", absolutePath: join(dir, "missing.txt") },
+      new: { kind: "none" },
+    });
+
+    expect(await fetcher.getFullText("old")).toBeNull();
+  });
+
+  test("reads git blob contents for both sides via `git show`", async () => {
+    const repoRoot =
createTempRepo("hunk-source-git-"); + const filePath = "note.txt"; + + writeFileSync(join(repoRoot, filePath), "first revision\n"); + git(repoRoot, "add", "."); + git(repoRoot, "commit", "-m", "first"); + writeFileSync(join(repoRoot, filePath), "second revision\n"); + git(repoRoot, "add", "."); + git(repoRoot, "commit", "-m", "second"); + + const fetcher = createFileSourceFetcher({ + old: { kind: "git-blob", repoRoot, ref: "HEAD~1", path: filePath }, + new: { kind: "git-blob", repoRoot, ref: "HEAD", path: filePath }, + }); + + expect(await fetcher.getFullText("old")).toBe("first revision\n"); + expect(await fetcher.getFullText("new")).toBe("second revision\n"); + }); + + test("reads git index contents through an explicit index spec", async () => { + const repoRoot = createTempRepo("hunk-source-git-index-"); + const filePath = "note.txt"; + + writeFileSync(join(repoRoot, filePath), "committed\n"); + git(repoRoot, "add", "."); + git(repoRoot, "commit", "-m", "first"); + writeFileSync(join(repoRoot, filePath), "staged\n"); + git(repoRoot, "add", filePath); + writeFileSync(join(repoRoot, filePath), "working tree\n"); + + const fetcher = createFileSourceFetcher({ + old: { kind: "git-index", repoRoot, path: filePath }, + new: { kind: "fs", absolutePath: join(repoRoot, filePath) }, + }); + + expect(await fetcher.getFullText("old")).toBe("staged\n"); + expect(await fetcher.getFullText("new")).toBe("working tree\n"); + }); + + test("returns null when a git blob cannot be resolved", async () => { + const repoRoot = createTempRepo("hunk-source-git-missing-"); + writeFileSync(join(repoRoot, "tracked.txt"), "x\n"); + git(repoRoot, "add", "."); + git(repoRoot, "commit", "-m", "first"); + + const fetcher = createFileSourceFetcher({ + old: { kind: "git-blob", repoRoot, ref: "HEAD", path: "missing-from-history.txt" }, + new: { kind: "none" }, + }); + + const loggedErrors = await captureConsoleErrors(async () => { + expect(await fetcher.getFullText("old")).toBeNull(); + }); + 
expect(loggedErrors).toHaveLength(0); + }); + + test("logs unexpected git source failures with object context", async () => { + const repoRoot = createTempDir("hunk-source-git-not-repo-"); + const fetcher = createFileSourceFetcher({ + old: { kind: "git-blob", repoRoot, ref: "HEAD", path: "note.txt" }, + new: { kind: "none" }, + }); + + const loggedErrors = await captureConsoleErrors(async () => { + expect(await fetcher.getFullText("old")).toBeNull(); + }); + + expect(loggedErrors).toHaveLength(1); + expect(String(loggedErrors[0]?.[0])).toContain("HEAD:note.txt"); + expect(String(loggedErrors[0]?.[0])).toContain(repoRoot); + }); + + test("caches resolved text per side", async () => { + const dir = createTempDir("hunk-source-cache-"); + const target = join(dir, "value.txt"); + writeFileSync(target, "first\n"); + + const fetcher = createFileSourceFetcher({ + old: { kind: "none" }, + new: { kind: "fs", absolutePath: target }, + }); + + const initial = await fetcher.getFullText("new"); + writeFileSync(target, "rewritten\n"); + const cached = await fetcher.getFullText("new"); + + expect(initial).toBe("first\n"); + expect(cached).toBe("first\n"); + }); +}); diff --git a/src/core/fileSource.ts b/src/core/fileSource.ts new file mode 100644 index 00000000..8d5addf7 --- /dev/null +++ b/src/core/fileSource.ts @@ -0,0 +1,153 @@ +/** + * Resolve full file contents for one diff file across input modes. + * + * Each `DiffFile` may carry a `FileSourceFetcher` that knows how to read the + * file's "old" and "new" sides without re-running the original diff. Returns + * `null` when source content is unreachable. 
+ */
+
+export type FileSourceSpec =
+  | { kind: "none" }
+  | { kind: "fs"; absolutePath: string }
+  | { kind: "git-blob"; repoRoot: string; ref: string; path: string }
+  | { kind: "git-index"; repoRoot: string; path: string };
+
+export type FileSourceSide = "old" | "new";
+
+export interface FileSourceFetcher {
+  /**
+   * Returns the file's full source text on the requested side, or `null` when
+   * the side is not reachable (deleted side, missing path, git error). Built-in
+   * fetchers resolve `null` instead of rejecting, but UI callers still handle
+   * custom fetcher rejection defensively.
+   */
+  getFullText(side: FileSourceSide): Promise<string | null>;
+}
+
+interface ResolvedSpecs {
+  old: FileSourceSpec;
+  new: FileSourceSpec;
+}
+
+/** Return the first useful diagnostic line from a failed source read. */
+function firstDiagnosticLine(text: string) {
+  return text
+    .split("\n")
+    .map((line) => line.trim())
+    .find(Boolean);
+}
+
+/** Keep source-load diagnostics terse enough to be useful in logs. */
+function logSourceDiagnostic(message: string, detail?: unknown) {
+  if (detail instanceof Error) {
+    console.error(`hunk: ${message}: ${detail.message}`, detail);
+    return;
+  }
+
+  const detailText = typeof detail === "string" ? firstDiagnosticLine(detail) : undefined;
+  console.error(detailText ? `hunk: ${message}: ${detailText}` : `hunk: ${message}`);
+}
+
+/** Return whether a Git failure is an expected missing source side/path.
*/
+function isExpectedMissingGitSource(stderr: string) {
+  const normalized = stderr.toLowerCase();
+  return [
+    "exists on disk, but not in",
+    "does not exist in",
+    "invalid object name",
+    "needed a single revision",
+    "unknown revision or path not in the working tree",
+  ].some((fragment) => normalized.includes(fragment));
+}
+
+async function readFsSpec(spec: Extract<FileSourceSpec, { kind: "fs" }>): Promise<string | null> {
+  try {
+    const file = Bun.file(spec.absolutePath);
+    if (!(await file.exists())) {
+      return null;
+    }
+
+    return await file.text();
+  } catch (error) {
+    logSourceDiagnostic(`failed to read source file ${spec.absolutePath}`, error);
+    return null;
+  }
+}
+
+function readGitBlobSpec(
+  spec: Extract<FileSourceSpec, { kind: "git-blob" }>,
+  gitExecutable = "git",
+): string | null {
+  return readGitObjectSpec(spec.repoRoot, `${spec.ref}:${spec.path}`, gitExecutable);
+}
+
+function readGitIndexSpec(
+  spec: Extract<FileSourceSpec, { kind: "git-index" }>,
+  gitExecutable = "git",
+): string | null {
+  return readGitObjectSpec(spec.repoRoot, `:${spec.path}`, gitExecutable);
+}
+
+/** Read a blob-like Git object spec such as `HEAD:path` or `:path`. */
+function readGitObjectSpec(
+  repoRoot: string,
+  objectName: string,
+  gitExecutable = "git",
+): string | null {
+  let proc: ReturnType<typeof Bun.spawnSync>;
+
+  try {
+    proc = Bun.spawnSync([gitExecutable, "show", objectName], {
+      cwd: repoRoot,
+      stdin: "ignore",
+      stdout: "pipe",
+      stderr: "pipe",
+    });
+  } catch (error) {
+    logSourceDiagnostic(`failed to run Git while reading source ${objectName}`, error);
+    return null;
+  }
+
+  if (proc.exitCode !== 0) {
+    const stderr = Buffer.from(proc.stderr ?? []).toString("utf8");
+    if (!isExpectedMissingGitSource(stderr)) {
+      logSourceDiagnostic(`failed to read Git source ${objectName} in ${repoRoot}`, stderr);
+    }
+    return null;
+  }
+
+  return Buffer.from(proc.stdout ??
[]).toString("utf8");
+}
+
+async function readSpec(spec: FileSourceSpec): Promise<string | null> {
+  if (spec.kind === "none") {
+    return null;
+  }
+
+  if (spec.kind === "fs") {
+    return readFsSpec(spec);
+  }
+
+  if (spec.kind === "git-index") {
+    return readGitIndexSpec(spec);
+  }
+
+  return readGitBlobSpec(spec);
+}
+
+/** Build a per-file source fetcher that caches each side's resolved text. */
+export function createFileSourceFetcher(specs: ResolvedSpecs): FileSourceFetcher {
+  const cache = new Map<FileSourceSide, string | null>();
+
+  return {
+    async getFullText(side) {
+      if (cache.has(side)) {
+        return cache.get(side) ?? null;
+      }
+
+      const text = await readSpec(specs[side]);
+      cache.set(side, text);
+      return text;
+    },
+  };
+}
diff --git a/src/core/git.test.ts b/src/core/git.test.ts
index 45713747..2eefacad 100644
--- a/src/core/git.test.ts
+++ b/src/core/git.test.ts
@@ -1,6 +1,55 @@
-import { describe, expect, test } from "bun:test";
-import { buildGitStashShowArgs, runGitText } from "./git";
+import { afterEach, describe, expect, test } from "bun:test";
+import { mkdtempSync, rmSync, writeFileSync } from "node:fs";
+import { tmpdir } from "node:os";
+import { join } from "node:path";
+import { buildGitStashShowArgs, resolveGitDiffEndpoints, runGitText } from "./git";
+import type { VcsCommandInput } from "./types";
+const tempDirs: string[] = [];
+
+function git(cwd: string, ...cmd: string[]) {
+  const proc = Bun.spawnSync(["git", ...cmd], {
+    cwd,
+    stdout: "pipe",
+    stderr: "pipe",
+    stdin: "ignore",
+  });
+
+  if (proc.exitCode !== 0) {
+    const stderr = Buffer.from(proc.stderr).toString("utf8");
+    throw new Error(stderr.trim() || `git ${cmd.join(" ")} failed`);
+  }
+
+  return Buffer.from(proc.stdout).toString("utf8");
+}
+
+function createTempRepo(prefix: string) {
+  const dir = mkdtempSync(join(tmpdir(), prefix));
+  tempDirs.push(dir);
+  git(dir, "init");
+  git(dir, "config", "user.name", "Test User");
+  git(dir, "config", "user.email", "test@example.com");
+  git(dir, "config", "commit.gpgSign",
"false");
+  return dir;
+}
+
+function makeGitInput(overrides: Partial<VcsCommandInput> = {}): VcsCommandInput {
+  return {
+    kind: "vcs",
+    staged: false,
+    options: { mode: "auto" },
+    ...overrides,
+  };
+}
+
+afterEach(() => {
+  while (tempDirs.length > 0) {
+    const dir = tempDirs.pop();
+    if (dir) {
+      rmSync(dir, { recursive: true, force: true });
+    }
+  }
+});
 describe("git command helpers", () => {
   test("disables external diff tools for stash patches", () => {
     const args = buildGitStashShowArgs({
@@ -27,3 +76,179 @@ describe("git command helpers", () => {
     );
   });
 });
+
+describe("resolveGitDiffEndpoints", () => {
+  test("staged diffs compare HEAD against the index", () => {
+    const repoRoot = createTempRepo("hunk-endpoints-staged-");
+    writeFileSync(join(repoRoot, "x.txt"), "x\n");
+    git(repoRoot, "add", "x.txt");
+    git(repoRoot, "commit", "-m", "initial");
+    const headSha = git(repoRoot, "rev-parse", "HEAD").trim();
+
+    expect(
+      resolveGitDiffEndpoints(makeGitInput({ staged: true }), { cwd: repoRoot, repoRoot }),
+    ).toEqual({ old: { kind: "git-ref", ref: headSha }, new: { kind: "index" } });
+  });
+
+  test("staged diffs in an unborn repo compare missing old source against the index", () => {
+    const repoRoot = createTempRepo("hunk-endpoints-staged-unborn-");
+    writeFileSync(join(repoRoot, "x.txt"), "x\n");
+    git(repoRoot, "add", "x.txt");
+
+    expect(
+      resolveGitDiffEndpoints(makeGitInput({ staged: true }), { cwd: repoRoot, repoRoot }),
+    ).toEqual({ old: { kind: "none" }, new: { kind: "index" } });
+  });
+
+  test("staged diffs against an explicit ref compare that ref against the index", () => {
+    const repoRoot = createTempRepo("hunk-endpoints-staged-ref-");
+    writeFileSync(join(repoRoot, "x.txt"), "first\n");
+    git(repoRoot, "add", "x.txt");
+    git(repoRoot, "commit", "-m", "first");
+    const firstSha = git(repoRoot, "rev-parse", "HEAD").trim();
+
+    writeFileSync(join(repoRoot, "x.txt"), "second\n");
+    git(repoRoot, "add", "x.txt");
+    git(repoRoot, "commit", "-m", "second");
+ + writeFileSync(join(repoRoot, "x.txt"), "staged\n"); + git(repoRoot, "add", "x.txt"); + + expect( + resolveGitDiffEndpoints(makeGitInput({ staged: true, range: firstSha }), { + cwd: repoRoot, + repoRoot, + }), + ).toEqual({ old: { kind: "git-ref", ref: firstSha }, new: { kind: "index" } }); + }); + + test("no range diffs the index against the working tree", () => { + const repoRoot = createTempRepo("hunk-endpoints-no-range-"); + expect(resolveGitDiffEndpoints(makeGitInput(), { cwd: repoRoot, repoRoot })).toEqual({ + old: { kind: "index" }, + new: { kind: "worktree" }, + }); + }); + + test("a single rev compares that rev against the working tree", () => { + const repoRoot = createTempRepo("hunk-endpoints-single-rev-"); + writeFileSync(join(repoRoot, "x.txt"), "first\n"); + git(repoRoot, "add", "x.txt"); + git(repoRoot, "commit", "-m", "first"); + const headSha = git(repoRoot, "rev-parse", "HEAD").trim(); + + const endpoints = resolveGitDiffEndpoints(makeGitInput({ range: "HEAD" }), { + cwd: repoRoot, + repoRoot, + }); + + expect(endpoints).not.toBeNull(); + expect(endpoints!.new).toEqual({ kind: "worktree" }); + expect(endpoints!.old).toEqual({ kind: "git-ref", ref: headSha }); + }); + + test("two-dot ranges resolve to oldRef..newRef", () => { + const repoRoot = createTempRepo("hunk-endpoints-two-dot-"); + writeFileSync(join(repoRoot, "x.txt"), "first\n"); + git(repoRoot, "add", "x.txt"); + git(repoRoot, "commit", "-m", "first"); + const firstSha = git(repoRoot, "rev-parse", "HEAD").trim(); + + writeFileSync(join(repoRoot, "x.txt"), "second\n"); + git(repoRoot, "add", "x.txt"); + git(repoRoot, "commit", "-m", "second"); + const secondSha = git(repoRoot, "rev-parse", "HEAD").trim(); + + const endpoints = resolveGitDiffEndpoints( + makeGitInput({ range: `${firstSha}..${secondSha}` }), + { cwd: repoRoot, repoRoot }, + ); + + expect(endpoints).toEqual({ + old: { kind: "git-ref", ref: firstSha }, + new: { kind: "git-ref", ref: secondSha }, + }); + }); + + test("rev^! 
resolves to the commit's parent..commit pair", () => { + const repoRoot = createTempRepo("hunk-endpoints-bang-"); + writeFileSync(join(repoRoot, "x.txt"), "first\n"); + git(repoRoot, "add", "x.txt"); + git(repoRoot, "commit", "-m", "first"); + const firstSha = git(repoRoot, "rev-parse", "HEAD").trim(); + + writeFileSync(join(repoRoot, "x.txt"), "second\n"); + git(repoRoot, "add", "x.txt"); + git(repoRoot, "commit", "-m", "second"); + const secondSha = git(repoRoot, "rev-parse", "HEAD").trim(); + + const endpoints = resolveGitDiffEndpoints(makeGitInput({ range: "HEAD^!" }), { + cwd: repoRoot, + repoRoot, + }); + + expect(endpoints).toEqual({ + old: { kind: "git-ref", ref: firstSha }, + new: { kind: "git-ref", ref: secondSha }, + }); + }); + + test("symmetric difference (A...B) resolves to merge-base(A, B) on the old side and B on the new side", () => { + const repoRoot = createTempRepo("hunk-endpoints-three-dot-"); + writeFileSync(join(repoRoot, "x.txt"), "base\n"); + git(repoRoot, "add", "x.txt"); + git(repoRoot, "commit", "-m", "base"); + const baseBranch = git(repoRoot, "rev-parse", "--abbrev-ref", "HEAD").trim(); + const baseSha = git(repoRoot, "rev-parse", "HEAD").trim(); + + git(repoRoot, "checkout", "-q", "-b", "feature"); + writeFileSync(join(repoRoot, "x.txt"), "feature\n"); + git(repoRoot, "commit", "-am", "feature"); + const featureSha = git(repoRoot, "rev-parse", "HEAD").trim(); + + git(repoRoot, "checkout", "-q", baseBranch); + writeFileSync(join(repoRoot, "x.txt"), "main-2\n"); + git(repoRoot, "commit", "-am", "main-2"); + + // base and feature have diverged: merge-base remains the original `base` SHA, + // and `A...B` should compare that merge-base to the right-hand ref. 
+ const endpoints = resolveGitDiffEndpoints(makeGitInput({ range: `${baseBranch}...feature` }), { + cwd: repoRoot, + repoRoot, + }); + + expect(endpoints).toEqual({ + old: { kind: "git-ref", ref: baseSha }, + new: { kind: "git-ref", ref: featureSha }, + }); + // Sanity-check that this matches what `git merge-base` would say. + expect(baseSha).toBe(git(repoRoot, "merge-base", baseBranch, "feature").trim()); + expect(featureSha).not.toBe(baseSha); + }); + + test("returns null for multi-rev ranges that cannot be mapped to a single old/new pair", () => { + const repoRoot = createTempRepo("hunk-endpoints-multi-"); + writeFileSync(join(repoRoot, "x.txt"), "first\n"); + git(repoRoot, "add", "x.txt"); + git(repoRoot, "commit", "-m", "first"); + const firstSha = git(repoRoot, "rev-parse", "HEAD").trim(); + + writeFileSync(join(repoRoot, "x.txt"), "second\n"); + git(repoRoot, "add", "x.txt"); + git(repoRoot, "commit", "-m", "second"); + + writeFileSync(join(repoRoot, "x.txt"), "third\n"); + git(repoRoot, "add", "x.txt"); + git(repoRoot, "commit", "-m", "third"); + + // Two positive revs (no negatives) is a shape we cannot represent as one + // old/new pair. Return null so callers disable source-by-ref expansion + // instead of silently reading from HEAD/the working tree. 
+    expect(
+      resolveGitDiffEndpoints(makeGitInput({ range: `${firstSha} HEAD` }), {
+        cwd: repoRoot,
+        repoRoot,
+      }),
+    ).toBeNull();
+  });
+});
diff --git a/src/core/git.ts b/src/core/git.ts
index f2f6243a..acf9778e 100644
--- a/src/core/git.ts
+++ b/src/core/git.ts
@@ -13,6 +13,7 @@ export interface RunGitTextOptions {
 }
 
 interface RunGitCommandResult {
+  stderr: string;
   stdout: string;
   exitCode: number;
 }
@@ -184,6 +185,7 @@ function isUnknownRevisionMessage(stderr: string) {
     "bad revision",
     "unknown revision or path not in the working tree",
     "ambiguous argument",
+    "Needed a single revision",
   ].some((fragment) => stderr.includes(fragment));
 }
@@ -262,7 +264,10 @@ function translateGitExitFailure(input: GitBackedInput, stderr: string) {
     return createMissingRepoError(input);
   }
-  if (input.kind === "stash-show" && isNoStashEntriesMessage(stderr)) {
+  if (
+    input.kind === "stash-show" &&
+    (isNoStashEntriesMessage(stderr) || isUnknownRevisionMessage(stderr))
+  ) {
     return createMissingStashError(input);
   }
@@ -313,6 +318,7 @@
   }
 
   return {
+    stderr,
     stdout,
     exitCode: proc.exitCode,
   };
@@ -481,3 +487,196 @@ export function resolveGitRepoRoot(
     ...options,
   }).trim();
 }
+
+/** Resolve one commit-ish ref to the exact commit object used for later blob reads. */
+export function resolveGitCommitRef(
+  input: GitBackedInput,
+  ref: string,
+  options: Omit<RunGitTextOptions, "input" | "args"> = {},
+) {
+  return runGitText({
+    input,
+    args: ["rev-parse", "--verify", "--end-of-options", `${ref}^{commit}`],
+    ...options,
+  })
+    .split("\n")[0]!
+    .trim();
+}
+
+/** Resolve a commit-ish ref, returning null when that ref does not exist.
*/
+function tryResolveGitCommitRef(
+  input: GitBackedInput,
+  ref: string,
+  options: Omit<RunGitTextOptions, "input" | "args"> = {},
+) {
+  const result = runGitCommand({
+    input,
+    args: ["rev-parse", "--verify", "--end-of-options", `${ref}^{commit}`],
+    ...options,
+    acceptedExitCodes: [0, 1, 128],
+  });
+
+  if (result.exitCode === 0) {
+    return result.stdout.split("\n")[0]!.trim();
+  }
+
+  if (isUnknownRevisionMessage(result.stderr)) {
+    return null;
+  }
+
+  throw translateGitExitFailure(input, result.stderr.trim() || `Could not resolve Git ref ${ref}.`);
+}
+
+export type GitDiffEndpoint =
+  | { kind: "none" }
+  | { kind: "git-ref"; ref: string }
+  | { kind: "index" }
+  | { kind: "worktree" };
+
+/** Endpoints describing what each diff side compares for one VCS diff input. */
+export interface GitDiffEndpoints {
+  old: GitDiffEndpoint;
+  new: GitDiffEndpoint;
+}
+
+/** Parse "A...B" into its two refs, defaulting empty sides to HEAD as Git does. */
+function parseSymmetricDiffRange(range: string): { left: string; right: string } | null {
+  // Runs of four or more dots are not a valid range; bail rather than
+  // silently treating the first three as a symmetric-diff separator.
+  if (/\.{4,}/.test(range)) {
+    return null;
+  }
+
+  const parts = range.split("...");
+  if (parts.length !== 2) {
+    return null;
+  }
+
+  return { left: parts[0] || "HEAD", right: parts[1] || "HEAD" };
+}
+
+/** Resolve rev-parse output into positive and negative revisions for one diff range. */
+function resolveRangeRevisions(
+  input: VcsCommandInput,
+  range: string,
+  {
+    cwd = process.cwd(),
+    gitExecutable = "git",
+    repoRoot,
+  }: Omit<RunGitTextOptions, "input" | "args"> & { repoRoot?: string } = {},
+): { positives: string[]; negatives: string[] } {
+  const revs = runGitText({
+    input,
+    args: ["rev-parse", "--revs-only", range],
+    cwd: repoRoot ??
cwd,
+    gitExecutable,
+  })
+    .split("\n")
+    .map((line) => line.trim())
+    .filter(Boolean);
+
+  return {
+    positives: revs.filter((rev) => !rev.startsWith("^")),
+    negatives: revs.filter((rev) => rev.startsWith("^")).map((rev) => rev.slice(1)),
+  };
+}
+
+/**
+ * Resolve the old/new endpoints implied by a `hunk diff` invocation.
+ *
+ * Returns `null` when the range maps to a shape we cannot represent as a
+ * single old/new pair. Callers should treat that as "do not attempt to read
+ * source by ref" rather than silently falling back to the working tree.
+ */
+export function resolveGitDiffEndpoints(
+  input: VcsCommandInput,
+  {
+    cwd = process.cwd(),
+    gitExecutable = "git",
+    repoRoot,
+  }: Omit<RunGitTextOptions, "input" | "args"> & { repoRoot?: string } = {},
+): GitDiffEndpoints | null {
+  if (input.staged) {
+    if (!input.range) {
+      const headRef = tryResolveGitCommitRef(input, "HEAD", {
+        cwd: repoRoot ?? cwd,
+        gitExecutable,
+      });
+
+      return {
+        old: headRef ? { kind: "git-ref", ref: headRef } : { kind: "none" },
+        new: { kind: "index" },
+      };
+    }
+
+    const { positives, negatives } = resolveRangeRevisions(input, input.range, {
+      cwd,
+      gitExecutable,
+      repoRoot,
+    });
+
+    if (positives.length === 1 && negatives.length === 0) {
+      return { old: { kind: "git-ref", ref: positives[0]! }, new: { kind: "index" } };
+    }
+
+    return null;
+  }
+
+  if (!input.range) {
+    return { old: { kind: "index" }, new: { kind: "worktree" } };
+  }
+
+  // `git diff A...B` compares merge-base(A, B) against B, not HEAD or the
+  // working tree. Resolve the merge base explicitly so expanded source rows
+  // read from the same revisions the diff was computed from.
+  const symmetric = parseSymmetricDiffRange(input.range);
+  if (symmetric) {
+    const mergeBase = runGitText({
+      input,
+      args: ["merge-base", symmetric.left, symmetric.right],
+      cwd: repoRoot ??
cwd, + gitExecutable, + }) + .split("\n")[0] + ?.trim(); + if (!mergeBase) { + return null; + } + + const rightRef = resolveGitCommitRef(input, symmetric.right, { + cwd: repoRoot ?? cwd, + gitExecutable, + }); + + return { + old: { kind: "git-ref", ref: mergeBase }, + new: { kind: "git-ref", ref: rightRef }, + }; + } + + // Real rev-parse failures (bogus refs, missing repo) propagate to the caller + // so the user sees a clear error instead of a silent working-tree fallback. + const { positives, negatives } = resolveRangeRevisions(input, input.range, { + cwd, + gitExecutable, + repoRoot, + }); + + if (positives.length === 1 && negatives.length === 0) { + // Single revision diffs against the working tree. + return { old: { kind: "git-ref", ref: positives[0]! }, new: { kind: "worktree" } }; + } + + if (positives.length === 1 && negatives.length === 1) { + return { + old: { kind: "git-ref", ref: negatives[0]! }, + new: { kind: "git-ref", ref: positives[0]! }, + }; + } + + // Multi-revision ranges that succeeded rev-parse but don't fit a simple + // old/new pair (octopus merges, multi-positive sets) have no safe mapping. + // Returning null disables source-by-ref reads so we never render source + // from the wrong revision. 
+ return null; +} diff --git a/src/core/loaders.test.ts b/src/core/loaders.test.ts index 5cdfac2a..4af540a5 100644 --- a/src/core/loaders.test.ts +++ b/src/core/loaders.test.ts @@ -730,6 +730,54 @@ describe("loadAppBootstrap", () => { expect(bootstrap.changeset.files.map((file) => file.path)).toEqual(["alpha.ts"]); }); + test("loads staged new files before the first commit", async () => { + const dir = createTempRepo("hunk-git-staged-unborn-"); + + writeFileSync(join(dir, "alpha.ts"), "export const alpha = 1;\n"); + git(dir, "add", "alpha.ts"); + + const bootstrap = await loadFromRepo(dir, { + kind: "vcs", + staged: true, + options: { mode: "auto" }, + }); + + const file = bootstrap.changeset.files[0]; + expect(bootstrap.changeset.files.map((entry) => entry.path)).toEqual(["alpha.ts"]); + expect(file?.metadata.type).toBe("new"); + expect(file?.sourceFetcher).toBeDefined(); + expect(await file?.sourceFetcher?.getFullText("old")).toBeNull(); + expect(await file?.sourceFetcher?.getFullText("new")).toBe("export const alpha = 1;\n"); + }); + + test("staged diffs against an explicit ref fetch old source from that ref", async () => { + const dir = createTempRepo("hunk-git-staged-ref-source-"); + + writeFileSync(join(dir, "alpha.ts"), "export const alpha = 1;\n"); + git(dir, "add", "alpha.ts"); + git(dir, "commit", "-m", "first"); + const firstRef = git(dir, "rev-parse", "HEAD").trim(); + + writeFileSync(join(dir, "alpha.ts"), "export const alpha = 2;\n"); + git(dir, "add", "alpha.ts"); + git(dir, "commit", "-m", "second"); + + writeFileSync(join(dir, "alpha.ts"), "export const alpha = 3;\n"); + git(dir, "add", "alpha.ts"); + + const bootstrap = await loadFromRepo(dir, { + kind: "vcs", + staged: true, + range: firstRef, + options: { mode: "auto" }, + }); + + const file = bootstrap.changeset.files[0]; + expect(file?.path).toBe("alpha.ts"); + expect(await file?.sourceFetcher?.getFullText("old")).toBe("export const alpha = 1;\n"); + expect(await 
file?.sourceFetcher?.getFullText("new")).toBe("export const alpha = 3;\n"); + }); + test("loads staged-only git diffs when diff.noprefix is enabled", async () => { const dir = createTempRepo("hunk-git-staged-noprefix-"); @@ -1304,3 +1352,230 @@ describe("loadAppBootstrap", () => { expect(bootstrap.changeset.files[0]?.path).toBe("a/inner.ts"); }); }); + +describe("loadAppBootstrap source fetcher attachment", () => { + test("file-pair diffs attach a fetcher that resolves both fs sides", async () => { + const dir = mkdtempSync(join(tmpdir(), "hunk-source-pair-")); + tempDirs.push(dir); + + const left = join(dir, "before.ts"); + const right = join(dir, "after.ts"); + writeFileSync(left, "old\n"); + writeFileSync(right, "new\n"); + + const bootstrap = await loadAppBootstrap({ + kind: "diff", + left, + right, + options: { mode: "auto" }, + }); + + const file = bootstrap.changeset.files[0]; + expect(file?.sourceFetcher).toBeDefined(); + expect(await file?.sourceFetcher?.getFullText("old")).toBe("old\n"); + expect(await file?.sourceFetcher?.getFullText("new")).toBe("new\n"); + }); + + test("git working-tree diffs read the new side from the working tree and the old side from the index", async () => { + const dir = createTempRepo("hunk-source-git-wt-"); + writeFileSync(join(dir, "value.txt"), "first\n"); + git(dir, "add", "value.txt"); + git(dir, "commit", "-m", "initial"); + + writeFileSync(join(dir, "value.txt"), "second\n"); + + const bootstrap = await loadFromRepo(dir, { + kind: "vcs", + staged: false, + options: { mode: "auto" }, + }); + + const file = bootstrap.changeset.files[0]; + expect(file?.sourceFetcher).toBeDefined(); + expect(await file?.sourceFetcher?.getFullText("new")).toBe("second\n"); + expect(await file?.sourceFetcher?.getFullText("old")).toBe("first\n"); + }); + + test("unstaged working-tree diffs read old source from the index when it differs from HEAD", async () => { + const dir = createTempRepo("hunk-source-git-wt-index-"); + writeFileSync(join(dir, 
"value.txt"), "committed\n"); + git(dir, "add", "value.txt"); + git(dir, "commit", "-m", "initial"); + + writeFileSync(join(dir, "value.txt"), "staged\n"); + git(dir, "add", "value.txt"); + writeFileSync(join(dir, "value.txt"), "working tree\n"); + + const bootstrap = await loadFromRepo(dir, { + kind: "vcs", + staged: false, + options: { mode: "auto" }, + }); + + const file = bootstrap.changeset.files[0]; + expect(file?.sourceFetcher).toBeDefined(); + expect(await file?.sourceFetcher?.getFullText("old")).toBe("staged\n"); + expect(await file?.sourceFetcher?.getFullText("new")).toBe("working tree\n"); + }); + + test("`hunk show ` resolves both sides through git blobs", async () => { + const dir = createTempRepo("hunk-source-show-"); + writeFileSync(join(dir, "value.txt"), "first\n"); + git(dir, "add", "value.txt"); + git(dir, "commit", "-m", "initial"); + + writeFileSync(join(dir, "value.txt"), "second\n"); + git(dir, "add", "value.txt"); + git(dir, "commit", "-m", "second"); + + const bootstrap = await loadFromRepo(dir, { + kind: "show", + ref: "HEAD", + options: { mode: "auto" }, + }); + + const file = bootstrap.changeset.files[0]; + expect(file?.sourceFetcher).toBeDefined(); + expect(await file?.sourceFetcher?.getFullText("new")).toBe("second\n"); + expect(await file?.sourceFetcher?.getFullText("old")).toBe("first\n"); + }); + + test("`hunk show ` pins expansion sources after the ref moves", async () => { + const dir = createTempRepo("hunk-source-show-pinned-"); + writeFileSync(join(dir, "value.txt"), "first\n"); + git(dir, "add", "value.txt"); + git(dir, "commit", "-m", "initial"); + + writeFileSync(join(dir, "value.txt"), "second\n"); + git(dir, "add", "value.txt"); + git(dir, "commit", "-m", "second"); + + const bootstrap = await loadFromRepo(dir, { + kind: "show", + ref: "HEAD", + options: { mode: "auto" }, + }); + + writeFileSync(join(dir, "value.txt"), "third\n"); + git(dir, "add", "value.txt"); + git(dir, "commit", "-m", "third"); + + const file = 
bootstrap.changeset.files[0]; + expect(await file?.sourceFetcher?.getFullText("new")).toBe("second\n"); + expect(await file?.sourceFetcher?.getFullText("old")).toBe("first\n"); + }); + + test("`hunk stash show` pins expansion sources after stash@{0} moves", async () => { + const dir = createTempRepo("hunk-source-stash-pinned-"); + writeFileSync(join(dir, "value.txt"), "base\n"); + git(dir, "add", "value.txt"); + git(dir, "commit", "-m", "initial"); + + writeFileSync(join(dir, "value.txt"), "first stash\n"); + git(dir, "stash", "push", "-m", "first stash"); + + const bootstrap = await loadFromRepo(dir, { + kind: "stash-show", + options: { mode: "auto" }, + }); + + writeFileSync(join(dir, "value.txt"), "second stash\n"); + git(dir, "stash", "push", "-m", "second stash"); + + const file = bootstrap.changeset.files[0]; + expect(await file?.sourceFetcher?.getFullText("new")).toBe("first stash\n"); + expect(await file?.sourceFetcher?.getFullText("old")).toBe("base\n"); + }); + + test("untracked files attach a fetcher whose old side is null", async () => { + const dir = createTempRepo("hunk-source-untracked-"); + writeFileSync(join(dir, "tracked.ts"), "tracked\n"); + git(dir, "add", "tracked.ts"); + git(dir, "commit", "-m", "initial"); + + writeFileSync(join(dir, "added.txt"), "added contents\n"); + + const bootstrap = await loadFromRepo(dir, { + kind: "vcs", + staged: false, + options: { mode: "auto" }, + }); + + const untracked = bootstrap.changeset.files.find((entry) => entry.isUntracked); + expect(untracked?.sourceFetcher).toBeDefined(); + expect(await untracked?.sourceFetcher?.getFullText("new")).toBe("added contents\n"); + expect(await untracked?.sourceFetcher?.getFullText("old")).toBeNull(); + }); + + test("deleted files attach a fetcher with new=null and old reading the prior contents", async () => { + const dir = createTempRepo("hunk-source-deleted-"); + writeFileSync(join(dir, "victim.txt"), "going away\n"); + git(dir, "add", "victim.txt"); + git(dir, "commit", 
"-m", "add victim"); + git(dir, "rm", "victim.txt"); + git(dir, "commit", "-m", "remove victim"); + + const bootstrap = await loadFromRepo(dir, { + kind: "show", + ref: "HEAD", + options: { mode: "auto" }, + }); + + const file = bootstrap.changeset.files[0]; + expect(file?.metadata.type).toBe("deleted"); + expect(file?.sourceFetcher).toBeDefined(); + expect(await file?.sourceFetcher?.getFullText("new")).toBeNull(); + expect(await file?.sourceFetcher?.getFullText("old")).toBe("going away\n"); + }); + + test("renamed files read the old side from previousPath blob", async () => { + const dir = createTempRepo("hunk-source-renamed-"); + writeFileSync(join(dir, "before.txt"), "shared\nold-only\nshared\n"); + git(dir, "add", "before.txt"); + git(dir, "commit", "-m", "add before"); + git(dir, "mv", "before.txt", "after.txt"); + writeFileSync(join(dir, "after.txt"), "shared\nnew-only\nshared\n"); + git(dir, "add", "after.txt"); + git(dir, "commit", "-m", "rename and modify"); + + const bootstrap = await loadFromRepo(dir, { + kind: "show", + ref: "HEAD", + options: { mode: "auto" }, + }); + + const file = bootstrap.changeset.files[0]; + expect(file?.path).toBe("after.txt"); + expect(file?.previousPath).toBe("before.txt"); + expect(file?.sourceFetcher).toBeDefined(); + expect(await file?.sourceFetcher?.getFullText("old")).toBe("shared\nold-only\nshared\n"); + expect(await file?.sourceFetcher?.getFullText("new")).toBe("shared\nnew-only\nshared\n"); + }); + + test("raw patch input does not attach a source fetcher", async () => { + const dir = mkdtempSync(join(tmpdir(), "hunk-source-patch-")); + tempDirs.push(dir); + + const patch = join(dir, "change.patch"); + writeFileSync( + patch, + [ + "diff --git a/a.txt b/a.txt", + "--- a/a.txt", + "+++ b/a.txt", + "@@ -1 +1 @@", + "-old", + "+new", + "", + ].join("\n"), + ); + + const bootstrap = await loadAppBootstrap({ + kind: "patch", + file: patch, + options: { mode: "auto" }, + }); + + 
expect(bootstrap.changeset.files[0]?.sourceFetcher).toBeUndefined(); + }); +}); diff --git a/src/core/loaders.ts b/src/core/loaders.ts index c53e6880..9b9ab269 100644 --- a/src/core/loaders.ts +++ b/src/core/loaders.ts @@ -12,15 +12,20 @@ import { findAgentFileContext, loadAgentContext } from "./agent"; import { createSkippedBinaryMetadata, isProbablyBinaryFile, patchLooksBinary } from "./binary"; import { normalizeDiffMetadataPaths, normalizeDiffPath } from "./diffPaths"; import { HunkUserError } from "./errors"; +import { createFileSourceFetcher, type FileSourceFetcher, type FileSourceSpec } from "./fileSource"; import { buildGitDiffArgs, buildGitDiffNumstatArgs, buildGitShowArgs, buildGitStashShowArgs, listGitUntrackedFiles, + resolveGitDiffEndpoints, + resolveGitCommitRef, resolveGitRepoRoot, runGitText, runGitUntrackedFileDiffText, + type GitDiffEndpoint, + type GitDiffEndpoints, } from "./git"; import { buildJjDiffArgs, @@ -207,15 +212,81 @@ function findPatchChunk(metadata: FileDiffMetadata, chunks: string[], index: num ); } +/** Per-file context exposed to source-fetcher builders after path normalization. */ +export interface DiffFileSourceContext { + path: string; + previousPath?: string; + type: FileDiffMetadata["type"]; + isUntracked: boolean; + isBinary: boolean; +} + interface BuildDiffFileOptions { isUntracked?: boolean; previousPath?: string; isBinary?: boolean; + sourceFetcherBuilder?: (file: DiffFileSourceContext) => FileSourceFetcher | undefined; isTooLarge?: boolean; stats?: DiffFile["stats"]; statsTruncated?: boolean; } +interface ResolvedFileSourceSpecs { + old: FileSourceSpec; + new: FileSourceSpec; +} + +/** Build a binary-aware source-fetcher factory from per-file source specs. */ +function createSourceFetcherBuilder( + resolveSpecs: (file: DiffFileSourceContext) => ResolvedFileSourceSpecs | undefined, +): NonNullable { + return (file) => { + if (file.isBinary) { + return undefined; + } + + const specs = resolveSpecs(file); + return specs ? 
createFileSourceFetcher(specs) : undefined; + }; +} + +/** Convert one Git diff endpoint into the corresponding file source lookup. */ +function gitEndpointSourceSpec( + endpoint: GitDiffEndpoint, + repoRoot: string, + filePath: string, +): FileSourceSpec { + switch (endpoint.kind) { + case "none": + return { kind: "none" }; + case "git-ref": + return { kind: "git-blob", repoRoot, ref: endpoint.ref, path: filePath }; + case "index": + return { kind: "git-index", repoRoot, path: filePath }; + case "worktree": + return { kind: "fs", absolutePath: join(repoRoot, filePath) }; + } +} + +/** Build source fetchers for Git-backed diffs from explicit old/new endpoints. */ +function buildGitEndpointSourceFetcherBuilder( + repoRoot: string, + endpoints: GitDiffEndpoints, +): NonNullable { + return createSourceFetcherBuilder(({ path, previousPath, type }) => { + const oldPath = previousPath ?? path; + + return { + old: + type === "new" ? { kind: "none" } : gitEndpointSourceSpec(endpoints.old, repoRoot, oldPath), + new: + type === "deleted" + ? { kind: "none" } + : gitEndpointSourceSpec(endpoints.new, repoRoot, path), + }; + }); +} + /** Build the normalized per-file model used by the UI regardless of input mode. */ function buildDiffFile( metadata: FileDiffMetadata, @@ -227,6 +298,7 @@ function buildDiffFile( isUntracked, previousPath, isBinary, + sourceFetcherBuilder, isTooLarge, stats, statsTruncated, @@ -235,6 +307,15 @@ function buildDiffFile( const normalizedMetadata = normalizeDiffMetadataPaths(metadata); const path = normalizedMetadata.name; const resolvedPreviousPath = normalizeDiffPath(previousPath) ?? normalizedMetadata.prevName; + const resolvedIsBinary = isBinary ?? 
patchLooksBinary(patch); + + const sourceFetcher = sourceFetcherBuilder?.({ + path, + previousPath: resolvedPreviousPath, + type: normalizedMetadata.type, + isUntracked: Boolean(isUntracked), + isBinary: resolvedIsBinary, + }); return { id: `${sourcePrefix}:${index}:${path}`, @@ -246,9 +327,10 @@ function buildDiffFile( metadata: normalizedMetadata, agent: findAgentFileContext(agentContext, path, resolvedPreviousPath), isUntracked, - isBinary: isBinary ?? patchLooksBinary(patch), + isBinary: resolvedIsBinary, isTooLarge, statsTruncated, + sourceFetcher, }; } @@ -770,6 +852,10 @@ function buildUntrackedDiffFile( agentContext, { isUntracked: true, + sourceFetcherBuilder: createSourceFetcherBuilder(() => ({ + old: { kind: "none" }, + new: { kind: "fs", absolutePath: join(repoRoot, filePath) }, + })), }, ); } @@ -817,6 +903,7 @@ function normalizePatchChangeset( title: string, sourceLabel: string, agentContext: AgentContext | null, + perFileOptions?: Pick, ): Changeset { const normalizedPatchText = normalizeGitPatchPrefixes( stripGitLogMetadata(stripTerminalControl(patchText.replaceAll("\r\n", "\n"))), @@ -856,6 +943,7 @@ function normalizePatchChangeset( index, sourceLabel, agentContext, + perFileOptions, ), ), }; @@ -961,6 +1049,10 @@ async function loadFileDiffChangeset( files: [ buildDiffFile(metadata, patch, 0, displayPath, agentContext, { previousPath: basename(input.left), + sourceFetcherBuilder: createSourceFetcherBuilder(() => ({ + old: { kind: "fs", absolutePath: leftPath }, + new: { kind: "fs", absolutePath: rightPath }, + })), }), ], } satisfies Changeset; @@ -982,6 +1074,13 @@ async function loadGitChangeset( const largeTrackedFiles = parseGitNumstat( runGitText({ input, args: buildGitDiffNumstatArgs(input), cwd }), ).filter((file) => shouldSkipLargeTrackedDiff(file, repoRoot)); + const endpoints = resolveGitDiffEndpoints(input, { cwd, repoRoot }); + // When the range maps to a shape we can't represent as a single old/new pair + // (e.g. 
octopus, multi-positive sets), omit the source fetcher entirely so + // expansion is disabled rather than reading from the wrong revision. + const sourceFetcherBuilder = endpoints + ? buildGitEndpointSourceFetcherBuilder(repoRoot, endpoints) + : undefined; const trackedChangeset = normalizePatchChangeset( runGitText({ input, @@ -994,6 +1093,7 @@ async function loadGitChangeset( title, repoRoot, agentContext, + sourceFetcherBuilder ? { sourceFetcherBuilder } : undefined, ); const trackedFiles = [ ...trackedChangeset.files, @@ -1033,6 +1133,17 @@ async function loadGitChangeset( } satisfies Changeset; } +function buildRefRangeSourceFetcherBuilder( + repoRoot: string, + oldRef: string, + newRef: string, +): NonNullable { + return buildGitEndpointSourceFetcherBuilder(repoRoot, { + old: { kind: "git-ref", ref: oldRef }, + new: { kind: "git-ref", ref: newRef }, + }); +} + /** Build a changeset from the current Jujutsu working-copy commit or a revset. */ async function loadJjDiffChangeset( input: VcsCommandInput, @@ -1063,12 +1174,18 @@ async function loadShowChangeset( ) { const repoRoot = resolveGitRepoRoot(input, { cwd }); const repoName = basename(repoRoot); + const requestedRef = input.ref ?? "HEAD"; + const newRef = resolveGitCommitRef(input, requestedRef, { cwd: repoRoot }); + const showInput = { ...input, ref: newRef }; return normalizePatchChangeset( - runGitText({ input, args: buildGitShowArgs(input), cwd }), + runGitText({ input, args: buildGitShowArgs(showInput), cwd }), input.ref ? `${repoName} show ${input.ref}` : `${repoName} show HEAD`, repoRoot, agentContext, + { + sourceFetcherBuilder: buildRefRangeSourceFetcherBuilder(repoRoot, `${newRef}^`, newRef), + }, ); } @@ -1104,12 +1221,18 @@ async function loadStashShowChangeset( const repoRoot = resolveGitRepoRoot(input, { cwd }); const repoName = basename(repoRoot); + const requestedRef = input.ref ?? 
"stash@{0}"; + const newRef = resolveGitCommitRef(input, requestedRef, { cwd: repoRoot }); + const stashInput = { ...input, ref: newRef }; return normalizePatchChangeset( - runGitText({ input, args: buildGitStashShowArgs(input), cwd }), + runGitText({ input, args: buildGitStashShowArgs(stashInput), cwd }), input.ref ? `${repoName} stash ${input.ref}` : `${repoName} stash`, repoRoot, agentContext, + { + sourceFetcherBuilder: buildRefRangeSourceFetcherBuilder(repoRoot, `${newRef}^`, newRef), + }, ); } diff --git a/src/core/types.ts b/src/core/types.ts index 51442aaf..89d1d746 100644 --- a/src/core/types.ts +++ b/src/core/types.ts @@ -1,4 +1,5 @@ import type { FileDiffMetadata } from "@pierre/diffs"; +import type { FileSourceFetcher } from "./fileSource"; export type LayoutMode = "auto" | "split" | "stack"; export type VcsMode = "git" | "jj"; @@ -45,6 +46,9 @@ export interface DiffFile { isBinary?: boolean; isTooLarge?: boolean; statsTruncated?: boolean; + // Optional capability for fetching the file's full text on either side. + // Loaders attach this when source content is reachable; absent when not. + sourceFetcher?: FileSourceFetcher; } export interface Changeset { From 67dadd1dcb993e0f99ce04fb1e665e4d0959852b Mon Sep 17 00:00:00 2001 From: Adit Chandra Date: Wed, 13 May 2026 02:15:47 -0400 Subject: [PATCH 2/4] feat(diff): represent expanded context in row geometry Model folded context as explicit diff rows before render planning so line gutters, hunk anchors, row windowing, and section geometry all share the same structure. 
--- src/ui/diff/codeColumns.test.ts | 51 +++- src/ui/diff/codeColumns.ts | 24 ++ src/ui/diff/expandCollapsedRows.test.ts | 315 ++++++++++++++++++++++++ src/ui/diff/expandCollapsedRows.ts | 230 +++++++++++++++++ src/ui/diff/pierre.test.ts | 111 ++++++++- src/ui/diff/pierre.ts | 163 +++++++++++- src/ui/diff/plannedReviewRows.ts | 25 +- src/ui/diff/reviewRenderPlan.ts | 19 +- src/ui/lib/diffSectionGeometry.test.ts | 139 +++++++++++ src/ui/lib/diffSectionGeometry.ts | 83 +++++-- 10 files changed, 1111 insertions(+), 49 deletions(-) create mode 100644 src/ui/diff/expandCollapsedRows.test.ts create mode 100644 src/ui/diff/expandCollapsedRows.ts diff --git a/src/ui/diff/codeColumns.test.ts b/src/ui/diff/codeColumns.test.ts index c0857fff..f03bf183 100644 --- a/src/ui/diff/codeColumns.test.ts +++ b/src/ui/diff/codeColumns.test.ts @@ -1,6 +1,7 @@ import { describe, expect, test } from "bun:test"; import type { DiffFile } from "../../core/types"; -import { maxFileCodeLineWidth } from "./codeColumns"; +import { findMaxLineNumberInRows, maxFileCodeLineWidth } from "./codeColumns"; +import type { DiffRow } from "./pierre"; /** Generate a large diff metadata fixture without checking a huge file into the repo. 
*/ function createLargeLineFixture(lineCount: number, widestLine: string): DiffFile { @@ -29,3 +30,51 @@ describe("code column measurement", () => { expect(maxFileCodeLineWidth(file)).toBe("the widest generated line".length); }); }); + +describe("findMaxLineNumberInRows", () => { + test("accounts for collapsed gap ranges that can later expand", () => { + const rows: DiffRow[] = [ + { + type: "split-line", + key: "file:line:0", + fileId: "file", + hunkIndex: 0, + left: { kind: "deletion", sign: "-", lineNumber: 5, spans: [{ text: "old" }] }, + right: { kind: "addition", sign: "+", lineNumber: 5, spans: [{ text: "new" }] }, + }, + { + type: "collapsed", + key: "file:collapsed:trailing", + fileId: "file", + hunkIndex: 0, + oldRange: [6, 1000], + newRange: [6, 1000], + position: "trailing", + text: "995 unchanged lines", + }, + ]; + + expect(findMaxLineNumberInRows(rows)).toBe(1000); + }); + + test("accounts for synthesized stack expansion rows", () => { + const rows: DiffRow[] = [ + { + type: "stack-line", + key: "file:expanded:trailing:0:0", + fileId: "file", + hunkIndex: 0, + isExpansionRow: true, + cell: { + kind: "context", + sign: " ", + oldLineNumber: 998, + newLineNumber: 1002, + spans: [{ text: "context" }], + }, + }, + ]; + + expect(findMaxLineNumberInRows(rows, 9)).toBe(1002); + }); +}); diff --git a/src/ui/diff/codeColumns.ts b/src/ui/diff/codeColumns.ts index ebf080df..8e396669 100644 --- a/src/ui/diff/codeColumns.ts +++ b/src/ui/diff/codeColumns.ts @@ -1,4 +1,5 @@ import type { DiffFile, LayoutMode } from "../../core/types"; +import type { DiffRow } from "./pierre"; export const DIFF_CODE_TAB_WIDTH = 2; export const DIFF_RAIL_PREFIX_WIDTH = 1; @@ -47,6 +48,29 @@ export function findMaxLineNumber(file: DiffFile) { return Math.max(highest, 1); } +/** Find the widest line-number gutter needed for an already-expanded row stream. 
*/ +export function findMaxLineNumberInRows(rows: Iterable, fallback = 1) { + let highest = fallback; + + for (const row of rows) { + if (row.type === "collapsed") { + highest = Math.max(highest, row.oldRange[1], row.newRange[1]); + continue; + } + + if (row.type === "split-line") { + highest = Math.max(highest, row.left.lineNumber ?? 0, row.right.lineNumber ?? 0); + continue; + } + + if (row.type === "stack-line") { + highest = Math.max(highest, row.cell.oldLineNumber ?? 0, row.cell.newLineNumber ?? 0); + } + } + + return Math.max(highest, 1); +} + /** Split-view panes reserve one rail column on the left and one separator column in the middle. */ export function resolveSplitPaneWidths(width: number) { const usableWidth = Math.max(0, width - DIFF_RAIL_PREFIX_WIDTH - DIFF_SPLIT_SEPARATOR_WIDTH); diff --git a/src/ui/diff/expandCollapsedRows.test.ts b/src/ui/diff/expandCollapsedRows.test.ts new file mode 100644 index 00000000..daf3d200 --- /dev/null +++ b/src/ui/diff/expandCollapsedRows.test.ts @@ -0,0 +1,315 @@ +import { describe, expect, test } from "bun:test"; +import { expandCollapsedRows, gapKey, selectGapForKeyboardToggle } from "./expandCollapsedRows"; +import type { DiffRow } from "./pierre"; + +function makeCollapsedRow( + position: "before" | "trailing", + hunkIndex: number, + oldRange: [number, number], + newRange: [number, number], +): Extract { + return { + type: "collapsed", + key: `f:collapsed:${position}:${hunkIndex}`, + fileId: "f", + hunkIndex, + text: `${oldRange[1] - oldRange[0] + 1} unchanged lines`, + position, + oldRange, + newRange, + }; +} + +function makeHunkHeader(hunkIndex: number): Extract { + return { + type: "hunk-header", + key: `f:header:${hunkIndex}`, + fileId: "f", + hunkIndex, + text: `@@ hunk ${hunkIndex} @@`, + }; +} + +const SOURCE = ["alpha", "beta", "gamma", "delta", "epsilon", "zeta"].join("\n") + "\n"; + +describe("expandCollapsedRows", () => { + test("returns rows unchanged when no gaps are expanded", () => { + const rows: 
DiffRow[] = [makeCollapsedRow("before", 0, [1, 2], [1, 2]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "split", + expandedKeys: new Set(), + sourceStatus: { kind: "loaded", text: SOURCE }, + side: "new", + }); + + expect(result).toBe(rows); + }); + + test("leaves the row unchanged when expansion is requested before status arrives", () => { + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [1, 2], [1, 2]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "split", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: undefined, + side: "new", + }); + + expect(result.map((row) => row.type)).toEqual(["collapsed", "hunk-header"]); + const collapsed = result[0]; + if (!collapsed || collapsed.type !== "collapsed") { + throw new Error("expected first row to be collapsed"); + } + expect(collapsed.text.toLowerCase()).not.toContain("hide"); + expect(collapsed.text.toLowerCase()).not.toContain("loading"); + }); + + test("rewrites the label to 'Loading…' while source is being fetched", () => { + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [1, 3], [1, 3]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "split", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: { kind: "loading" }, + side: "new", + }); + + expect(result.map((row) => row.type)).toEqual(["collapsed", "hunk-header"]); + const collapsed = result[0]; + if (!collapsed || collapsed.type !== "collapsed") { + throw new Error("expected first row to be collapsed"); + } + expect(collapsed.text.toLowerCase()).toContain("loading"); + }); + + test("rewrites the label when source could not be loaded", () => { + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [1, 3], [1, 3]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "split", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: { kind: "error" }, + side: "new", + }); + + expect(result.map((row) 
=> row.type)).toEqual(["collapsed", "hunk-header"]); + const collapsed = result[0]; + if (!collapsed || collapsed.type !== "collapsed") { + throw new Error("expected first row to be collapsed"); + } + expect(collapsed.text.toLowerCase()).toContain("could not load"); + }); + + test("inserts split-line context rows after the expanded collapsed row", () => { + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [1, 3], [1, 3]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "split", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: { kind: "loaded", text: SOURCE }, + side: "new", + }); + + expect(result.length).toBe(rows.length + 3); + expect(result[0]?.type).toBe("collapsed"); + + const inserted = result.slice(1, 4); + expect(inserted.every((row) => row.type === "split-line")).toBe(true); + + const first = inserted[0]; + if (!first || first.type !== "split-line") { + throw new Error("expected split-line context rows"); + } + + expect(first.left.kind).toBe("context"); + expect(first.right.kind).toBe("context"); + expect(first.left.lineNumber).toBe(1); + expect(first.right.lineNumber).toBe(1); + expect(first.left.spans[0]?.text).toBe("alpha"); + expect(first.right.spans[0]?.text).toBe("alpha"); + + const third = inserted[2]; + if (!third || third.type !== "split-line") { + throw new Error("expected three context rows"); + } + expect(third.left.lineNumber).toBe(3); + expect(third.right.spans[0]?.text).toBe("gamma"); + }); + + test("inserts stack-line context rows when layout is stack", () => { + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [2, 3], [2, 3]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "stack", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: { kind: "loaded", text: SOURCE }, + side: "new", + }); + + const inserted = result.slice(1, 3); + expect(inserted.every((row) => row.type === "stack-line")).toBe(true); + + const first = inserted[0]; + if (!first 
|| first.type !== "stack-line") { + throw new Error("expected stack-line context rows"); + } + expect(first.cell.kind).toBe("context"); + expect(first.cell.oldLineNumber).toBe(2); + expect(first.cell.newLineNumber).toBe(2); + expect(first.cell.spans[0]?.text).toBe("beta"); + }); + + test("changes the collapsed-row label to indicate expansion", () => { + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [1, 2], [1, 2]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "split", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: { kind: "loaded", text: SOURCE }, + side: "new", + }); + + const collapsed = result[0]; + if (!collapsed || collapsed.type !== "collapsed") { + throw new Error("expected first row to be the collapsed marker"); + } + expect(collapsed.text.toLowerCase()).toContain("hide"); + }); + + test("expands trailing gaps from the requested side", () => { + const rows: DiffRow[] = [makeHunkHeader(0), makeCollapsedRow("trailing", 0, [4, 6], [4, 6])]; + + const result = expandCollapsedRows(rows, { + layout: "stack", + expandedKeys: new Set([gapKey("trailing", 0)]), + sourceStatus: { kind: "loaded", text: SOURCE }, + side: "new", + }); + + expect(result.length).toBe(rows.length + 3); + const last = result[result.length - 1]; + if (!last || last.type !== "stack-line") { + throw new Error("expected synthesized stack-line rows after the trailing collapsed row"); + } + expect(last.cell.spans[0]?.text).toBe("zeta"); + expect(last.cell.newLineNumber).toBe(6); + }); + + test("uses the old-side range when side is `old`", () => { + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [2, 3], [10, 11]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "split", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: { kind: "loaded", text: SOURCE }, + side: "old", + }); + + const inserted = result.slice(1, 3); + const first = inserted[0]; + if (!first || first.type !== "split-line") { + 
throw new Error("expected split-line context rows"); + } + expect(first.left.lineNumber).toBe(2); + expect(first.right.lineNumber).toBe(10); + expect(first.left.spans[0]?.text).toBe("beta"); + expect(first.right.spans[0]?.text).toBe("beta"); + }); + + test("normalizes CRLF so expanded rows do not carry a stray carriage return", () => { + const sourceWithCrlf = "alpha\r\nbeta\r\ngamma\r\n"; + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [1, 2], [1, 2]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "stack", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: { kind: "loaded", text: sourceWithCrlf }, + side: "new", + }); + + const inserted = result[1]; + if (!inserted || inserted.type !== "stack-line") { + throw new Error("expected stack-line context row"); + } + expect(inserted.cell.spans[0]?.text).toBe("alpha"); + }); + + test("expands tabs in source lines so terminal cells stay aligned", () => { + const sourceWithTab = "a\tb\nfollow\n"; + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [1, 1], [1, 1]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "stack", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: { kind: "loaded", text: sourceWithTab }, + side: "new", + }); + + const inserted = result[1]; + if (!inserted || inserted.type !== "stack-line") { + throw new Error("expected one stack-line row"); + } + expect(inserted.cell.spans[0]?.text.includes("\t")).toBe(false); + }); + + test("uses caller-provided spans for expanded source lines", () => { + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [2, 3], [2, 3]), makeHunkHeader(0)]; + const calls: Array<{ line: string | undefined; sourceLineNumber: number }> = []; + + const result = expandCollapsedRows(rows, { + layout: "stack", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: { kind: "loaded", text: SOURCE }, + sourceLineSpans: (line, sourceLineNumber) => { + calls.push({ line, 
sourceLineNumber }); + return [{ text: `highlighted:${line ?? ""}`, fg: "#abcdef" }]; + }, + side: "new", + }); + + expect(calls).toEqual([ + { line: "beta", sourceLineNumber: 1 }, + { line: "gamma", sourceLineNumber: 2 }, + ]); + + const inserted = result[1]; + if (!inserted || inserted.type !== "stack-line") { + throw new Error("expected stack-line context row"); + } + expect(inserted.cell.spans).toEqual([{ text: "highlighted:beta", fg: "#abcdef" }]); + }); +}); + +describe("selectGapForKeyboardToggle", () => { + test("returns the leading gap of the selected hunk when one exists", () => { + const hunks = [{ collapsedBefore: 3 }, { collapsedBefore: 0 }]; + expect(selectGapForKeyboardToggle(hunks, 0, false)).toBe(gapKey("before", 0)); + }); + + test("falls forward to the next hunk's leading gap when the selected hunk has none", () => { + const hunks = [{ collapsedBefore: 0 }, { collapsedBefore: 5 }, { collapsedBefore: 0 }]; + expect(selectGapForKeyboardToggle(hunks, 0, false)).toBe(gapKey("before", 1)); + }); + + test("falls back to the trailing gap when no later leading gap exists", () => { + const hunks = [{ collapsedBefore: 0 }, { collapsedBefore: 0 }]; + expect(selectGapForKeyboardToggle(hunks, 0, true)).toBe(gapKey("trailing", 1)); + }); + + test("returns null when no leading or trailing gap is reachable", () => { + const hunks = [{ collapsedBefore: 0 }, { collapsedBefore: 0 }]; + expect(selectGapForKeyboardToggle(hunks, 0, false)).toBeNull(); + }); + + test("returns null for an empty hunk list", () => { + expect(selectGapForKeyboardToggle([], 0, false)).toBeNull(); + }); + + test("clamps a stale selectedHunkIndex into the valid range", () => { + const hunks = [{ collapsedBefore: 4 }, { collapsedBefore: 0 }]; + // Stale index 99 clamps to the last hunk (1); that hunk has no leading gap, + // so the trailing gap is the only reachable target. 
+ expect(selectGapForKeyboardToggle(hunks, 99, true)).toBe(gapKey("trailing", 1)); + }); +}); diff --git a/src/ui/diff/expandCollapsedRows.ts b/src/ui/diff/expandCollapsedRows.ts new file mode 100644 index 00000000..c1abed3a --- /dev/null +++ b/src/ui/diff/expandCollapsedRows.ts @@ -0,0 +1,230 @@ +import { expandDiffTabs } from "./codeColumns"; +import type { + CollapsedGapPosition, + DiffRow, + RenderSpan, + SplitLineCell, + StackLineCell, +} from "./pierre"; + +export type ExpansionLayout = "split" | "stack"; + +/** Per-file load status for the source text used to fill expanded gaps. */ +export type FileSourceStatus = + | { kind: "loading" } + | { kind: "loaded"; text: string } + | { kind: "error" }; + +export interface ExpandCollapsedRowsOptions { + layout: ExpansionLayout; + expandedKeys: ReadonlySet; + sourceStatus: FileSourceStatus | undefined; + /** Optional syntax-aware span resolver for a zero-based source line. */ + sourceLineSpans?: (line: string | undefined, sourceLineNumber: number) => RenderSpan[]; + // Whose side's line indices in the source text. Defaults to "new". + // For deleted files (no new side) callers should pass "old" instead. + side?: "old" | "new"; +} + +/** Stable identifier for one collapsed gap inside a single file. */ +export function gapKey(position: CollapsedGapPosition, hunkIndex: number) { + return `${position}:${hunkIndex}`; +} + +/** + * Pick the gap key that the keyboard shortcut should toggle for the selected + * hunk. Looks at the leading gap of the current hunk first, then the leading + * gaps of subsequent hunks, and finally the trailing gap of the file. Returns + * `null` when no reachable gap exists. 
+ */ +export function selectGapForKeyboardToggle( + hunks: ReadonlyArray<{ collapsedBefore: number }>, + selectedHunkIndex: number, + hasTrailingGap: boolean, +): string | null { + if (hunks.length === 0) { + return null; + } + + const startIndex = Math.max(0, Math.min(selectedHunkIndex, hunks.length - 1)); + for (let index = startIndex; index < hunks.length; index += 1) { + if ((hunks[index]?.collapsedBefore ?? 0) > 0) { + return gapKey("before", index); + } + } + + if (hasTrailingGap) { + return gapKey("trailing", hunks.length - 1); + } + + return null; +} + +function expandedRowText(lineCount: number) { + return `Hide ${lineCount} unchanged ${lineCount === 1 ? "line" : "lines"}`; +} + +function loadingRowText(lineCount: number) { + return `Loading ${lineCount} unchanged ${lineCount === 1 ? "line" : "lines"}…`; +} + +function errorRowText(lineCount: number) { + return `Could not load ${lineCount} unchanged ${lineCount === 1 ? "line" : "lines"}`; +} + +function sliceLines(sourceText: string) { + // Normalize CRLF so Windows-authored sources don't leak `\r` into rendered spans. + const normalized = sourceText.replaceAll("\r\n", "\n"); + const trimmed = normalized.endsWith("\n") ? normalized.slice(0, -1) : normalized; + return trimmed.length === 0 ? [] : trimmed.split("\n"); +} + +function spansFor(line: string | undefined): RenderSpan[] { + const text = expandDiffTabs(line ?? ""); + return text.length > 0 ? 
[{ text }] : []; +} + +function buildSplitContextRow( + fileId: string, + hunkIndex: number, + position: CollapsedGapPosition, + index: number, + oldLineNumber: number, + newLineNumber: number, + spans: RenderSpan[], +): Extract { + const cell = (lineNumber: number): SplitLineCell => ({ + kind: "context", + sign: " ", + lineNumber, + spans, + }); + + return { + type: "split-line", + key: `${fileId}:expanded:${position}:${hunkIndex}:${index}`, + fileId, + hunkIndex, + left: cell(oldLineNumber), + right: cell(newLineNumber), + isExpansionRow: true, + }; +} + +function buildStackContextRow( + fileId: string, + hunkIndex: number, + position: CollapsedGapPosition, + index: number, + oldLineNumber: number, + newLineNumber: number, + spans: RenderSpan[], +): Extract { + const cell: StackLineCell = { + kind: "context", + sign: " ", + oldLineNumber, + newLineNumber, + spans, + }; + + return { + type: "stack-line", + key: `${fileId}:expanded:${position}:${hunkIndex}:${index}`, + fileId, + hunkIndex, + cell, + isExpansionRow: true, + }; +} + +/** + * Replace each expanded collapsed row with the actual unchanged file lines it + * represents. The original collapsed row stays in place as a status row, and + * synthesized context rows follow it when source has loaded. When source is + * still loading or failed, only the row label changes so the user sees the + * state of the request. + */ +export function expandCollapsedRows( + rows: DiffRow[], + options: ExpandCollapsedRowsOptions, +): DiffRow[] { + const { layout, expandedKeys, sourceLineSpans, sourceStatus, side = "new" } = options; + + if (expandedKeys.size === 0) { + return rows; + } + + const sourceLines = sourceStatus?.kind === "loaded" ? 
sliceLines(sourceStatus.text) : []; + const result: DiffRow[] = []; + + for (const row of rows) { + if (row.type !== "collapsed") { + result.push(row); + continue; + } + + const key = gapKey(row.position, row.hunkIndex); + if (!expandedKeys.has(key)) { + result.push(row); + continue; + } + + const range = side === "old" ? row.oldRange : row.newRange; + const lineCount = Math.max(0, range[1] - range[0] + 1); + + if (sourceStatus?.kind === "loading") { + result.push({ ...row, text: loadingRowText(lineCount) }); + continue; + } + + if (sourceStatus?.kind === "error") { + result.push({ ...row, text: errorRowText(lineCount) }); + continue; + } + + if (sourceStatus === undefined) { + // expandedKeys can briefly contain a key before the controller's load + // status is committed; keep the original label until status arrives. + result.push(row); + continue; + } + + result.push({ + ...row, + text: expandedRowText(lineCount), + }); + + for (let offset = 0; offset < lineCount; offset += 1) { + const oldLineNumber = row.oldRange[0] + offset; + const newLineNumber = row.newRange[0] + offset; + const sourceLineNumber = (side === "old" ? oldLineNumber : newLineNumber) - 1; + const text = sourceLines[sourceLineNumber]; + const spans = sourceLineSpans?.(text, sourceLineNumber) ?? spansFor(text); + + result.push( + layout === "split" + ? 
buildSplitContextRow( + row.fileId, + row.hunkIndex, + row.position, + offset, + oldLineNumber, + newLineNumber, + spans, + ) + : buildStackContextRow( + row.fileId, + row.hunkIndex, + row.position, + offset, + oldLineNumber, + newLineNumber, + spans, + ), + ); + } + } + + return result; +} diff --git a/src/ui/diff/pierre.test.ts b/src/ui/diff/pierre.test.ts index cb64908f..7e04d56c 100644 --- a/src/ui/diff/pierre.test.ts +++ b/src/ui/diff/pierre.test.ts @@ -1,7 +1,14 @@ import { describe, expect, test } from "bun:test"; import { parseDiffFromFile } from "@pierre/diffs"; import type { DiffFile } from "../../core/types"; -import { buildSplitRows, buildStackRows, loadHighlightedDiff, type DiffRow } from "./pierre"; +import { + buildSplitRows, + buildStackRows, + loadHighlightedDiff, + loadHighlightedSourceLines, + spansForHighlightedSourceLine, + type DiffRow, +} from "./pierre"; import { resolveTheme } from "../themes"; function createDiffFile(): DiffFile { @@ -181,6 +188,27 @@ describe("Pierre diff rows", () => { } }); + test("builds syntax spans for highlighted full-source lines", async () => { + const file = createDiffFile(); + const theme = resolveTheme("midnight", null); + const text = "export const hiddenMarker = true;\n"; + const highlighted = await loadHighlightedSourceLines({ + file, + text, + appearance: theme.appearance, + }); + const spans = spansForHighlightedSourceLine( + "export const hiddenMarker = true;", + highlighted.lines[0], + theme, + ); + + expect(spans.map((span) => span.text).join("")).toBe("export const hiddenMarker = true;"); + expect(spans.some((span) => span.text.includes("export") && typeof span.fg === "string")).toBe( + true, + ); + }); + test("remaps Pierre markdown reds and greens away from diff-semantic hues", async () => { const file = createMarkdownDiffFile(); @@ -225,6 +253,87 @@ describe("Pierre diff rows", () => { } }); + test("collapsed rows carry line ranges and position on both layouts", () => { + // Fixture: a 30-line file 
with a single change at line 5, context=3. + // Pierre produces one hunk covering old/new lines 2..8 (1 change + 3 lines of + // surrounding context). One leading gap (line 1) and one trailing gap + // (lines 9..30) should appear as collapsed rows with explicit ranges. + const before = Array.from({ length: 30 }, (_, i) => `line ${i + 1}\n`).join(""); + const after = before.replace("line 5\n", "line 5 modified\n"); + + const metadata = parseDiffFromFile( + { name: "f.txt", contents: before, cacheKey: "single-change-before" }, + { name: "f.txt", contents: after, cacheKey: "single-change-after" }, + { context: 3 }, + true, + ); + + const file: DiffFile = { + id: "single-change", + path: "f.txt", + patch: "", + stats: { additions: 1, deletions: 1 }, + metadata, + agent: null, + }; + + const theme = resolveTheme("midnight", null); + + for (const buildRows of [buildSplitRows, buildStackRows]) { + const rows = buildRows(file, null, theme); + const collapsedRows = rows.filter( + (row): row is Extract => row.type === "collapsed", + ); + + const leading = collapsedRows.find((row) => row.position === "before"); + const trailing = collapsedRows.find((row) => row.position === "trailing"); + + expect(leading).toBeDefined(); + expect(trailing).toBeDefined(); + + expect(leading?.oldRange).toEqual([1, 1]); + expect(leading?.newRange).toEqual([1, 1]); + expect(trailing?.oldRange?.[0]).toBe(9); + expect(trailing?.newRange?.[0]).toBe(9); + } + }); + + test("between-hunks collapsed row spans the unchanged region between two hunks", () => { + // Fixture: changes at lines 5 and 25 with context=3 produce two hunks + // separated by lines 9..21 of unchanged context. 
+ const before = Array.from({ length: 30 }, (_, i) => `line ${i + 1}\n`).join(""); + const after = before + .replace("line 5\n", "line 5 changed\n") + .replace("line 25\n", "line 25 changed\n"); + + const metadata = parseDiffFromFile( + { name: "f.txt", contents: before, cacheKey: "two-hunks-before" }, + { name: "f.txt", contents: after, cacheKey: "two-hunks-after" }, + { context: 3 }, + true, + ); + + const file: DiffFile = { + id: "two-hunks", + path: "f.txt", + patch: "", + stats: { additions: 2, deletions: 2 }, + metadata, + agent: null, + }; + + const theme = resolveTheme("midnight", null); + const rows = buildSplitRows(file, null, theme); + const between = rows.find( + (row): row is Extract => + row.type === "collapsed" && row.position === "before" && row.hunkIndex === 1, + ); + + expect(between).toBeDefined(); + expect(between?.oldRange).toEqual([9, 21]); + expect(between?.newRange).toEqual([9, 21]); + }); + test("keeps reserved-color remaps isolated across dark themes", async () => { const file = createMarkdownDiffFile(); const highlighted = await loadHighlightedDiff(file, "dark"); diff --git a/src/ui/diff/pierre.ts b/src/ui/diff/pierre.ts index 7d99b3d2..44e56a91 100644 --- a/src/ui/diff/pierre.ts +++ b/src/ui/diff/pierre.ts @@ -3,6 +3,8 @@ import { getHighlighterOptions, getSharedHighlighter, renderDiffWithHighlighter, + renderFileWithHighlighter, + type FileContents, type FileDiffMetadata, } from "@pierre/diffs"; import { formatHunkHeader } from "../../core/hunkHeader"; @@ -67,6 +69,10 @@ export interface HighlightedDiffCode { additionLines: Array; } +export interface HighlightedSourceCode { + lines: Array; +} + export interface RenderSpan { text: string; fg?: string; @@ -88,9 +94,25 @@ export interface StackLineCell { spans: RenderSpan[]; } +export type CollapsedGapPosition = "before" | "trailing"; + export type DiffRow = | { - type: "collapsed" | "hunk-header"; + type: "collapsed"; + key: string; + fileId: string; + hunkIndex: number; + text: string; + 
// Where this gap sits relative to the surrounding hunks; "before" attaches to + // the gap leading into hunkIndex, "trailing" sits after the final hunk. + position: CollapsedGapPosition; + // 1-based inclusive file-line ranges this gap covers on each side. Expansion + // uses these to slice the file contents that fill the gap. + oldRange: [number, number]; + newRange: [number, number]; + } + | { + type: "hunk-header"; key: string; fileId: string; hunkIndex: number; @@ -103,6 +125,10 @@ export type DiffRow = hunkIndex: number; left: SplitLineCell; right: SplitLineCell; + // True when this row was synthesized to fill an expanded collapsed gap. + // Expanded rows carry the neighbor hunk's index for ordering but must not + // count toward that hunk's bounds or anchor position. + isExpansionRow?: true; } | { type: "stack-line"; @@ -110,6 +136,7 @@ export type DiffRow = fileId: string; hunkIndex: number; cell: StackLineCell; + isExpansionRow?: true; }; /** Replace tabs with fixed spaces so terminal cell widths stay predictable. */ @@ -402,13 +429,40 @@ function makeStackCell( } satisfies StackLineCell; } -/** Describe a collapsed unchanged region between visible hunks. */ +/** Describe one collapsed unchanged region in the diff stream. */ function collapsedRowText(lines: number) { return `${lines} unchanged ${lines === 1 ? "line" : "lines"}`; } +/** Compute the file-line ranges covered by the gap leading into one hunk. */ +function leadingCollapsedRanges(hunk: FileDiffMetadata["hunks"][number]) { + return { + oldRange: [hunk.deletionStart - hunk.collapsedBefore, hunk.deletionStart - 1] as [ + number, + number, + ], + newRange: [hunk.additionStart - hunk.collapsedBefore, hunk.additionStart - 1] as [ + number, + number, + ], + }; +} + +/** Compute the file-line ranges covered by the trailing gap after the final hunk. 
*/ +function trailingCollapsedRanges( + lastHunk: FileDiffMetadata["hunks"][number], + trailingLines: number, +) { + const oldStart = lastHunk.deletionStart + lastHunk.deletionCount; + const newStart = lastHunk.additionStart + lastHunk.additionCount; + return { + oldRange: [oldStart, oldStart + trailingLines - 1] as [number, number], + newRange: [newStart, newStart + trailingLines - 1] as [number, number], + }; +} + /** Count hidden unchanged lines after the final visible hunk when Pierre omits them. */ -function trailingCollapsedLines(metadata: FileDiffMetadata) { +export function trailingCollapsedLines(metadata: FileDiffMetadata) { const lastHunk = metadata.hunks.at(-1); if (!lastHunk || metadata.isPartial) { return 0; @@ -450,10 +504,10 @@ async function prepareHighlighter( } /** Queue highlight rendering so startup work stays serialized in request order. */ -function queueHighlightedDiff(run: () => HighlightedDiffCode) { +function queueHighlightedWork(run: () => T) { const queued = queuedHighlightWork.then( () => - new Promise((resolve, reject) => { + new Promise((resolve, reject) => { queueMicrotask(() => { try { resolve(run()); @@ -472,6 +526,26 @@ function queueHighlightedDiff(run: () => HighlightedDiffCode) { return queued; } +/** Normalize source text the same way expanded-row slicing does before highlighting. */ +function normalizeSourceText(text: string) { + return text.replaceAll("\r\n", "\n"); +} + +/** Build Pierre file contents for a full-source highlight request. */ +function sourceFileContents(file: DiffFile, text: string, language: string | undefined) { + const contents: FileContents = { + name: file.path, + contents: normalizeSourceText(text), + cacheKey: `${file.id}:${file.path}:${language ?? 
""}:${text.length}`, + }; + + if (language) { + contents.lang = language as FileContents["lang"]; + } + + return contents; +} + /** * Pierre highlights unchanged context on both diff sides even though split/stack rendering later * cares only about the styled code spans. Reuse one side's line node for both arrays so identical @@ -517,7 +591,7 @@ export async function loadHighlightedDiff( ): Promise { try { const highlighter = await prepareHighlighter(file.language, appearance); - return queueHighlightedDiff(() => { + return queueHighlightedWork(() => { const highlighted = renderDiffWithHighlighter( file.metadata, highlighter, @@ -530,7 +604,7 @@ export async function loadHighlightedDiff( }); } catch { const highlighter = await prepareHighlighter("text", appearance); - return queueHighlightedDiff(() => { + return queueHighlightedWork(() => { const highlighted = renderDiffWithHighlighter( { ...file.metadata, lang: "text" }, highlighter, @@ -544,6 +618,63 @@ export async function loadHighlightedDiff( } } +/** Highlight a full source file for unchanged lines synthesized during gap expansion. 
*/ +export async function loadHighlightedSourceLines({ + file, + text, + appearance = "dark", +}: { + file: DiffFile; + text: string; + appearance?: AppTheme["appearance"]; +}): Promise { + try { + const highlighter = await prepareHighlighter(file.language, appearance); + return queueHighlightedWork(() => { + const highlighted = renderFileWithHighlighter( + sourceFileContents(file, text, file.language), + highlighter, + pierreRenderOptions(appearance), + ); + return { + lines: highlighted.code as Array, + }; + }); + } catch { + const highlighter = await prepareHighlighter("text", appearance); + return queueHighlightedWork(() => { + const highlighted = renderFileWithHighlighter( + sourceFileContents(file, text, "text"), + highlighter, + pierreRenderOptions(appearance), + ); + return { + lines: highlighted.code as Array, + }; + }); + } +} + +/** Convert one highlighted full-source line into the spans used by expanded context rows. */ +export function spansForHighlightedSourceLine( + rawLine: string | undefined, + highlightedLine: HastNode | undefined, + theme: AppTheme, +): RenderSpan[] { + if (highlightedLine === undefined) { + const fallbackText = cleanDiffLine(rawLine); + return fallbackText.length > 0 ? [{ text: fallbackText }] : []; + } + + const spans = flattenHighlightedLine(highlightedLine, theme, theme.contextContentBg); + if (spans.length > 0) { + return spans; + } + + const fallbackText = cleanDiffLine(rawLine); + return fallbackText.length > 0 ? [{ text: fallbackText }] : []; +} + /** Expand Pierre metadata into the flat split-view row stream consumed by the renderer. 
*/ export function buildSplitRows( file: DiffFile, @@ -562,6 +693,8 @@ export function buildSplitRows( fileId: file.id, hunkIndex, text: collapsedRowText(hunk.collapsedBefore), + position: "before", + ...leadingCollapsedRanges(hunk), }); } @@ -650,13 +783,16 @@ export function buildSplitRows( } const trailingLines = trailingCollapsedLines(file.metadata); - if (trailingLines > 0) { + const lastHunk = file.metadata.hunks.at(-1); + if (trailingLines > 0 && lastHunk) { rows.push({ type: "collapsed", key: `${file.id}:collapsed:trailing`, fileId: file.id, - hunkIndex: Math.max(file.metadata.hunks.length - 1, 0), + hunkIndex: file.metadata.hunks.length - 1, text: collapsedRowText(trailingLines), + position: "trailing", + ...trailingCollapsedRanges(lastHunk, trailingLines), }); } @@ -681,6 +817,8 @@ export function buildStackRows( fileId: file.id, hunkIndex, text: collapsedRowText(hunk.collapsedBefore), + position: "before", + ...leadingCollapsedRanges(hunk), }); } @@ -765,13 +903,16 @@ export function buildStackRows( } const trailingLines = trailingCollapsedLines(file.metadata); - if (trailingLines > 0) { + const lastHunk = file.metadata.hunks.at(-1); + if (trailingLines > 0 && lastHunk) { rows.push({ type: "collapsed", key: `${file.id}:stack:collapsed:trailing`, fileId: file.id, - hunkIndex: Math.max(file.metadata.hunks.length - 1, 0), + hunkIndex: file.metadata.hunks.length - 1, text: collapsedRowText(trailingLines), + position: "trailing", + ...trailingCollapsedRanges(lastHunk, trailingLines), }); } diff --git a/src/ui/diff/plannedReviewRows.ts b/src/ui/diff/plannedReviewRows.ts index 5d8a9856..28d671d9 100644 --- a/src/ui/diff/plannedReviewRows.ts +++ b/src/ui/diff/plannedReviewRows.ts @@ -25,10 +25,25 @@ export interface PlannedHunkBounds extends VerticalBounds { export type PlannedSectionGeometry = SectionGeometry; /** Return whether this planned row should count toward a hunk's own visible extent. 
*/ -function rowContributesToHunkBounds(row: PlannedReviewRow) { - // Collapsed gap rows belong between hunks, so they affect total section height but not a hunk's - // own visible extent. - return !(row.kind === "diff-row" && row.row.type === "collapsed"); +export function plannedReviewRowContributesToHunkBounds(row: PlannedReviewRow) { + if (row.kind !== "diff-row") { + return true; + } + + // Collapsed gap rows sit outside hunk bodies, so they affect total section height but not a + // hunk's own visible extent. Expanded rows share that property: they fill a gap, not a hunk. + if (row.row.type === "collapsed") { + return false; + } + + if ( + (row.row.type === "split-line" || row.row.type === "stack-line") && + row.row.isExpansionRow === true + ) { + return false; + } + + return true; } /** Measure how many terminal rows one planned review row will occupy once rendered. */ @@ -86,7 +101,7 @@ export function measurePlannedSectionGeometry( const rowHeight = plannedReviewRowHeight(row, options); - if (rowHeight > 0 && rowContributesToHunkBounds(row)) { + if (rowHeight > 0 && plannedReviewRowContributesToHunkBounds(row)) { const rowId = reviewRowId(row.key); const existingBounds = hunkBounds.get(row.hunkIndex); diff --git a/src/ui/diff/reviewRenderPlan.ts b/src/ui/diff/reviewRenderPlan.ts index ba2e89ed..56b94d65 100644 --- a/src/ui/diff/reviewRenderPlan.ts +++ b/src/ui/diff/reviewRenderPlan.ts @@ -95,11 +95,7 @@ function contextLineStableKey(hunkIndex: number, oldLineNumber?: number, newLine /** Resolve the stable anchor keys for one rendered diff row across split and stack layouts. */ function diffRowStableKeys(row: DiffRow) { if (row.type === "collapsed") { - return [ - row.key.endsWith(":trailing") - ? 
`meta:collapsed:trailing:${row.hunkIndex}` - : `meta:collapsed:before:${row.hunkIndex}`, - ]; + return [`meta:collapsed:${row.position}:${row.hunkIndex}`]; } if (row.type === "hunk-header") { @@ -129,10 +125,6 @@ function diffRowStableKeys(row: DiffRow) { ]); } - if (row.type !== "stack-line") { - return [`row:${row.key}`]; - } - const contextKey = contextLineStableKey( row.hunkIndex, row.cell.oldLineNumber, @@ -314,7 +306,14 @@ function rowCanAnchorHunk(row: DiffRow, showHunkHeaders: boolean) { return row.type === "hunk-header"; } - return row.type !== "collapsed" && row.type !== "hunk-header"; + if (row.type === "collapsed" || row.type === "hunk-header") { + return false; + } + + // Synthesized collapsed-gap rows carry the neighbor hunk's index for placement, + // but anchoring on them would scroll navigation to the top of the expanded gap + // instead of the actual changed code. + return row.isExpansionRow !== true; } /** diff --git a/src/ui/lib/diffSectionGeometry.test.ts b/src/ui/lib/diffSectionGeometry.test.ts index 9c085bd8..2758d33c 100644 --- a/src/ui/lib/diffSectionGeometry.test.ts +++ b/src/ui/lib/diffSectionGeometry.test.ts @@ -118,4 +118,143 @@ describe("measureDiffSectionGeometry", () => { expect(metrics.rowBounds).toHaveLength(1); expect(metrics.rowBounds[0]?.key).toContain(":header:"); }); + + test("expanding a collapsed gap grows section height by the synthesized row count", () => { + // 30-line file with one change at line 5; trailing gap covers most of the file. 
+ const before = Array.from({ length: 30 }, (_, i) => `line ${i + 1}\n`).join(""); + const after = before.replace("line 5\n", "line 5 modified\n"); + const file = createTestDiffFile({ + after, + before, + id: "expand", + path: "expand.txt", + }); + + const collapsedGeometry = measureDiffSectionGeometry(file, "split", true, theme); + const expandedGeometry = measureDiffSectionGeometry( + file, + "split", + true, + theme, + [], + 0, + true, + false, + new Set(["trailing:0"]), + { kind: "loaded", text: after }, + ); + + const synthesizedRowCount = + expandedGeometry.rowBounds.length - collapsedGeometry.rowBounds.length; + expect(synthesizedRowCount).toBeGreaterThan(0); + expect(expandedGeometry.bodyHeight).toBe(collapsedGeometry.bodyHeight + synthesizedRowCount); + // The leading hunk's anchor stays put because expansion happens after it. + expect(expandedGeometry.hunkAnchorRows.get(0)).toBe(collapsedGeometry.hunkAnchorRows.get(0)); + // The trailing gap belongs to neither hunk: expanding it must not stretch + // the preceding hunk's measured bounds. + expect(expandedGeometry.hunkBounds.get(0)?.height).toBe( + collapsedGeometry.hunkBounds.get(0)?.height, + ); + }); + + test("expanding a leading between-hunk gap does not shift the following hunk's anchor or bounds", () => { + // File with one change near the end so a long leading gap precedes hunk 0. + const before = Array.from({ length: 40 }, (_, i) => `line ${i + 1}\n`).join(""); + const after = before.replace("line 35\n", "line 35 modified\n"); + const file = createTestDiffFile({ + after, + before, + id: "expand-leading", + path: "leading.txt", + }); + + // Hide hunk headers so any "anchorable" row preceding the hunk's first + // diff line can win the anchor — that's exactly the path the bug fix + // needs to guard against. 
+ const showHunkHeaders = false; + const collapsedGeometry = measureDiffSectionGeometry( + file, + "split", + showHunkHeaders, + theme, + [], + 0, + true, + false, + ); + const expandedGeometry = measureDiffSectionGeometry( + file, + "split", + showHunkHeaders, + theme, + [], + 0, + true, + false, + new Set(["before:0"]), + { kind: "loaded", text: after }, + ); + + const synthesizedRowCount = + expandedGeometry.rowBounds.length - collapsedGeometry.rowBounds.length; + expect(synthesizedRowCount).toBeGreaterThan(0); + // The total body grows by the synthesized row count. + expect(expandedGeometry.bodyHeight).toBe(collapsedGeometry.bodyHeight + synthesizedRowCount); + // But hunk 0's bounds describe only the changed code — they must not + // grow when the gap before it is expanded. + expect(expandedGeometry.hunkBounds.get(0)?.height).toBe( + collapsedGeometry.hunkBounds.get(0)?.height, + ); + // And hunk 0's anchor lands on the first real diff row of the hunk, + // pushed down by exactly the synthesized expansion rows, not on the + // first expanded gap line itself. + const collapsedAnchor = collapsedGeometry.hunkAnchorRows.get(0) ?? 0; + const expandedAnchor = expandedGeometry.hunkAnchorRows.get(0) ?? 
0; + expect(expandedAnchor).toBe(collapsedAnchor + synthesizedRowCount); + }); + + test("expanded trailing context uses the expanded line-number width for wrap geometry", () => { + const beforeLines = Array.from({ length: 1000 }, () => "x"); + beforeLines[4] = "old"; + beforeLines[999] = "abcdefghij"; + const afterLines = [...beforeLines]; + afterLines[4] = "new"; + const before = lines(...beforeLines); + const after = lines(...afterLines); + const file = createTestDiffFile({ + after, + before, + id: "large-expanded-gutter", + path: "large-expanded-gutter.txt", + }); + const expandedKeys = new Set(["trailing:0"]); + const sourceStatus = { kind: "loaded", text: after } as const; + + const nowrapGeometry = measureDiffSectionGeometry( + file, + "stack", + true, + theme, + [], + 20, + true, + false, + expandedKeys, + sourceStatus, + ); + const wrappedGeometry = measureDiffSectionGeometry( + file, + "stack", + true, + theme, + [], + 20, + true, + true, + expandedKeys, + sourceStatus, + ); + + expect(wrappedGeometry.bodyHeight).toBe(nowrapGeometry.bodyHeight + 1); + }); }); diff --git a/src/ui/lib/diffSectionGeometry.ts b/src/ui/lib/diffSectionGeometry.ts index f97ad63e..b2508634 100644 --- a/src/ui/lib/diffSectionGeometry.ts +++ b/src/ui/lib/diffSectionGeometry.ts @@ -1,15 +1,21 @@ import type { DiffFile, LayoutMode } from "../../core/types"; import { measureAgentInlineNoteHeight } from "../components/panes/AgentInlineNote"; -import { findMaxLineNumber } from "../diff/codeColumns"; +import { findMaxLineNumber, findMaxLineNumberInRows } from "../diff/codeColumns"; +import { expandCollapsedRows, type FileSourceStatus } from "../diff/expandCollapsedRows"; import { buildSplitRows, buildStackRows } from "../diff/pierre"; import { measureRenderedRowHeight } from "../diff/renderRows"; -import type { PlannedHunkBounds } from "../diff/plannedReviewRows"; +import { + plannedReviewRowContributesToHunkBounds, + type PlannedHunkBounds, +} from "../diff/plannedReviewRows"; import { 
buildReviewRenderPlan, type PlannedReviewRow } from "../diff/reviewRenderPlan"; import type { SectionGeometry, VerticalBounds } from "./diffSpatial"; import { reviewRowId } from "./ids"; import type { VisibleAgentNote } from "./agentAnnotations"; import type { AppTheme } from "../themes"; +const EMPTY_EXPANDED_GAP_KEYS: ReadonlySet = new Set(); + export interface DiffSectionRowBounds extends VerticalBounds { key: string; stableKey: string; @@ -35,17 +41,48 @@ function buildBasePlannedRows( showHunkHeaders: boolean, theme: AppTheme, visibleAgentNotes: VisibleAgentNote[], + expandedKeys: ReadonlySet, + sourceStatus: FileSourceStatus | undefined, ) { - const rows = + const baseRows = layout === "split" ? buildSplitRows(file, null, theme) : buildStackRows(file, null, theme); + const side = file.metadata.type === "deleted" ? "old" : "new"; + const rows = expandCollapsedRows(baseRows, { + layout, + expandedKeys, + sourceStatus, + side, + }); - return buildReviewRenderPlan({ - fileId: file.id, + return { + plannedRows: buildReviewRenderPlan({ + fileId: file.id, + rows, + selectedHunkIndex: -1, + showHunkHeaders, + visibleAgentNotes, + }), rows, - selectedHunkIndex: -1, - showHunkHeaders, - visibleAgentNotes, - }); + }; +} + +/** Stable suffix that captures expansion state for the geometry cache key. */ +function expansionCacheKey( + expandedKeys: ReadonlySet, + sourceStatus: FileSourceStatus | undefined, +) { + if (expandedKeys.size === 0) { + return ""; + } + + const sortedKeys = [...expandedKeys].sort().join(","); + const statusKey = + sourceStatus === undefined + ? "pending" + : sourceStatus.kind === "loaded" + ? `loaded:${sourceStatus.text.length}` + : sourceStatus.kind; + return `:${sortedKeys}:${statusKey}`; } /** Measure how many terminal rows one planned review row occupies for the current view settings. */ @@ -83,13 +120,6 @@ function plannedRowHeight( ); } -/** Return whether a measured review row should count toward the visible extent of its hunk. 
*/ -function rowContributesToHunkBounds(row: PlannedReviewRow) { - // Collapsed gap rows sit between hunks, so they affect total section height but should not make a - // selected hunk look taller than the rows that actually belong to it. - return !(row.kind === "diff-row" && row.row.type === "collapsed"); -} - /** Measure one file section from the same render plan used by PierreDiffView. */ export function measureDiffSectionGeometry( file: DiffFile, @@ -100,6 +130,8 @@ export function measureDiffSectionGeometry( width = 0, showLineNumbers = true, wrapLines = false, + expandedKeys: ReadonlySet = EMPTY_EXPANDED_GAP_KEYS, + sourceStatus: FileSourceStatus | undefined = undefined, ): DiffSectionGeometry { if (file.metadata.hunks.length === 0) { return { @@ -113,8 +145,9 @@ export function measureDiffSectionGeometry( } // Width, wrapping, and line-number visibility all affect rendered row heights, so they must - // participate in the cache key alongside the structural file/layout inputs. - const cacheKey = `${file.id}:${layout}:${showHunkHeaders ? 1 : 0}:${theme.id}:${width}:${showLineNumbers ? 1 : 0}:${wrapLines ? 1 : 0}`; + // participate in the cache key alongside the structural file/layout inputs. Expansion state + // changes the row stream, so it has to participate too. + const cacheKey = `${file.id}:${layout}:${showHunkHeaders ? 1 : 0}:${theme.id}:${width}:${showLineNumbers ? 1 : 0}:${wrapLines ? 
1 : 0}${expansionCacheKey(expandedKeys, sourceStatus)}`; if (visibleAgentNotes.length > 0) { const cachedByNotes = NOTE_AWARE_SECTION_GEOMETRY_CACHE.get(visibleAgentNotes); const cached = cachedByNotes?.get(cacheKey); @@ -123,13 +156,21 @@ export function measureDiffSectionGeometry( } } - const plannedRows = buildBasePlannedRows(file, layout, showHunkHeaders, theme, visibleAgentNotes); + const { plannedRows, rows } = buildBasePlannedRows( + file, + layout, + showHunkHeaders, + theme, + visibleAgentNotes, + expandedKeys, + sourceStatus, + ); const hunkAnchorRows = new Map(); const hunkBounds = new Map(); const rowBounds: DiffSectionRowBounds[] = []; const rowBoundsByKey = new Map(); const rowBoundsByStableKey = new Map(); - const lineNumberDigits = String(findMaxLineNumber(file)).length; + const lineNumberDigits = String(findMaxLineNumberInRows(rows, findMaxLineNumber(file))).length; let bodyHeight = 0; for (const row of plannedRows) { @@ -168,7 +209,7 @@ export function measureDiffSectionGeometry( } } - if (height > 0 && rowContributesToHunkBounds(row)) { + if (height > 0 && plannedReviewRowContributesToHunkBounds(row)) { const rowId = reviewRowId(row.key); const existingBounds = hunkBounds.get(row.hunkIndex); From 1638d0e49fe0f68e72811bce500aac4ddfff2740 Mon Sep 17 00:00:00 2001 From: Adit Chandra Date: Wed, 13 May 2026 02:15:56 -0400 Subject: [PATCH 3/4] feat(review): toggle unchanged context inline Let the review controller own gap state and source-load lifecycles so keyboard, mouse, reload invalidation, and visible loading/error rows stay in one review flow. 
--- CHANGELOG.md | 2 + src/ui/App.tsx | 4 + src/ui/components/chrome/HelpDialog.tsx | 1 + src/ui/components/panes/DiffPane.tsx | 33 +- src/ui/components/panes/DiffSection.tsx | 12 + src/ui/components/ui-components.test.tsx | 250 ++++++++++- src/ui/diff/PierreDiffView.tsx | 59 ++- src/ui/diff/renderRows.tsx | 34 +- src/ui/diff/useHighlightedSource.ts | 76 ++++ src/ui/hooks/useAppKeyboardShortcuts.ts | 7 + src/ui/hooks/useReviewController.test.tsx | 511 +++++++++++++++++----- src/ui/hooks/useReviewController.ts | 176 ++++++++ test/helpers/diff-helpers.ts | 31 ++ test/pty/harness.ts | 20 + test/pty/ui-integration.test.ts | 38 ++ 15 files changed, 1134 insertions(+), 120 deletions(-) create mode 100644 src/ui/diff/useHighlightedSource.ts diff --git a/CHANGELOG.md b/CHANGELOG.md index 89505edf..41fc6b33 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ All notable user-visible changes to Hunk are documented in this file. ### Added +- Added inline expansion for collapsed unchanged file content. Click an unchanged-context row (`▾ N unchanged lines` when expandable, otherwise the static `··· N unchanged lines ···` form) or press `e` while a hunk is selected to reveal surrounding and trailing file lines without leaving the review. The affordance is shown only for input modes that have reachable source content (`hunk diff`, `show`, `stash show`, file-pair `diff` and `difftool`, untracked files); raw `hunk patch` input still renders as before. Failed and in-flight loads surface a one-line status ("Loading…", "Could not load N unchanged lines") on the gap row. Expanded context rows use the same syntax highlighting as the surrounding diff. 
+ ### Changed ### Fixed diff --git a/src/ui/App.tsx b/src/ui/App.tsx index 87dba698..ef550998 100644 --- a/src/ui/App.tsx +++ b/src/ui/App.tsx @@ -562,6 +562,7 @@ export function App({ switchMenu, toggleAgentNotes, toggleFocusArea, + toggleGapForSelectedHunk: review.toggleSelectedHunkGap, toggleHelp, toggleHunkHeaders, toggleLineNumbers, @@ -702,6 +703,7 @@ export function App({ review.selectHunk(fileId, hunkIndex, { preserveViewport: true }) } diff --git a/src/ui/components/chrome/HelpDialog.tsx b/src/ui/components/chrome/HelpDialog.tsx index 1ed1f595..e415ca53 100644 --- a/src/ui/components/chrome/HelpDialog.tsx +++ b/src/ui/components/chrome/HelpDialog.tsx @@ -44,6 +44,7 @@ export function HelpDialog({ ["1 / 2 / 0", "split / stack / auto"], ["s / t", "sidebar / theme"], ["a", "toggle AI notes"], + ["e", "toggle unchanged context"], ["l / w / m", "lines / wrap / metadata"], ], }, diff --git a/src/ui/components/panes/DiffPane.tsx b/src/ui/components/panes/DiffPane.tsx index dec8fd1c..819126a4 100644 --- a/src/ui/components/panes/DiffPane.tsx +++ b/src/ui/components/panes/DiffPane.tsx @@ -10,6 +10,7 @@ import { type RefObject, } from "react"; import type { DiffFile, LayoutMode } from "../../../core/types"; +import type { FileSourceStatus } from "../../diff/expandCollapsedRows"; import type { VisibleAgentNote } from "../../lib/agentAnnotations"; import { computeHunkRevealScrollTop } from "../../lib/hunkScroll"; import { @@ -125,10 +126,16 @@ function buildHighlightPrefetchFileIds({ return next; } +const EMPTY_EXPANDED_GAP_KEYS: ReadonlySet = new Set(); +const EMPTY_EXPANDED_GAPS_BY_FILE_ID: Record> = {}; +const EMPTY_SOURCE_STATUS_BY_FILE_ID: Record = {}; +const NOOP_TOGGLE_GAP = () => {}; + /** Render the main multi-file review stream. 
*/ export function DiffPane({ codeHorizontalOffset = 0, diffContentWidth, + expandedGapsByFileId = EMPTY_EXPANDED_GAPS_BY_FILE_ID, files, headerLabelWidth, headerStatsWidth, @@ -142,6 +149,7 @@ export function DiffPane({ showAgentNotes, showLineNumbers, showHunkHeaders, + sourceStatusByFileId = EMPTY_SOURCE_STATUS_BY_FILE_ID, wrapLines, wrapToggleScrollTop, layoutToggleScrollTop = null, @@ -153,10 +161,12 @@ export function DiffPane({ onOpenAgentNotesAtHunk, onScrollCodeHorizontally = () => {}, onSelectFile, + onToggleGap = NOOP_TOGGLE_GAP, onViewportCenteredHunkChange, }: { codeHorizontalOffset?: number; diffContentWidth: number; + expandedGapsByFileId?: Record>; files: DiffFile[]; headerLabelWidth: number; headerStatsWidth: number; @@ -170,6 +180,7 @@ export function DiffPane({ showAgentNotes: boolean; showLineNumbers: boolean; showHunkHeaders: boolean; + sourceStatusByFileId?: Record; wrapLines: boolean; wrapToggleScrollTop: number | null; layoutToggleScrollTop?: number | null; @@ -181,6 +192,7 @@ export function DiffPane({ onOpenAgentNotesAtHunk: (fileId: string, hunkIndex: number) => void; onScrollCodeHorizontally?: (delta: number) => void; onSelectFile: (fileId: string) => void; + onToggleGap?: (fileId: string, gapKey: string) => void; onViewportCenteredHunkChange?: (fileId: string, hunkIndex: number) => void; }) { const renderer = useRenderer(); @@ -398,9 +410,21 @@ export function DiffPane({ diffContentWidth, showLineNumbers, wrapLines, + expandedGapsByFileId[file.id] ?? 
EMPTY_EXPANDED_GAP_KEYS, + sourceStatusByFileId[file.id], ), ), - [diffContentWidth, files, layout, showHunkHeaders, showLineNumbers, theme, wrapLines], + [ + diffContentWidth, + expandedGapsByFileId, + files, + layout, + showHunkHeaders, + showLineNumbers, + sourceStatusByFileId, + theme, + wrapLines, + ], ); const baseEstimatedBodyHeights = useMemo( () => baseSectionGeometry.map((metrics) => metrics.bodyHeight), @@ -466,16 +490,20 @@ export function DiffPane({ diffContentWidth, showLineNumbers, wrapLines, + expandedGapsByFileId[file.id] ?? EMPTY_EXPANDED_GAP_KEYS, + sourceStatusByFileId[file.id], ); }), [ allAgentNotesByFile, baseSectionGeometry, diffContentWidth, + expandedGapsByFileId, files, layout, showHunkHeaders, showLineNumbers, + sourceStatusByFileId, theme, wrapLines, ], @@ -1163,6 +1191,7 @@ export function DiffPane({ 0} showLineNumbers={showLineNumbers} showHunkHeaders={showHunkHeaders} + sourceStatus={sourceStatusByFileId[file.id]} wrapLines={wrapLines} theme={theme} viewWidth={diffContentWidth} @@ -1186,6 +1216,7 @@ export function DiffPane({ onOpenAgentNotesAtHunk(file.id, hunkIndex) } onSelect={() => onSelectFile(file.id)} + onToggleGap={(gapKey) => onToggleGap(file.id, gapKey)} /> ); })} diff --git a/src/ui/components/panes/DiffSection.tsx b/src/ui/components/panes/DiffSection.tsx index 88c021df..617163d2 100644 --- a/src/ui/components/panes/DiffSection.tsx +++ b/src/ui/components/panes/DiffSection.tsx @@ -1,6 +1,7 @@ import { memo } from "react"; import type { DiffFile, LayoutMode } from "../../../core/types"; import { PierreDiffView } from "../../diff/PierreDiffView"; +import type { FileSourceStatus } from "../../diff/expandCollapsedRows"; import type { VisibleBodyBounds } from "../../diff/rowWindowing"; import type { DiffSectionGeometry } from "../../lib/diffSectionGeometry"; import { getAnnotatedHunkIndices, type VisibleAgentNote } from "../../lib/agentAnnotations"; @@ -11,6 +12,7 @@ import { DiffFileHeaderRow } from "./DiffFileHeaderRow"; 
interface DiffSectionProps { codeHorizontalOffset: number; + expandedGapKeys: ReadonlySet; file: DiffFile; headerLabelWidth: number; headerStatsWidth: number; @@ -21,6 +23,7 @@ interface DiffSectionProps { separatorWidth: number; showLineNumbers: boolean; showHunkHeaders: boolean; + sourceStatus: FileSourceStatus | undefined; wrapLines: boolean; showHeader: boolean; showSeparator: boolean; @@ -30,11 +33,13 @@ interface DiffSectionProps { viewWidth: number; onOpenAgentNotesAtHunk: (hunkIndex: number) => void; onSelect: () => void; + onToggleGap: (gapKey: string) => void; } /** Render one file section in the main review stream. */ function DiffSectionComponent({ codeHorizontalOffset, + expandedGapKeys, file, headerLabelWidth, headerStatsWidth, @@ -45,6 +50,7 @@ function DiffSectionComponent({ separatorWidth, showLineNumbers, showHunkHeaders, + sourceStatus, wrapLines, showHeader, showSeparator, @@ -54,6 +60,7 @@ function DiffSectionComponent({ viewWidth, onOpenAgentNotesAtHunk, onSelect, + onToggleGap, }: DiffSectionProps) { const annotatedHunkIndices = getAnnotatedHunkIndices(file); @@ -92,10 +99,12 @@ function DiffSectionComponent({ ) : null} { // This comparator relies on stable upstream object identity for files and visible-note arrays. 
return ( previous.codeHorizontalOffset === next.codeHorizontalOffset && + previous.expandedGapKeys === next.expandedGapKeys && previous.file === next.file && previous.headerLabelWidth === next.headerLabelWidth && previous.headerStatsWidth === next.headerStatsWidth && @@ -129,6 +140,7 @@ export const DiffSection = memo(DiffSectionComponent, (previous, next) => { previous.separatorWidth === next.separatorWidth && previous.showLineNumbers === next.showLineNumbers && previous.showHunkHeaders === next.showHunkHeaders && + previous.sourceStatus === next.sourceStatus && previous.wrapLines === next.wrapLines && previous.showHeader === next.showHeader && previous.showSeparator === next.showSeparator && diff --git a/src/ui/components/ui-components.test.tsx b/src/ui/components/ui-components.test.tsx index 668a3f15..9f873602 100644 --- a/src/ui/components/ui-components.test.tsx +++ b/src/ui/components/ui-components.test.tsx @@ -4,7 +4,11 @@ import { testRender } from "@opentui/react/test-utils"; import { act, createRef, useEffect, useState, type ReactNode } from "react"; import type { AppBootstrap, DiffFile } from "../../core/types"; import { createTestVcsAppBootstrap } from "../../../test/helpers/app-bootstrap"; -import { createTestDiffFile as buildTestDiffFile, lines } from "../../../test/helpers/diff-helpers"; +import { + createTestDiffFile as buildTestDiffFile, + createTestSourceFetcher, + lines, +} from "../../../test/helpers/diff-helpers"; import { hexColorDistance } from "../lib/color"; import { resolveTheme } from "../themes"; import { measureDiffSectionGeometry } from "../lib/diffSectionGeometry"; @@ -204,6 +208,21 @@ function createCollapsedTopDiffFile( return createTestDiffFile(id, path, lines(...beforeLines), lines(...afterLines)); } +/** Build a file whose first hunk leaves a collapsed gap that can be expanded. 
*/ +function createExpandableContextDiffFile( + id: string, + path: string, + sourceFetcher?: DiffFile["sourceFetcher"], +) { + const before = Array.from({ length: 30 }, (_, i) => `line ${i + 1}\n`).join(""); + const after = before.replace("line 5\n", "line 5 modified\n"); + + return { + after, + file: buildTestDiffFile({ after, before, context: 3, id, path, sourceFetcher }), + }; +} + function createDiffPaneProps( files: DiffFile[], theme = resolveTheme("midnight", null), @@ -762,6 +781,86 @@ describe("UI components", () => { } }); + test("DiffPane positions later files after expanded context rows", async () => { + const theme = resolveTheme("midnight", null); + const beforeLines = Array.from({ length: 30 }, (_, index) => `first line ${index + 1}`); + const afterLines = [...beforeLines]; + afterLines[4] = "first line 5 changed"; + const after = lines(...afterLines); + const firstFile = buildTestDiffFile({ + after, + before: lines(...beforeLines), + context: 0, + id: "first-expanded", + path: "first-expanded.ts", + }); + const secondFile = createTestDiffFile( + "second-after-expanded", + "second-after-expanded.ts", + "export const second = 1;\n", + "export const second = 2;\n", + ); + const files = [firstFile, secondFile]; + const expandedKeys = new Set(["trailing:0"]); + const sourceStatus = { kind: "loaded", text: after } as const; + const firstGeometry = measureDiffSectionGeometry( + firstFile, + "split", + true, + theme, + [], + 88, + true, + false, + expandedKeys, + sourceStatus, + ); + const secondGeometry = measureDiffSectionGeometry(secondFile, "split", true, theme, [], 88); + const fileSectionLayouts = buildFileSectionLayouts( + files, + [firstGeometry.bodyHeight, secondGeometry.bodyHeight], + buildInStreamFileHeaderHeights(files), + ); + const scrollRef = createRef(); + const props = createDiffPaneProps(files, theme, { + diffContentWidth: 88, + expandedGapsByFileId: { [firstFile.id]: expandedKeys }, + headerLabelWidth: 48, + headerStatsWidth: 16, + 
scrollRef, + separatorWidth: 84, + sourceStatusByFileId: { [firstFile.id]: sourceStatus }, + width: 92, + }); + const setup = await testRender(, { + width: 96, + height: 10, + }); + + try { + await settleDiffPane(setup); + + let frame = setup.captureCharFrame(); + expect(frame).toContain("Hide 25 unchanged lines"); + expect(frame).toContain("first line 6"); + expect(frame).not.toContain("second-after-expanded.ts"); + + await act(async () => { + scrollRef.current?.scrollTo(fileSectionLayouts[1]!.sectionTop); + }); + await settleDiffPane(setup); + + frame = await waitForFrame(setup, (nextFrame) => + nextFrame.includes("second-after-expanded.ts"), + ); + expect(frame).toContain("second-after-expanded.ts"); + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + test("DiffPane advances the review stream under the always-pinned file header above a collapsed gap", async () => { const theme = resolveTheme("midnight", null); const firstFile = createCollapsedTopDiffFile("late", "late.ts", 400, 366); @@ -2026,6 +2125,155 @@ describe("UI components", () => { expect(binaryFileFrame).toContain("Binary file skipped"); }); + test("PierreDiffView shows the expand chevron only when a source fetcher is attached", async () => { + const { file: baseFile } = createExpandableContextDiffFile("expand-affordance", "expand.ts"); + const theme = resolveTheme("midnight", null); + + const noFetcherFrame = await captureFrame( + , + 120, + 40, + ); + expect(noFetcherFrame).toContain("unchanged lines"); + expect(noFetcherFrame).not.toContain("▾"); + + const fileWithFetcher = { + ...baseFile, + sourceFetcher: createTestSourceFetcher(() => null), + }; + + const expandableFrame = await captureFrame( + {}} + />, + 120, + 40, + ); + expect(expandableFrame).toContain("▾"); + }); + + test("PierreDiffView toggles a collapsed gap when clicked", async () => { + const expandable = createExpandableContextDiffFile("expand-click", "expand-click.ts"); + const file = { + 
...expandable.file, + sourceFetcher: createTestSourceFetcher(() => expandable.after), + }; + const toggledGaps: string[] = []; + const theme = resolveTheme("midnight", null); + const setup = await testRender( + { + toggledGaps.push(gapKey); + }} + />, + { width: 120, height: 40 }, + ); + + try { + await act(async () => { + await setup.renderOnce(); + }); + + const frame = setup.captureCharFrame(); + const gapLineIndex = frame.split("\n").findIndex((line) => line.includes("▾")); + expect(gapLineIndex).toBeGreaterThanOrEqual(0); + + for (const y of [gapLineIndex, gapLineIndex + 1]) { + for (const x of [2, 8, 24]) { + await act(async () => { + await setup.mockMouse.click(x, y); + }); + if (toggledGaps.length > 0) { + break; + } + } + if (toggledGaps.length > 0) { + break; + } + } + + expect(toggledGaps).toEqual(["before:0"]); + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + + test("PierreDiffView highlights expanded unchanged source rows", async () => { + const beforeLines = Array.from({ length: 30 }, (_, index) => + index === 0 + ? 
"export const expandedMarker = 1;" + : `export const line${index + 1} = ${index + 1};`, + ); + const afterLines = [...beforeLines]; + afterLines[4] = "export const line5 = 999;"; + const after = lines(...afterLines); + const file = buildTestDiffFile({ + after, + before: lines(...beforeLines), + context: 3, + id: "expanded-highlight", + path: "expanded-highlight.ts", + }); + const theme = resolveTheme("midnight", null); + const setup = await testRender( + , + { width: 144, height: 20 }, + ); + + try { + let highlighted = false; + for (let iteration = 0; iteration < 400; iteration += 1) { + await act(async () => { + await setup.renderOnce(); + await Bun.sleep(0); + await setup.renderOnce(); + await Bun.sleep(0); + }); + + if (frameHasHighlightedMarker(setup.captureSpans(), "expandedMarker")) { + highlighted = true; + break; + } + } + + expect(highlighted).toBe(true); + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + test("PierreDiffView renders word-diff spans with a visibly different background in split view", async () => { const file = createTestDiffFile( "word-diff", diff --git a/src/ui/diff/PierreDiffView.tsx b/src/ui/diff/PierreDiffView.tsx index 0060525d..58a25624 100644 --- a/src/ui/diff/PierreDiffView.tsx +++ b/src/ui/diff/PierreDiffView.tsx @@ -1,30 +1,36 @@ -import { useMemo } from "react"; +import { useCallback, useMemo } from "react"; import type { DiffFile, LayoutMode } from "../../core/types"; import { AgentInlineNote, AgentInlineNoteGuideCap } from "../components/panes/AgentInlineNote"; import type { VisibleAgentNote } from "../lib/agentAnnotations"; import type { DiffSectionGeometry } from "../lib/diffSectionGeometry"; import { reviewRowId } from "../lib/ids"; import type { AppTheme } from "../themes"; -import { findMaxLineNumber } from "./codeColumns"; -import { buildSplitRows, buildStackRows } from "./pierre"; +import { findMaxLineNumber, findMaxLineNumberInRows } from "./codeColumns"; +import { 
expandCollapsedRows, type FileSourceStatus } from "./expandCollapsedRows"; +import { buildSplitRows, buildStackRows, spansForHighlightedSourceLine } from "./pierre"; import { plannedReviewRowVisible } from "./plannedReviewRows"; import { buildReviewRenderPlan } from "./reviewRenderPlan"; import { resolveVisiblePlannedRowWindow, type VisibleBodyBounds } from "./rowWindowing"; import { diffMessage, DiffRowView, fitText } from "./renderRows"; import { useHighlightedDiff } from "./useHighlightedDiff"; +import { useHighlightedSource } from "./useHighlightedSource"; const EMPTY_ANNOTATED_HUNK_INDICES = new Set(); const EMPTY_VISIBLE_AGENT_NOTES: VisibleAgentNote[] = []; +const EMPTY_EXPANDED_GAP_KEYS: ReadonlySet = new Set(); /** Render a file diff in split or stack mode, with inline agent notes inserted between diff rows. */ export function PierreDiffView({ annotatedHunkIndices = EMPTY_ANNOTATED_HUNK_INDICES, codeHorizontalOffset = 0, + expandedGapKeys = EMPTY_EXPANDED_GAP_KEYS, file, layout, onOpenAgentNotesAtHunk, + onToggleGap, showLineNumbers = true, showHunkHeaders = true, + sourceStatus, wrapLines = false, theme, visibleAgentNotes = EMPTY_VISIBLE_AGENT_NOTES, @@ -37,11 +43,14 @@ export function PierreDiffView({ }: { annotatedHunkIndices?: Set; codeHorizontalOffset?: number; + expandedGapKeys?: ReadonlySet; file: DiffFile | undefined; layout: Exclude; onOpenAgentNotesAtHunk?: (hunkIndex: number) => void; + onToggleGap?: (gapKey: string) => void; showLineNumbers?: boolean; showHunkHeaders?: boolean; + sourceStatus?: FileSourceStatus | undefined; wrapLines?: boolean; theme: AppTheme; visibleAgentNotes?: VisibleAgentNote[]; @@ -57,6 +66,23 @@ export function PierreDiffView({ appearance: theme.appearance, shouldLoadHighlight, }); + const sourceTextForHighlight = + sourceStatus?.kind === "loaded" && expandedGapKeys.size > 0 ? 
sourceStatus.text : undefined; + const resolvedHighlightedSource = useHighlightedSource({ + file, + text: sourceTextForHighlight, + appearance: theme.appearance, + shouldLoadHighlight: shouldLoadHighlight && expandedGapKeys.size > 0, + }); + const sourceLineSpans = useCallback( + (line: string | undefined, sourceLineNumber: number) => + spansForHighlightedSourceLine( + line, + resolvedHighlightedSource?.lines[sourceLineNumber], + theme, + ), + [resolvedHighlightedSource, theme], + ); const rows = useMemo( () => @@ -67,19 +93,39 @@ export function PierreDiffView({ : [], [file, layout, resolvedHighlighted, theme], ); + const expansionSide = file?.metadata.type === "deleted" ? "old" : "new"; + const fileHasSourceFetcher = Boolean(file?.sourceFetcher); + const gapToggleHandler = useMemo( + () => (fileHasSourceFetcher ? onToggleGap : undefined), + [fileHasSourceFetcher, onToggleGap], + ); + const expandedRows = useMemo( + () => + expandCollapsedRows(rows, { + layout, + expandedKeys: expandedGapKeys, + sourceLineSpans, + sourceStatus, + side: expansionSide, + }), + [rows, layout, expandedGapKeys, sourceLineSpans, sourceStatus, expansionSide], + ); const plannedRows = useMemo( () => file ? buildReviewRenderPlan({ fileId: file.id, - rows, + rows: expandedRows, showHunkHeaders, visibleAgentNotes, }) : [], - [file, rows, showHunkHeaders, visibleAgentNotes], + [file, expandedRows, showHunkHeaders, visibleAgentNotes], + ); + const lineNumberDigits = useMemo( + () => String(findMaxLineNumberInRows(expandedRows, file ? findMaxLineNumber(file) : 1)).length, + [expandedRows, file], ); - const lineNumberDigits = useMemo(() => String(file ? 
findMaxLineNumber(file) : 1).length, [file]); const visiblePlannedRowWindow = useMemo(() => { // Fall back to the full row list unless all three row-windowing inputs are ready: // - the complete planned row stream for this file @@ -196,6 +242,7 @@ export function PierreDiffView({ anchorId={plannedRow.anchorId} noteGuideSide={plannedRow.noteGuideSide} onOpenAgentNotesAtHunk={onOpenAgentNotesAtHunk} + onToggleGap={gapToggleHandler} /> ); diff --git a/src/ui/diff/renderRows.tsx b/src/ui/diff/renderRows.tsx index 3576e074..afa631e0 100644 --- a/src/ui/diff/renderRows.tsx +++ b/src/ui/diff/renderRows.tsx @@ -6,6 +6,7 @@ import { resolveSplitPaneWidths, resolveStackCellGeometry, } from "./codeColumns"; +import { gapKey } from "./expandCollapsedRows"; import type { DiffRow, RenderSpan, SplitLineCell, StackLineCell } from "./pierre"; import { diffRailMarker, @@ -446,6 +447,17 @@ export function diffMessage(file: DiffFile) { return "No textual hunks to render for this file."; } +/** Build the rendered label text for one collapsed gap row. */ +function collapsedRowLabel(text: string, expandable: boolean) { + if (!expandable) { + return `··· ${text} ···`; + } + + // The leading chevron hints that the row is interactive on terminals that + // render Unicode glyphs. The label still reads naturally on plain VT100. + return `▾ ${text}`; +} + /** Render collapsed and hunk-header rows, including the optional AI badge target. */ function renderHeaderRow( row: Extract, @@ -455,13 +467,18 @@ function renderHeaderRow( annotated: boolean, anchorId?: string, onOpenAgentNotesAtHunk?: (hunkIndex: number) => void, + onToggleGap?: (gapKey: string) => void, ) { const badgeText = annotated ? "[AI]" : ""; const badgeWidth = annotated ? badgeText.length + 1 : 0; - const label = - row.type === "collapsed" - ? 
fitText(`··· ${row.text} ···`, Math.max(0, width - 1 - badgeWidth)) - : fitText(row.text, Math.max(0, width - 1 - badgeWidth)); + const collapsedExpandable = row.type === "collapsed" && Boolean(onToggleGap); + const labelText = + row.type === "collapsed" ? collapsedRowLabel(row.text, collapsedExpandable) : row.text; + const label = fitText(labelText, Math.max(0, width - 1 - badgeWidth)); + const handleCollapsedClick = + row.type === "collapsed" && onToggleGap + ? () => onToggleGap(gapKey(row.position, row.hunkIndex)) + : undefined; if (!annotated) { return ( @@ -473,6 +490,7 @@ function renderHeaderRow( height: 1, backgroundColor: theme.panelAlt, }} + onMouseUp={handleCollapsedClick} > void, + onToggleGap?: (gapKey: string) => void, ) { let baseRow: ReactNode; @@ -620,6 +639,7 @@ function renderRow( annotated, anchorId, onOpenAgentNotesAtHunk, + onToggleGap, ); } else if (row.type === "hunk-header") { baseRow = showHunkHeaders @@ -840,6 +860,7 @@ interface DiffRowViewProps { anchorId?: string; noteGuideSide?: "old" | "new"; onOpenAgentNotesAtHunk?: (hunkIndex: number) => void; + onToggleGap?: (gapKey: string) => void; } /** Render one diff row, memoized to avoid unnecessary rerenders. 
*/ @@ -858,6 +879,7 @@ export const DiffRowView = memo( anchorId, noteGuideSide, onOpenAgentNotesAtHunk, + onToggleGap, }: DiffRowViewProps) { return renderRow( row, @@ -873,6 +895,7 @@ export const DiffRowView = memo( anchorId, noteGuideSide, onOpenAgentNotesAtHunk, + onToggleGap, ); }, (previous, next) => { @@ -888,7 +911,8 @@ export const DiffRowView = memo( previous.selected === next.selected && previous.annotated === next.annotated && previous.anchorId === next.anchorId && - previous.noteGuideSide === next.noteGuideSide + previous.noteGuideSide === next.noteGuideSide && + previous.onToggleGap === next.onToggleGap ); }, ); diff --git a/src/ui/diff/useHighlightedSource.ts b/src/ui/diff/useHighlightedSource.ts new file mode 100644 index 00000000..5cbd3325 --- /dev/null +++ b/src/ui/diff/useHighlightedSource.ts @@ -0,0 +1,76 @@ +import { useLayoutEffect, useMemo, useState } from "react"; +import type { DiffFile } from "../../core/types"; +import { loadHighlightedSourceLines, type HighlightedSourceCode } from "./pierre"; + +interface HighlightedSourceState { + cacheKey: string; + highlighted: HighlightedSourceCode; +} + +/** Summarize loaded source text for expansion highlighting invalidation. */ +function sourceTextFingerprint(text: string) { + let hash = 2166136261; + + for (let index = 0; index < text.length; index += 1) { + hash ^= text.charCodeAt(index); + hash = Math.imul(hash, 16777619); + } + + return `${text.length}:${(hash >>> 0).toString(36)}`; +} + +/** Cache key for full-source highlights used by expanded unchanged rows. */ +function buildSourceCacheKey(appearance: string, file: DiffFile, text: string) { + return `${appearance}:${file.id}:${file.path}:${file.language ?? ""}:${sourceTextFingerprint(text)}`; +} + +/** Resolve highlighted full-source content for expanded unchanged rows. 
*/ +export function useHighlightedSource({ + file, + text, + appearance, + shouldLoadHighlight, +}: { + file: DiffFile | undefined; + text: string | undefined; + appearance: "light" | "dark"; + shouldLoadHighlight?: boolean; +}) { + const [state, setState] = useState(null); + const cacheKey = useMemo( + () => (file && text !== undefined ? buildSourceCacheKey(appearance, file, text) : null), + [appearance, file, text], + ); + + useLayoutEffect(() => { + if (!file || text === undefined || !cacheKey) { + setState(null); + return; + } + + if (state?.cacheKey === cacheKey || !shouldLoadHighlight) { + return; + } + + let cancelled = false; + setState(null); + + loadHighlightedSourceLines({ file, text, appearance }) + .then((highlighted) => { + if (!cancelled) { + setState({ cacheKey, highlighted }); + } + }) + .catch(() => { + if (!cancelled) { + setState({ cacheKey, highlighted: { lines: [] } }); + } + }); + + return () => { + cancelled = true; + }; + }, [appearance, cacheKey, file, shouldLoadHighlight, state?.cacheKey, text]); + + return state?.cacheKey === cacheKey ? 
state.highlighted : null; +} diff --git a/src/ui/hooks/useAppKeyboardShortcuts.ts b/src/ui/hooks/useAppKeyboardShortcuts.ts index f2349d9e..d7f3f24a 100644 --- a/src/ui/hooks/useAppKeyboardShortcuts.ts +++ b/src/ui/hooks/useAppKeyboardShortcuts.ts @@ -41,6 +41,7 @@ export interface UseAppKeyboardShortcutsOptions { switchMenu: (delta: number) => void; toggleAgentNotes: () => void; toggleFocusArea: () => void; + toggleGapForSelectedHunk: () => void; toggleHelp: () => void; toggleHunkHeaders: () => void; toggleLineNumbers: () => void; @@ -72,6 +73,7 @@ export function useAppKeyboardShortcuts({ switchMenu, toggleAgentNotes, toggleFocusArea, + toggleGapForSelectedHunk, toggleHelp, toggleHunkHeaders, toggleLineNumbers, @@ -366,6 +368,11 @@ export function useAppKeyboardShortcuts({ return; } + if (key.name === "e" || key.sequence === "e") { + runAndCloseMenu(toggleGapForSelectedHunk); + return; + } + if (key.name === "[") { runAndCloseMenu(() => moveToHunk(-1)); return; diff --git a/src/ui/hooks/useReviewController.test.tsx b/src/ui/hooks/useReviewController.test.tsx index d0885f59..be7a1596 100644 --- a/src/ui/hooks/useReviewController.test.tsx +++ b/src/ui/hooks/useReviewController.test.tsx @@ -1,50 +1,34 @@ import { describe, expect, test } from "bun:test"; import { testRender } from "@opentui/react/test-utils"; -import { parseDiffFromFile } from "@pierre/diffs"; import { act, useEffect, useState } from "react"; import type { DiffFile } from "../../core/types"; +import { + createTestDeferred, + createTestDiffFile, + createTestSourceFetcher, + lines, +} from "../../../test/helpers/diff-helpers"; import { useReviewController, type ReviewController } from "./useReviewController"; -/** Build a minimal DiffFile with real parsed hunks and optional agent annotations. */ +/** Build a DiffFile with real parsed hunks using the controller's preferred defaults. 
*/ function createDiffFile( id: string, path: string, before: string, after: string, agent: DiffFile["agent"] = null, + sourceFetcher?: DiffFile["sourceFetcher"], ): DiffFile { - const metadata = parseDiffFromFile( - { name: path, contents: before, cacheKey: `${id}:before` }, - { name: path, contents: after, cacheKey: `${id}:after` }, - { context: 3 }, - true, - ); - - let additions = 0; - let deletions = 0; - for (const hunk of metadata.hunks) { - for (const content of hunk.hunkContent) { - if (content.type === "change") { - additions += content.additions; - deletions += content.deletions; - } - } - } - - return { + return createTestDiffFile({ + after, + agent, + before, + context: 3, id, - path, - patch: "", language: "typescript", - stats: { additions, deletions }, - metadata, - agent, - }; -} - -/** Build a stable multi-line string fixture. */ -function lines(...values: string[]) { - return `${values.join("\n")}\n`; + path, + sourceFetcher, + }); } /** Build one file with two hunks so selection clamping can be verified across reload-like updates. */ @@ -72,6 +56,18 @@ function createSingleHunkFile() { return createDiffFile("alpha", "alpha.ts", lines(...beforeLines), lines(...afterLines)); } +/** Build the small one-hunk alpha fixture used by source-loading tests. */ +function createAlphaFile(sourceFetcher?: DiffFile["sourceFetcher"]) { + return createDiffFile( + "alpha", + "alpha.ts", + "export const alpha = 1;\n", + "export const alpha = 2;\n", + null, + sourceFetcher, + ); +} + /** Let deferred filters and follow-up effects settle before reading controller state. */ async function flush(setup: Awaited>) { await act(async () => { @@ -110,31 +106,37 @@ function ReviewControllerHarness({ return null; } +/** Render the controller hook and expose its latest state to tests. 
*/ +async function renderReviewController(initialFiles: DiffFile[]) { + const controllerRef: { current: ReviewController | null } = { current: null }; + const setFilesRef: { current: ((nextFiles: DiffFile[]) => void) | null } = { current: null }; + const setup = await testRender( + { + controllerRef.current = nextController; + }} + onSetFiles={(nextSetFiles) => { + setFilesRef.current = nextSetFiles; + }} + />, + { width: 80, height: 4 }, + ); + + return { controllerRef, setFilesRef, setup }; +} + describe("useReviewController", () => { test("reselects the first visible file when filtering hides the current selection", async () => { - const controllerRef: { current: ReviewController | null } = { current: null }; - const setup = await testRender( - { - controllerRef.current = nextController; - }} - />, - { width: 80, height: 4 }, - ); + const { controllerRef, setup } = await renderReviewController([ + createDiffFile("alpha", "alpha.ts", "export const alpha = 1;\n", "export const alpha = 2;\n"), + createDiffFile( + "beta", + "beta.ts", + "export const beta = 1;\n", + "export const betaValue = 2;\n", + ), + ]); try { await flush(setup); @@ -159,20 +161,9 @@ describe("useReviewController", () => { }); test("clamps the selected hunk index when files update under a soft reload", async () => { - const controllerRef: { current: ReviewController | null } = { current: null }; - const setFilesRef: { current: ((nextFiles: DiffFile[]) => void) | null } = { current: null }; - const setup = await testRender( - { - controllerRef.current = nextController; - }} - onSetFiles={(nextSetFiles) => { - setFilesRef.current = nextSetFiles; - }} - />, - { width: 80, height: 4 }, - ); + const { controllerRef, setFilesRef, setup } = await renderReviewController([ + createTwoHunkFile(), + ]); try { await flush(setup); @@ -199,24 +190,10 @@ describe("useReviewController", () => { }); test("live comment mutations update annotated navigation without remounting the app", async () => { - const 
controllerRef: { current: ReviewController | null } = { current: null }; - const setup = await testRender( - { - controllerRef.current = nextController; - }} - />, - { width: 80, height: 4 }, - ); + const { controllerRef, setup } = await renderReviewController([ + createDiffFile("alpha", "alpha.ts", "export const alpha = 1;\n", "export const alpha = 2;\n"), + createDiffFile("beta", "beta.ts", "export const beta = 1;\n", "export const beta = 2;\n"), + ]); try { await flush(setup); @@ -271,16 +248,7 @@ describe("useReviewController", () => { }); test("batch live comments validate together and reveal the first applied hunk", async () => { - const controllerRef: { current: ReviewController | null } = { current: null }; - const setup = await testRender( - { - controllerRef.current = nextController; - }} - />, - { width: 80, height: 4 }, - ); + const { controllerRef, setup } = await renderReviewController([createTwoHunkFile()]); try { await flush(setup); @@ -320,16 +288,7 @@ describe("useReviewController", () => { }); test("batch live comments do not mutate state when any target is invalid", async () => { - const controllerRef: { current: ReviewController | null } = { current: null }; - const setup = await testRender( - { - controllerRef.current = nextController; - }} - />, - { width: 80, height: 4 }, - ); + const { controllerRef, setup } = await renderReviewController([createTwoHunkFile()]); try { await flush(setup); @@ -363,4 +322,342 @@ describe("useReviewController", () => { }); } }); + + test("toggleGap flips per-file expansion state and lazily loads source text", async () => { + const fakeFetcher = createTestSourceFetcher((side) => + side === "new" ? 
"alpha\nbeta\ngamma\n" : null, + ); + + const { controllerRef, setup } = await renderReviewController([createAlphaFile(fakeFetcher)]); + + try { + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + const expanded = expectValue(controllerRef.current).expandedGapsByFileId["alpha"]; + expect(expanded?.has("before:0")).toBe(true); + const status = expectValue(controllerRef.current).sourceStatusByFileId["alpha"]; + expect(status?.kind).toBe("loaded"); + if (status?.kind === "loaded") { + expect(status.text).toBe("alpha\nbeta\ngamma\n"); + } + expect(fakeFetcher.calls.length).toBeGreaterThanOrEqual(1); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + const reCollapsed = expectValue(controllerRef.current).expandedGapsByFileId["alpha"]; + expect(reCollapsed?.has("before:0")).toBe(false); + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + + test("toggleGap is a no-op for files without a source fetcher", async () => { + const { controllerRef, setup } = await renderReviewController([createAlphaFile()]); + + try { + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + expect(expectValue(controllerRef.current).expandedGapsByFileId["alpha"]).toBeUndefined(); + expect(expectValue(controllerRef.current).sourceStatusByFileId["alpha"]).toBeUndefined(); + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + + test("toggleSelectedHunkGap expands the nearest gap for the current selection", async () => { + const beforeLines = Array.from({ length: 30 }, (_, index) => `line ${index + 1}`); + const afterLines = [...beforeLines]; + afterLines[4] = "line 5 changed"; + const after = lines(...afterLines); + const sourceFetcher = createTestSourceFetcher((side) 
=> (side === "new" ? after : null)); + const file = createTestDiffFile({ + after, + before: lines(...beforeLines), + context: 3, + id: "alpha", + path: "alpha.ts", + sourceFetcher, + }); + + const { controllerRef, setup } = await renderReviewController([file]); + + try { + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleSelectedHunkGap(); + }); + await flush(setup); + + const expanded = expectValue(controllerRef.current).expandedGapsByFileId["alpha"]; + expect(expanded?.has("before:0")).toBe(true); + expect(sourceFetcher.calls).toEqual(["new"]); + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + + test("toggleGap surfaces an error status when the fetcher resolves null", async () => { + const failingFetcher = createTestSourceFetcher(() => null); + + const { controllerRef, setup } = await renderReviewController([ + createAlphaFile(failingFetcher), + ]); + + try { + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + const status = expectValue(controllerRef.current).sourceStatusByFileId["alpha"]; + expect(status?.kind).toBe("error"); + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + + test("toggleGap surfaces an error status and logs context when the fetcher rejects", async () => { + const originalConsoleError = console.error; + const loggedErrors: unknown[][] = []; + console.error = (...args: unknown[]) => { + loggedErrors.push(args); + }; + + const failingFetcher = createTestSourceFetcher(() => { + throw new Error("source unavailable"); + }); + + const { controllerRef, setup } = await renderReviewController([ + createAlphaFile(failingFetcher), + ]); + + try { + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + const status = 
expectValue(controllerRef.current).sourceStatusByFileId["alpha"]; + expect(status?.kind).toBe("error"); + expect(String(loggedErrors[0]?.[0])).toContain("alpha.ts"); + expect(String(loggedErrors[0]?.[0])).toContain("alpha"); + } finally { + console.error = originalConsoleError; + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + + test("toggleGap caches loaded text and does not re-fetch on the second open", async () => { + let readCount = 0; + const trackedFetcher = createTestSourceFetcher((side) => { + readCount += 1; + return side === "new" ? `read-${readCount}\n` : null; + }); + + const { controllerRef, setup } = await renderReviewController([ + createAlphaFile(trackedFetcher), + ]); + + try { + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + const callsAfterFirst = trackedFetcher.calls.length; + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + const status = expectValue(controllerRef.current).sourceStatusByFileId["alpha"]; + expect(status?.kind).toBe("loaded"); + if (status?.kind === "loaded") { + // Text reflects the first read, not a later one. + expect(status.text).toBe("read-1\n"); + } + expect(trackedFetcher.calls.length).toBe(callsAfterFirst); + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + + test("toggleGap requests old-side source for deleted files", async () => { + const trackedFetcher = createTestSourceFetcher((side) => (side === "old" ? 
"removed\n" : null)); + + const { controllerRef, setup } = await renderReviewController([ + createDiffFile("removed", "removed.ts", "removed\n", "", null, trackedFetcher), + ]); + + try { + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("removed", "trailing:0"); + }); + await flush(setup); + + expect(trackedFetcher.calls).toEqual(["old"]); + const status = expectValue(controllerRef.current).sourceStatusByFileId["removed"]; + expect(status?.kind).toBe("loaded"); + if (status?.kind === "loaded") { + expect(status.text).toBe("removed\n"); + } + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + + test("a soft reload that replaces a file's sourceFetcher invalidates cached source and expansion", async () => { + const firstFetcher = createTestSourceFetcher((side) => (side === "new" ? "first\n" : null)); + const secondFetcher = createTestSourceFetcher((side) => (side === "new" ? "second\n" : null)); + const baseFile = createAlphaFile(); + + const { controllerRef, setFilesRef, setup } = await renderReviewController([ + { ...baseFile, sourceFetcher: firstFetcher }, + ]); + + try { + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + // First fetch resolved against the original fetcher. + const initialStatus = expectValue(controllerRef.current).sourceStatusByFileId["alpha"]; + expect(initialStatus?.kind).toBe("loaded"); + if (initialStatus?.kind === "loaded") { + expect(initialStatus.text).toBe("first\n"); + } + expect( + expectValue(controllerRef.current).expandedGapsByFileId["alpha"]?.has("before:0"), + ).toBe(true); + + // Simulate a soft reload: same file id, different sourceFetcher (and patch). 
+ await act(async () => { + expectValue(setFilesRef.current)([{ ...baseFile, sourceFetcher: secondFetcher }]); + }); + await flush(setup); + + // The stale loaded text and stale expansion must be cleared so the + // renderer doesn't combine old source with the new patch. + expect(expectValue(controllerRef.current).sourceStatusByFileId["alpha"]).toBeUndefined(); + expect(expectValue(controllerRef.current).expandedGapsByFileId["alpha"]).toBeUndefined(); + + // Toggling again now fetches via the new fetcher and reports its text. + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + const refreshedStatus = expectValue(controllerRef.current).sourceStatusByFileId["alpha"]; + expect(refreshedStatus?.kind).toBe("loaded"); + if (refreshedStatus?.kind === "loaded") { + expect(refreshedStatus.text).toBe("second\n"); + } + expect(secondFetcher.calls.length).toBeGreaterThanOrEqual(1); + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + + test("a pending source load cannot repopulate state after a soft reload", async () => { + const firstLoad = createTestDeferred(); + const firstFetcher = createTestSourceFetcher(() => firstLoad.promise); + const secondFetcher = createTestSourceFetcher((side) => (side === "new" ? 
"second\n" : null)); + const baseFile = createAlphaFile(); + + const { controllerRef, setFilesRef, setup } = await renderReviewController([ + { ...baseFile, sourceFetcher: firstFetcher }, + ]); + + try { + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + expect(expectValue(controllerRef.current).sourceStatusByFileId["alpha"]?.kind).toBe( + "loading", + ); + + await act(async () => { + expectValue(setFilesRef.current)([{ ...baseFile, sourceFetcher: secondFetcher }]); + }); + await flush(setup); + + expect(expectValue(controllerRef.current).sourceStatusByFileId["alpha"]).toBeUndefined(); + expect(expectValue(controllerRef.current).expandedGapsByFileId["alpha"]).toBeUndefined(); + + await act(async () => { + firstLoad.resolve("first\n"); + await firstLoad.promise; + }); + await flush(setup); + + expect(expectValue(controllerRef.current).sourceStatusByFileId["alpha"]).toBeUndefined(); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + const refreshedStatus = expectValue(controllerRef.current).sourceStatusByFileId["alpha"]; + expect(refreshedStatus?.kind).toBe("loaded"); + if (refreshedStatus?.kind === "loaded") { + expect(refreshedStatus.text).toBe("second\n"); + } + expect(firstFetcher.calls).toEqual(["new"]); + expect(secondFetcher.calls).toEqual(["new"]); + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); }); diff --git a/src/ui/hooks/useReviewController.ts b/src/ui/hooks/useReviewController.ts index cd2508dc..892570e0 100644 --- a/src/ui/hooks/useReviewController.ts +++ b/src/ui/hooks/useReviewController.ts @@ -12,6 +12,7 @@ import { useDeferredValue, useEffect, useMemo, + useRef, useState, } from "react"; import { @@ -32,6 +33,9 @@ import type { RemovedCommentResult, SessionLiveCommentSummary, } from "../../hunk-session/types"; +import type { 
FileSourceStatus } from "../diff/expandCollapsedRows"; +import { selectGapForKeyboardToggle } from "../diff/expandCollapsedRows"; +import { trailingCollapsedLines } from "../diff/pierre"; import { findNextHunkCursor } from "../lib/hunks"; import { buildReviewState, @@ -46,6 +50,26 @@ function clamp(value: number, min: number, max: number) { return Math.min(Math.max(value, min), max); } +/** Return a new record with the given keys omitted, or the original when nothing changed. */ +function removeKeys(record: Record, keys: ReadonlySet): Record { + let changed = false; + const next: Record = {}; + for (const [key, value] of Object.entries(record)) { + if (keys.has(key)) { + changed = true; + } else { + next[key] = value; + } + } + return changed ? next : record; +} + +interface SourceLoadRequest { + fetcher: NonNullable; + requestId: number; + side: "old" | "new"; +} + export interface ReviewSelectionOptions { alignFileHeaderTop?: boolean; preserveViewport?: boolean; @@ -54,6 +78,7 @@ export interface ReviewSelectionOptions { export interface ReviewController { allFiles: DiffFile[]; + expandedGapsByFileId: Record>; filter: string; liveCommentCount: number; liveCommentSummaries: SessionLiveCommentSummary[]; @@ -69,6 +94,9 @@ export interface ReviewController { selectedHunk: DiffFile["metadata"]["hunks"][number] | undefined; selectedHunkIndex: number; sidebarEntries: ReviewState["sidebarEntries"]; + sourceStatusByFileId: Record; + toggleGap: (fileId: string, gapKey: string) => void; + toggleSelectedHunkGap: () => void; visibleFiles: DiffFile[]; addLiveComment: ( input: CommentToolInput, @@ -100,6 +128,49 @@ export function useReviewController({ files }: { files: DiffFile[] }): ReviewCon const [liveCommentsByFileId, setLiveCommentsByFileId] = useState>( {}, ); + const [expandedGapsByFileId, setExpandedGapsByFileId] = useState< + Record> + >({}); + const [sourceStatusByFileId, setSourceStatusByFileId] = useState< + Record + >({}); + // Mirror sourceStatusByFileId so 
toggleGap can dedup synchronously without + // waiting for React's state updater to commit. + const sourceStatusRef = useRef(sourceStatusByFileId); + sourceStatusRef.current = sourceStatusByFileId; + const sourceLoadRequestsRef = useRef(new Map()); + const nextSourceLoadRequestIdRef = useRef(1); + + // Track the files array we last reconciled against so we can invalidate + // expansion state when a soft reload replaces a file's sourceFetcher. + // Without this, the same file id could outlive a reload while its + // cached `loaded` source text refers to the previous patch, and toggleGap + // would short-circuit on stale state instead of re-fetching. + const [filesSnapshot, setFilesSnapshot] = useState(files); + if (filesSnapshot !== files) { + setFilesSnapshot(files); + const currentFetcherByFileId = new Map(); + for (const file of files) { + currentFetcherByFileId.set(file.id, file.sourceFetcher); + } + const staleFileIds = new Set(); + for (const previousFile of filesSnapshot) { + const currentFetcher = currentFetcherByFileId.get(previousFile.id); + // Either the file was removed, or its fetcher (and thus its patch) + // was replaced. Both invalidate any state keyed by file id. + if (currentFetcher !== previousFile.sourceFetcher) { + staleFileIds.add(previousFile.id); + } + } + if (staleFileIds.size > 0) { + for (const fileId of staleFileIds) { + sourceLoadRequestsRef.current.delete(fileId); + } + setSourceStatusByFileId((prev) => removeKeys(prev, staleFileIds)); + setExpandedGapsByFileId((prev) => removeKeys(prev, staleFileIds)); + } + } + const deferredFilter = useDeferredValue(filter); const { @@ -252,6 +323,107 @@ export function useReviewController({ files }: { files: DiffFile[] }): ReviewCon setFilter(""); }, []); + /** Toggle expansion of one collapsed gap and lazily load source when needed. 
*/ + const toggleGap = useCallback( + (fileId: string, gapKey: string) => { + const file = allFiles.find((entry) => entry.id === fileId); + if (!file?.sourceFetcher) { + return; + } + + setExpandedGapsByFileId((prev) => { + const current = prev[fileId]; + const next = new Set(current ?? []); + if (next.has(gapKey)) { + next.delete(gapKey); + } else { + next.add(gapKey); + } + return { ...prev, [fileId]: next }; + }); + + // The fetcher caches its own resolved text; we mirror it into React state + // as a tagged status so the UI can distinguish loading, loaded, and error + // states. Skip the fetch when one is already in flight or has resolved + // to avoid redundant work and stale "loading" flicker. + const currentStatus = sourceStatusRef.current[fileId]?.kind; + if (currentStatus === "loaded" || currentStatus === "loading") { + return; + } + + const side = file.metadata.type === "deleted" ? "old" : "new"; + const request = { + fetcher: file.sourceFetcher, + requestId: nextSourceLoadRequestIdRef.current, + side, + } satisfies SourceLoadRequest; + nextSourceLoadRequestIdRef.current += 1; + sourceLoadRequestsRef.current.set(fileId, request); + + const loadingStatus = { kind: "loading" } satisfies FileSourceStatus; + sourceStatusRef.current = { ...sourceStatusRef.current, [fileId]: loadingStatus }; + setSourceStatusByFileId((prev) => ({ ...prev, [fileId]: loadingStatus })); + + const isCurrentRequest = () => { + const current = sourceLoadRequestsRef.current.get(fileId); + return ( + current?.requestId === request.requestId && + current.fetcher === request.fetcher && + current.side === request.side + ); + }; + + const setSettledStatus = (nextStatus: FileSourceStatus) => { + setSourceStatusByFileId((prev) => { + if (!isCurrentRequest()) { + return prev; + } + sourceLoadRequestsRef.current.delete(fileId); + sourceStatusRef.current = { ...sourceStatusRef.current, [fileId]: nextStatus }; + return { + ...prev, + [fileId]: nextStatus, + }; + }); + }; + + void 
file.sourceFetcher + .getFullText(side) + .then((text) => { + setSettledStatus(text === null ? { kind: "error" } : { kind: "loaded", text }); + }) + .catch((error: unknown) => { + if (!isCurrentRequest()) { + return; + } + + console.error( + `hunk: failed to load ${side} source for ${file.path} (${file.id}).`, + error, + ); + setSettledStatus({ kind: "error" }); + }); + }, + [allFiles], + ); + + /** Toggle the collapsed gap nearest to the current hunk selection. */ + const toggleSelectedHunkGap = useCallback(() => { + const file = selectedFile; + if (!file?.sourceFetcher) { + return; + } + + const target = selectGapForKeyboardToggle( + file.metadata.hunks, + selectedHunkIndex, + trailingCollapsedLines(file.metadata) > 0, + ); + if (target) { + toggleGap(file.id, target); + } + }, [selectedFile, selectedHunkIndex, toggleGap]); + /** Resolve one session-daemon navigation request against the current review state and select it. */ const navigateToLocation = useCallback( (input: NavigateToHunkToolInput): NavigatedSelectionResult => { @@ -485,6 +657,7 @@ export function useReviewController({ files }: { files: DiffFile[] }): ReviewCon return { allFiles, + expandedGapsByFileId, filter, liveCommentCount, liveCommentSummaries, @@ -497,6 +670,9 @@ export function useReviewController({ files }: { files: DiffFile[] }): ReviewCon selectedHunk, selectedHunkIndex, sidebarEntries, + sourceStatusByFileId, + toggleGap, + toggleSelectedHunkGap, visibleFiles, addLiveComment, addLiveCommentBatch, diff --git a/test/helpers/diff-helpers.ts b/test/helpers/diff-helpers.ts index fdbdd742..d7f0dc30 100644 --- a/test/helpers/diff-helpers.ts +++ b/test/helpers/diff-helpers.ts @@ -1,4 +1,5 @@ import { parseDiffFromFile } from "@pierre/diffs"; +import type { FileSourceFetcher, FileSourceSide } from "../../src/core/fileSource"; import type { AgentAnnotation, AgentFileContext, DiffFile } from "../../src/core/types"; function collectChangeStats(metadata: DiffFile["metadata"]) { @@ -53,6 +54,7 @@ 
export function createTestDiffFile({ previousPath, context = 0, agent = null, + sourceFetcher, }: { after?: string; before?: string; @@ -62,6 +64,7 @@ export function createTestDiffFile({ previousPath?: string; context?: number; agent?: DiffFile["agent"] | boolean; + sourceFetcher?: FileSourceFetcher; } = {}): DiffFile { const metadata = parseDiffFromFile( { cacheKey: `${id}:before`, contents: before, name: path }, @@ -78,10 +81,38 @@ export function createTestDiffFile({ patch: "", path, previousPath, + sourceFetcher, stats: collectChangeStats(metadata), }; } +/** Build a promise handle that lets async tests settle work manually. */ +export function createTestDeferred() { + let resolve!: (value: T | PromiseLike) => void; + let reject!: (reason?: unknown) => void; + const promise = new Promise((nextResolve, nextReject) => { + resolve = nextResolve; + reject = nextReject; + }); + + return { promise, reject, resolve }; +} + +/** Build a source fetcher that records every requested side. */ +export function createTestSourceFetcher( + read: (side: FileSourceSide) => string | null | Promise, +): FileSourceFetcher & { calls: FileSourceSide[] } { + const calls: FileSourceSide[] = []; + + return { + calls, + async getFullText(side) { + calls.push(side); + return read(side); + }, + }; +} + export function createTestHeaderOnlyDiffFile(): DiffFile { const file = createTestDiffFile({ before: "const alpha = 1;\n", diff --git a/test/pty/harness.ts b/test/pty/harness.ts index 5e841642..364391e9 100644 --- a/test/pty/harness.ts +++ b/test/pty/harness.ts @@ -179,6 +179,25 @@ export function createPtyHarness() { return { dir, before, after }; } + function createExpandableContextFilePair() { + const dir = makeTempDir("hunk-tuistory-expand-"); + const before = join(dir, "before.ts"); + const after = join(dir, "after.ts"); + + const beforeLines = Array.from({ length: 30 }, (_, index) => + index === 0 + ? 
"export const hiddenLine01 = 1;" + : `export const line${String(index + 1).padStart(2, "0")} = ${index + 1};`, + ); + const afterLines = [...beforeLines]; + afterLines[4] = "export const line05 = 500;"; + + writeText(before, `${beforeLines.join("\n")}\n`); + writeText(after, `${afterLines.join("\n")}\n`); + + return { dir, before, after }; + } + function createScrollableFilePair() { const dir = makeTempDir("hunk-tuistory-scroll-"); const before = join(dir, "before.ts"); @@ -517,6 +536,7 @@ export function createPtyHarness() { createAgentFilePair, createBottomClampedRepoFixture, createCollapsedTopRepoFixture, + createExpandableContextFilePair, createCrossFileHunkNavigationRepoFixture, createLongWrapFilePair, createMultiHunkFilePair, diff --git a/test/pty/ui-integration.test.ts b/test/pty/ui-integration.test.ts index 1696b963..41bed266 100644 --- a/test/pty/ui-integration.test.ts +++ b/test/pty/ui-integration.test.ts @@ -122,6 +122,44 @@ describe("live UI integration", () => { } }); + test("real PTY sessions can expand and collapse unchanged context", async () => { + const fixture = harness.createExpandableContextFilePair(); + const session = await harness.launchHunk({ + args: ["diff", fixture.before, fixture.after, "--mode", "split"], + cols: 140, + rows: 16, + }); + + try { + const initial = await session.waitForText(/View\s+Navigate\s+Theme\s+Agent\s+Help/, { + timeout: 15_000, + }); + + expect(initial).toContain("▾ 1 unchanged line"); + expect(initial).not.toContain("hiddenLine01"); + + await session.press("e"); + const expanded = await harness.waitForSnapshot( + session, + (text) => text.includes("Hide 1 unchanged line") && text.includes("hiddenLine01"), + 5_000, + ); + + expect(expanded).toContain("hiddenLine01"); + + await session.press("e"); + const collapsed = await harness.waitForSnapshot( + session, + (text) => text.includes("▾ 1 unchanged line") && !text.includes("hiddenLine01"), + 5_000, + ); + + expect(collapsed).not.toContain("hiddenLine01"); + } 
finally { + session.close(); + } + }); + test("backward cross-file hunk navigation reveals the target hunk in a real PTY", async () => { const fixture = harness.createCrossFileHunkNavigationRepoFixture(); const session = await harness.launchHunk({ From 952f1d877e032eb1660745d815c0a9dcf55365cb Mon Sep 17 00:00:00 2001 From: Adit Chandra Date: Thu, 14 May 2026 01:07:51 -0400 Subject: [PATCH 4/4] fix: harden source expansion loading --- src/core/fileSource.test.ts | 42 ++++++++++ src/core/fileSource.ts | 51 ++++++++---- src/core/loaders.test.ts | 56 +++++++++++++ src/core/loaders.ts | 73 +++++++++++------ src/ui/diff/expandCollapsedRows.test.ts | 37 +++++++++ src/ui/diff/expandCollapsedRows.ts | 16 ++++ src/ui/hooks/useReviewController.test.tsx | 98 +++++++++++++++++++++-- src/ui/hooks/useReviewController.ts | 25 +++--- src/ui/lib/diffSectionGeometry.test.ts | 61 ++++++++++++++ src/ui/lib/diffSectionGeometry.ts | 14 +++- 10 files changed, 417 insertions(+), 56 deletions(-) diff --git a/src/core/fileSource.test.ts b/src/core/fileSource.test.ts index 48523415..ecdf3323 100644 --- a/src/core/fileSource.test.ts +++ b/src/core/fileSource.test.ts @@ -140,6 +140,48 @@ describe("createFileSourceFetcher", () => { expect(await fetcher.getFullText("new")).toBe("working tree\n"); }); + test("passes custom git executable through async git source reads", async () => { + const originalSpawn = Bun.spawn; + const mutableBun = Bun as unknown as { spawn: typeof Bun.spawn }; + const spawnCalls: string[][] = []; + + mutableBun.spawn = ((cmds: string[]) => { + spawnCalls.push(cmds); + return originalSpawn( + [ + process.execPath, + "--eval", + `process.stdout.write(${JSON.stringify(`read:${cmds[2]}\n`)})`, + ], + { + stdin: "ignore", + stdout: "pipe", + stderr: "pipe", + }, + ); + }) as typeof Bun.spawn; + + try { + const fetcher = createFileSourceFetcher( + { + old: { kind: "git-blob", repoRoot: process.cwd(), ref: "HEAD", path: "note.txt" }, + new: { kind: "git-index", repoRoot: 
process.cwd(), path: "note.txt" }, + }, + { gitExecutable: "custom-git" }, + ); + + expect(await fetcher.getFullText("old")).toBe("read:HEAD:note.txt\n"); + expect(await fetcher.getFullText("new")).toBe("read::note.txt\n"); + } finally { + mutableBun.spawn = originalSpawn; + } + + expect(spawnCalls).toEqual([ + ["custom-git", "show", "HEAD:note.txt"], + ["custom-git", "show", ":note.txt"], + ]); + }); + test("returns null when a git blob cannot be resolved", async () => { const repoRoot = createTempRepo("hunk-source-git-missing-"); writeFileSync(join(repoRoot, "tracked.txt"), "x\n"); diff --git a/src/core/fileSource.ts b/src/core/fileSource.ts index 8d5addf7..8286f766 100644 --- a/src/core/fileSource.ts +++ b/src/core/fileSource.ts @@ -24,6 +24,10 @@ export interface FileSourceFetcher { getFullText(side: FileSourceSide): Promise; } +export interface FileSourceFetcherOptions { + gitExecutable?: string; +} + interface ResolvedSpecs { old: FileSourceSpec; new: FileSourceSpec; @@ -77,27 +81,27 @@ async function readFsSpec(spec: Extract): Promis function readGitBlobSpec( spec: Extract, gitExecutable = "git", -): string | null { +): Promise { return readGitObjectSpec(spec.repoRoot, `${spec.ref}:${spec.path}`, gitExecutable); } function readGitIndexSpec( spec: Extract, gitExecutable = "git", -): string | null { +): Promise { return readGitObjectSpec(spec.repoRoot, `:${spec.path}`, gitExecutable); } /** Read a blob-like Git object spec such as `HEAD:path` or `:path`. 
*/ -function readGitObjectSpec( +async function readGitObjectSpec( repoRoot: string, objectName: string, gitExecutable = "git", -): string | null { - let proc: ReturnType; +): Promise { + let proc: Bun.ReadableSubprocess; try { - proc = Bun.spawnSync([gitExecutable, "show", objectName], { + proc = Bun.spawn([gitExecutable, "show", objectName], { cwd: repoRoot, stdin: "ignore", stdout: "pipe", @@ -108,18 +112,34 @@ function readGitObjectSpec( return null; } - if (proc.exitCode !== 0) { - const stderr = Buffer.from(proc.stderr ?? []).toString("utf8"); + let output: [number, string, string]; + try { + output = await Promise.all([ + proc.exited, + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + ]); + } catch (error) { + logSourceDiagnostic(`failed to collect Git source ${objectName}`, error); + return null; + } + + const [exitCode, stdout, stderr] = output; + + if (exitCode !== 0) { if (!isExpectedMissingGitSource(stderr)) { logSourceDiagnostic(`failed to read Git source ${objectName} in ${repoRoot}`, stderr); } return null; } - return Buffer.from(proc.stdout ?? []).toString("utf8"); + return stdout; } -async function readSpec(spec: FileSourceSpec): Promise { +async function readSpec( + spec: FileSourceSpec, + { gitExecutable = "git" }: FileSourceFetcherOptions = {}, +): Promise { if (spec.kind === "none") { return null; } @@ -129,14 +149,17 @@ async function readSpec(spec: FileSourceSpec): Promise { } if (spec.kind === "git-index") { - return readGitIndexSpec(spec); + return readGitIndexSpec(spec, gitExecutable); } - return readGitBlobSpec(spec); + return readGitBlobSpec(spec, gitExecutable); } /** Build a per-file source fetcher that caches each side's resolved text. 
*/ -export function createFileSourceFetcher(specs: ResolvedSpecs): FileSourceFetcher { +export function createFileSourceFetcher( + specs: ResolvedSpecs, + { gitExecutable = "git" }: Readonly = {}, +): FileSourceFetcher { const cache = new Map(); return { @@ -145,7 +168,7 @@ export function createFileSourceFetcher(specs: ResolvedSpecs): FileSourceFetcher return cache.get(side) ?? null; } - const text = await readSpec(specs[side]); + const text = await readSpec(specs[side], { gitExecutable }); cache.set(side, text); return text; }, diff --git a/src/core/loaders.test.ts b/src/core/loaders.test.ts index 4af540a5..cad4eaa8 100644 --- a/src/core/loaders.test.ts +++ b/src/core/loaders.test.ts @@ -1396,6 +1396,62 @@ describe("loadAppBootstrap source fetcher attachment", () => { expect(await file?.sourceFetcher?.getFullText("old")).toBe("first\n"); }); + test("git source fetchers use the custom git executable from bootstrap loading", async () => { + const dir = createTempRepo("hunk-source-custom-git-"); + writeFileSync(join(dir, "value.txt"), "first\n"); + git(dir, "add", "value.txt"); + git(dir, "commit", "-m", "initial"); + writeFileSync(join(dir, "value.txt"), "second\n"); + + const gitExecutable = "hunk-custom-git"; + const syncCalls: string[][] = []; + const asyncCalls: string[][] = []; + const originalSpawnSync = Bun.spawnSync; + const originalSpawn = Bun.spawn; + const mutableBun = Bun as unknown as { + spawnSync: typeof Bun.spawnSync; + spawn: typeof Bun.spawn; + }; + + mutableBun.spawnSync = ((cmds: string[], options?: Parameters[1]) => { + if (cmds[0] === gitExecutable) { + syncCalls.push(cmds); + return originalSpawnSync(["git", ...cmds.slice(1)], options); + } + + return originalSpawnSync(cmds, options); + }) as typeof Bun.spawnSync; + mutableBun.spawn = ((cmds: string[], options?: Parameters[1]) => { + if (cmds[0] === gitExecutable) { + asyncCalls.push(cmds); + return originalSpawn(["git", ...cmds.slice(1)], options); + } + + return originalSpawn(cmds, 
options); + }) as typeof Bun.spawn; + + try { + const bootstrap = await loadAppBootstrap( + { + kind: "vcs", + staged: false, + options: { mode: "auto" }, + }, + { cwd: dir, gitExecutable }, + ); + + const file = bootstrap.changeset.files[0]; + expect(await file?.sourceFetcher?.getFullText("old")).toBe("first\n"); + } finally { + mutableBun.spawnSync = originalSpawnSync; + mutableBun.spawn = originalSpawn; + } + + expect(syncCalls.some((call) => call.includes("rev-parse"))).toBe(true); + expect(syncCalls.some((call) => call.includes("diff"))).toBe(true); + expect(asyncCalls).toContainEqual([gitExecutable, "show", ":value.txt"]); + }); + test("unstaged working-tree diffs read old source from the index when it differs from HEAD", async () => { const dir = createTempRepo("hunk-source-git-wt-index-"); writeFileSync(join(dir, "value.txt"), "committed\n"); diff --git a/src/core/loaders.ts b/src/core/loaders.ts index 9b9ab269..a01577d9 100644 --- a/src/core/loaders.ts +++ b/src/core/loaders.ts @@ -12,7 +12,12 @@ import { findAgentFileContext, loadAgentContext } from "./agent"; import { createSkippedBinaryMetadata, isProbablyBinaryFile, patchLooksBinary } from "./binary"; import { normalizeDiffMetadataPaths, normalizeDiffPath } from "./diffPaths"; import { HunkUserError } from "./errors"; -import { createFileSourceFetcher, type FileSourceFetcher, type FileSourceSpec } from "./fileSource"; +import { + createFileSourceFetcher, + type FileSourceFetcher, + type FileSourceFetcherOptions, + type FileSourceSpec, +} from "./fileSource"; import { buildGitDiffArgs, buildGitDiffNumstatArgs, @@ -50,6 +55,7 @@ import type { interface LoadAppBootstrapOptions { cwd?: string; + gitExecutable?: string; } const LARGE_DIFF_FILE_MAX_BYTES = 1_000_000; @@ -239,6 +245,7 @@ interface ResolvedFileSourceSpecs { /** Build a binary-aware source-fetcher factory from per-file source specs. 
*/ function createSourceFetcherBuilder( resolveSpecs: (file: DiffFileSourceContext) => ResolvedFileSourceSpecs | undefined, + options: FileSourceFetcherOptions = {}, ): NonNullable { return (file) => { if (file.isBinary) { @@ -246,7 +253,7 @@ function createSourceFetcherBuilder( } const specs = resolveSpecs(file); - return specs ? createFileSourceFetcher(specs) : undefined; + return specs ? createFileSourceFetcher(specs, options) : undefined; }; } @@ -272,6 +279,7 @@ function gitEndpointSourceSpec( function buildGitEndpointSourceFetcherBuilder( repoRoot: string, endpoints: GitDiffEndpoints, + options: FileSourceFetcherOptions = {}, ): NonNullable { return createSourceFetcherBuilder(({ path, previousPath, type }) => { const oldPath = previousPath ?? path; @@ -284,7 +292,7 @@ function buildGitEndpointSourceFetcherBuilder( ? { kind: "none" } : gitEndpointSourceSpec(endpoints.new, repoRoot, path), }; - }); + }, options); } /** Build the normalized per-file model used by the UI regardless of input mode. */ @@ -821,6 +829,7 @@ function buildUntrackedDiffFile( repoRoot: string, sourcePrefix: string, agentContext: AgentContext | null, + gitExecutable = "git", ) { const largeFileCheck = inspectLargeUntrackedFile(repoRoot, filePath); if (largeFileCheck.shouldSkip) { @@ -840,7 +849,7 @@ function buildUntrackedDiffFile( } const patch = normalizeUntrackedPatchHeaders( - runGitUntrackedFileDiffText(input, filePath, { repoRoot }), + runGitUntrackedFileDiffText(input, filePath, { repoRoot, gitExecutable }), filePath, ); @@ -1063,8 +1072,9 @@ async function loadGitChangeset( input: VcsCommandInput, agentContext: AgentContext | null, cwd = process.cwd(), + gitExecutable = "git", ) { - const repoRoot = resolveGitRepoRoot(input, { cwd }); + const repoRoot = resolveGitRepoRoot(input, { cwd, gitExecutable }); const repoName = basename(repoRoot); const title = input.staged ? `${repoName} staged changes` @@ -1072,14 +1082,14 @@ async function loadGitChangeset( ? 
`${repoName} ${input.range}` : `${repoName} working tree`; const largeTrackedFiles = parseGitNumstat( - runGitText({ input, args: buildGitDiffNumstatArgs(input), cwd }), + runGitText({ input, args: buildGitDiffNumstatArgs(input), cwd, gitExecutable }), ).filter((file) => shouldSkipLargeTrackedDiff(file, repoRoot)); - const endpoints = resolveGitDiffEndpoints(input, { cwd, repoRoot }); + const endpoints = resolveGitDiffEndpoints(input, { cwd, repoRoot, gitExecutable }); // When the range maps to a shape we can't represent as a single old/new pair // (e.g. octopus, multi-positive sets), omit the source fetcher entirely so // expansion is disabled rather than reading from the wrong revision. const sourceFetcherBuilder = endpoints - ? buildGitEndpointSourceFetcherBuilder(repoRoot, endpoints) + ? buildGitEndpointSourceFetcherBuilder(repoRoot, endpoints, { gitExecutable }) : undefined; const trackedChangeset = normalizePatchChangeset( runGitText({ @@ -1089,6 +1099,7 @@ async function loadGitChangeset( largeTrackedFiles.map((file) => file.path), ), cwd, + gitExecutable, }), title, repoRoot, @@ -1106,7 +1117,7 @@ async function loadGitChangeset( ), ), ]; - const untrackedFiles = listGitUntrackedFiles(input, { cwd, repoRoot }); + const untrackedFiles = listGitUntrackedFiles(input, { cwd, repoRoot, gitExecutable }); if (untrackedFiles.length === 0) { return { @@ -1127,6 +1138,7 @@ async function loadGitChangeset( repoRoot, repoRoot, agentContext, + gitExecutable, ), ), ], @@ -1137,11 +1149,16 @@ function buildRefRangeSourceFetcherBuilder( repoRoot: string, oldRef: string, newRef: string, + options: FileSourceFetcherOptions = {}, ): NonNullable { - return buildGitEndpointSourceFetcherBuilder(repoRoot, { - old: { kind: "git-ref", ref: oldRef }, - new: { kind: "git-ref", ref: newRef }, - }); + return buildGitEndpointSourceFetcherBuilder( + repoRoot, + { + old: { kind: "git-ref", ref: oldRef }, + new: { kind: "git-ref", ref: newRef }, + }, + options, + ); } /** Build a changeset 
from the current Jujutsu working-copy commit or a revset. */ @@ -1171,20 +1188,23 @@ async function loadShowChangeset( input: ShowCommandInput, agentContext: AgentContext | null, cwd = process.cwd(), + gitExecutable = "git", ) { - const repoRoot = resolveGitRepoRoot(input, { cwd }); + const repoRoot = resolveGitRepoRoot(input, { cwd, gitExecutable }); const repoName = basename(repoRoot); const requestedRef = input.ref ?? "HEAD"; - const newRef = resolveGitCommitRef(input, requestedRef, { cwd: repoRoot }); + const newRef = resolveGitCommitRef(input, requestedRef, { cwd: repoRoot, gitExecutable }); const showInput = { ...input, ref: newRef }; return normalizePatchChangeset( - runGitText({ input, args: buildGitShowArgs(showInput), cwd }), + runGitText({ input, args: buildGitShowArgs(showInput), cwd, gitExecutable }), input.ref ? `${repoName} show ${input.ref}` : `${repoName} show HEAD`, repoRoot, agentContext, { - sourceFetcherBuilder: buildRefRangeSourceFetcherBuilder(repoRoot, `${newRef}^`, newRef), + sourceFetcherBuilder: buildRefRangeSourceFetcherBuilder(repoRoot, `${newRef}^`, newRef, { + gitExecutable, + }), }, ); } @@ -1212,6 +1232,7 @@ async function loadStashShowChangeset( input: StashShowCommandInput, agentContext: AgentContext | null, cwd = process.cwd(), + gitExecutable = "git", ) { if (input.options.vcs === "jj") { throw new HunkUserError("`hunk stash show` requires Git VCS mode.", [ @@ -1219,19 +1240,21 @@ async function loadStashShowChangeset( ]); } - const repoRoot = resolveGitRepoRoot(input, { cwd }); + const repoRoot = resolveGitRepoRoot(input, { cwd, gitExecutable }); const repoName = basename(repoRoot); const requestedRef = input.ref ?? 
"stash@{0}"; - const newRef = resolveGitCommitRef(input, requestedRef, { cwd: repoRoot }); + const newRef = resolveGitCommitRef(input, requestedRef, { cwd: repoRoot, gitExecutable }); const stashInput = { ...input, ref: newRef }; return normalizePatchChangeset( - runGitText({ input, args: buildGitStashShowArgs(stashInput), cwd }), + runGitText({ input, args: buildGitStashShowArgs(stashInput), cwd, gitExecutable }), input.ref ? `${repoName} stash ${input.ref}` : `${repoName} stash`, repoRoot, agentContext, { - sourceFetcherBuilder: buildRefRangeSourceFetcherBuilder(repoRoot, `${newRef}^`, newRef), + sourceFetcherBuilder: buildRefRangeSourceFetcherBuilder(repoRoot, `${newRef}^`, newRef, { + gitExecutable, + }), }, ); } @@ -1260,7 +1283,7 @@ async function loadPatchChangeset( /** Resolve CLI input into the fully loaded app bootstrap state. */ export async function loadAppBootstrap( input: CliInput, - { cwd = process.cwd() }: LoadAppBootstrapOptions = {}, + { cwd = process.cwd(), gitExecutable = "git" }: LoadAppBootstrapOptions = {}, ): Promise { const agentContext = await loadAgentContext(input.options.agentContext, { cwd }); @@ -1271,16 +1294,16 @@ export async function loadAppBootstrap( changeset = input.options.vcs === "jj" ? await loadJjDiffChangeset(input, agentContext, cwd) - : await loadGitChangeset(input, agentContext, cwd); + : await loadGitChangeset(input, agentContext, cwd, gitExecutable); break; case "show": changeset = input.options.vcs === "jj" ? 
await loadJjShowChangeset(input, agentContext, cwd) - : await loadShowChangeset(input, agentContext, cwd); + : await loadShowChangeset(input, agentContext, cwd, gitExecutable); break; case "stash-show": - changeset = await loadStashShowChangeset(input, agentContext, cwd); + changeset = await loadStashShowChangeset(input, agentContext, cwd, gitExecutable); break; case "diff": changeset = await loadFileDiffChangeset(input, agentContext, cwd); diff --git a/src/ui/diff/expandCollapsedRows.test.ts b/src/ui/diff/expandCollapsedRows.test.ts index daf3d200..a285842a 100644 --- a/src/ui/diff/expandCollapsedRows.test.ts +++ b/src/ui/diff/expandCollapsedRows.test.ts @@ -279,6 +279,43 @@ describe("expandCollapsedRows", () => { } expect(inserted.cell.spans).toEqual([{ text: "highlighted:beta", fg: "#abcdef" }]); }); + + test("shows an error row when loaded source is shorter than the collapsed range", () => { + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [1, 3], [1, 3]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "stack", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: { kind: "loaded", text: "alpha\n" }, + side: "new", + }); + + expect(result.map((row) => row.type)).toEqual(["collapsed", "hunk-header"]); + const collapsed = result[0]; + if (!collapsed || collapsed.type !== "collapsed") { + throw new Error("expected first row to be collapsed"); + } + expect(collapsed.text.toLowerCase()).toContain("could not load"); + expect(collapsed.text.toLowerCase()).not.toContain("hide"); + }); + + test("shows an error row when old-side split expansion is out of bounds", () => { + const rows: DiffRow[] = [makeCollapsedRow("before", 0, [2, 3], [10, 11]), makeHunkHeader(0)]; + + const result = expandCollapsedRows(rows, { + layout: "split", + expandedKeys: new Set([gapKey("before", 0)]), + sourceStatus: { kind: "loaded", text: "alpha\n" }, + side: "old", + }); + + expect(result.map((row) => row.type)).toEqual(["collapsed", 
"hunk-header"]); + const collapsed = result[0]; + if (!collapsed || collapsed.type !== "collapsed") { + throw new Error("expected first row to be collapsed"); + } + expect(collapsed.text.toLowerCase()).toContain("could not load"); + }); }); describe("selectGapForKeyboardToggle", () => { diff --git a/src/ui/diff/expandCollapsedRows.ts b/src/ui/diff/expandCollapsedRows.ts index c1abed3a..bee9d73f 100644 --- a/src/ui/diff/expandCollapsedRows.ts +++ b/src/ui/diff/expandCollapsedRows.ts @@ -190,6 +190,18 @@ export function expandCollapsedRows( continue; } + const sourceStartIndex = range[0] - 1; + const sourceEndIndex = range[1] - 1; + if ( + lineCount > 0 && + (sourceStartIndex < 0 || + sourceEndIndex < sourceStartIndex || + sourceEndIndex >= sourceLines.length) + ) { + result.push({ ...row, text: errorRowText(lineCount) }); + continue; + } + result.push({ ...row, text: expandedRowText(lineCount), @@ -199,6 +211,10 @@ export function expandCollapsedRows( const oldLineNumber = row.oldRange[0] + offset; const newLineNumber = row.newRange[0] + offset; const sourceLineNumber = (side === "old" ? oldLineNumber : newLineNumber) - 1; + if (sourceLineNumber < 0 || sourceLineNumber >= sourceLines.length) { + break; + } + const text = sourceLines[sourceLineNumber]; const spans = sourceLineSpans?.(text, sourceLineNumber) ?? 
spansFor(text); diff --git a/src/ui/hooks/useReviewController.test.tsx b/src/ui/hooks/useReviewController.test.tsx index be7a1596..8a311904 100644 --- a/src/ui/hooks/useReviewController.test.tsx +++ b/src/ui/hooks/useReviewController.test.tsx @@ -1,6 +1,6 @@ import { describe, expect, test } from "bun:test"; import { testRender } from "@opentui/react/test-utils"; -import { act, useEffect, useState } from "react"; +import { act, StrictMode, useEffect, useState } from "react"; import type { DiffFile } from "../../core/types"; import { createTestDeferred, @@ -107,10 +107,13 @@ function ReviewControllerHarness({ } /** Render the controller hook and expose its latest state to tests. */ -async function renderReviewController(initialFiles: DiffFile[]) { +async function renderReviewController( + initialFiles: DiffFile[], + { strictMode = false }: { strictMode?: boolean } = {}, +) { const controllerRef: { current: ReviewController | null } = { current: null }; const setFilesRef: { current: ((nextFiles: DiffFile[]) => void) | null } = { current: null }; - const setup = await testRender( + const harness = ( { @@ -119,9 +122,12 @@ async function renderReviewController(initialFiles: DiffFile[]) { onSetFiles={(nextSetFiles) => { setFilesRef.current = nextSetFiles; }} - />, - { width: 80, height: 4 }, + /> ); + const setup = await testRender(strictMode ? 
{harness} : harness, { + width: 80, + height: 4, + }); return { controllerRef, setFilesRef, setup }; } @@ -361,6 +367,41 @@ describe("useReviewController", () => { } }); + test("toggleGap settles source status under React StrictMode", async () => { + const deferred = createTestDeferred(); + const fakeFetcher = createTestSourceFetcher(() => deferred.promise); + + const { controllerRef, setup } = await renderReviewController([createAlphaFile(fakeFetcher)], { + strictMode: true, + }); + + try { + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + expect(expectValue(controllerRef.current).sourceStatusByFileId["alpha"]?.kind).toBe( + "loading", + ); + + deferred.resolve("strict mode source\n"); + await flush(setup); + + const status = expectValue(controllerRef.current).sourceStatusByFileId["alpha"]; + expect(status?.kind).toBe("loaded"); + if (status?.kind === "loaded") { + expect(status.text).toBe("strict mode source\n"); + } + } finally { + await act(async () => { + setup.renderer.destroy(); + }); + } + }); + test("toggleGap is a no-op for files without a source fetcher", async () => { const { controllerRef, setup } = await renderReviewController([createAlphaFile()]); @@ -660,4 +701,51 @@ describe("useReviewController", () => { }); } }); + + test("a stale rejected source load is logged without repopulating state", async () => { + const originalConsoleError = console.error; + const loggedErrors: unknown[][] = []; + console.error = (...args: unknown[]) => { + loggedErrors.push(args); + }; + + const firstLoad = createTestDeferred(); + const firstFetcher = createTestSourceFetcher(() => firstLoad.promise); + const secondFetcher = createTestSourceFetcher((side) => (side === "new" ? 
"second\n" : null)); + const baseFile = createAlphaFile(); + + const { controllerRef, setFilesRef, setup } = await renderReviewController([ + { ...baseFile, sourceFetcher: firstFetcher }, + ]); + + try { + await flush(setup); + + await act(async () => { + expectValue(controllerRef.current).toggleGap("alpha", "before:0"); + }); + await flush(setup); + + await act(async () => { + expectValue(setFilesRef.current)([{ ...baseFile, sourceFetcher: secondFetcher }]); + }); + await flush(setup); + + await act(async () => { + firstLoad.reject(new Error("stale failure")); + await firstLoad.promise.catch(() => undefined); + }); + await flush(setup); + + expect(expectValue(controllerRef.current).sourceStatusByFileId["alpha"]).toBeUndefined(); + expect(String(loggedErrors[0]?.[0])).toContain("ignored stale new source load failure"); + expect(String(loggedErrors[0]?.[0])).toContain("alpha.ts"); + expect(String(loggedErrors[0]?.[0])).toContain("alpha"); + } finally { + console.error = originalConsoleError; + await act(async () => { + setup.renderer.destroy(); + }); + } + }); }); diff --git a/src/ui/hooks/useReviewController.ts b/src/ui/hooks/useReviewController.ts index 892570e0..399484f9 100644 --- a/src/ui/hooks/useReviewController.ts +++ b/src/ui/hooks/useReviewController.ts @@ -374,17 +374,16 @@ export function useReviewController({ files }: { files: DiffFile[] }): ReviewCon }; const setSettledStatus = (nextStatus: FileSourceStatus) => { - setSourceStatusByFileId((prev) => { - if (!isCurrentRequest()) { - return prev; - } - sourceLoadRequestsRef.current.delete(fileId); - sourceStatusRef.current = { ...sourceStatusRef.current, [fileId]: nextStatus }; - return { - ...prev, - [fileId]: nextStatus, - }; - }); + if (!isCurrentRequest()) { + return; + } + + sourceLoadRequestsRef.current.delete(fileId); + sourceStatusRef.current = { ...sourceStatusRef.current, [fileId]: nextStatus }; + setSourceStatusByFileId((prev) => ({ + ...prev, + [fileId]: nextStatus, + })); }; void 
file.sourceFetcher @@ -394,6 +393,10 @@ export function useReviewController({ files }: { files: DiffFile[] }): ReviewCon }) .catch((error: unknown) => { if (!isCurrentRequest()) { + console.error( + `hunk: ignored stale ${side} source load failure for ${file.path} (${file.id}).`, + error, + ); return; } diff --git a/src/ui/lib/diffSectionGeometry.test.ts b/src/ui/lib/diffSectionGeometry.test.ts index 2758d33c..18aff99b 100644 --- a/src/ui/lib/diffSectionGeometry.test.ts +++ b/src/ui/lib/diffSectionGeometry.test.ts @@ -257,4 +257,65 @@ describe("measureDiffSectionGeometry", () => { expect(wrappedGeometry.bodyHeight).toBe(nowrapGeometry.bodyHeight + 1); }); + + test("same-length source edits invalidate note-aware expanded geometry", () => { + const beforeLines = Array.from({ length: 30 }, (_, index) => `line ${index + 1}`); + const afterLines = [...beforeLines]; + afterLines[4] = "line 5 modified"; + const file = createTestDiffFile({ + after: lines(...afterLines), + before: lines(...beforeLines), + id: "same-length-source", + path: "same-length-source.txt", + }); + const visibleAgentNotes: VisibleAgentNote[] = [ + { + id: "annotation:same-length-source:0", + annotation: { + newRange: [5, 5], + rationale: "Forces note-aware geometry caching.", + summary: "Changed line", + }, + }, + ]; + const expandedKeys = new Set(["trailing:0"]); + const shortSourceLines = [...afterLines]; + const longSourceLines = [...afterLines]; + const shortLine = "short"; + const longLine = "this is a deliberately long expanded source line"; + shortSourceLines[0] += "x".repeat(longLine.length - shortLine.length); + shortSourceLines[8] = shortLine; + longSourceLines[8] = longLine; + const shortSource = lines(...shortSourceLines); + const longSource = lines(...longSourceLines); + + expect(shortSource).toHaveLength(longSource.length); + + const shortGeometry = measureDiffSectionGeometry( + file, + "stack", + true, + theme, + visibleAgentNotes, + 24, + true, + true, + expandedKeys, + { kind: 
"loaded", text: shortSource }, + ); + const longGeometry = measureDiffSectionGeometry( + file, + "stack", + true, + theme, + visibleAgentNotes, + 24, + true, + true, + expandedKeys, + { kind: "loaded", text: longSource }, + ); + + expect(longGeometry.bodyHeight).toBeGreaterThan(shortGeometry.bodyHeight); + }); }); diff --git a/src/ui/lib/diffSectionGeometry.ts b/src/ui/lib/diffSectionGeometry.ts index b2508634..4c149d3c 100644 --- a/src/ui/lib/diffSectionGeometry.ts +++ b/src/ui/lib/diffSectionGeometry.ts @@ -66,6 +66,18 @@ function buildBasePlannedRows( }; } +/** Fingerprint loaded source text so same-length edits invalidate geometry. */ +function sourceTextFingerprint(text: string) { + let hash = 2166136261; + + for (let index = 0; index < text.length; index += 1) { + hash ^= text.charCodeAt(index); + hash = Math.imul(hash, 16777619); + } + + return `${text.length}:${(hash >>> 0).toString(36)}`; +} + /** Stable suffix that captures expansion state for the geometry cache key. */ function expansionCacheKey( expandedKeys: ReadonlySet, @@ -80,7 +92,7 @@ function expansionCacheKey( sourceStatus === undefined ? "pending" : sourceStatus.kind === "loaded" - ? `loaded:${sourceStatus.text.length}` + ? `loaded:${sourceTextFingerprint(sourceStatus.text)}` : sourceStatus.kind; return `:${sortedKeys}:${statusKey}`; }