diff --git a/apps/desktop/package.json b/apps/desktop/package.json index dfa3bde2f8..4c99ed797f 100644 --- a/apps/desktop/package.json +++ b/apps/desktop/package.json @@ -14,13 +14,13 @@ "smoke-test": "node scripts/smoke-test.mjs" }, "dependencies": { + "@t3tools/contracts": "workspace:*", + "@t3tools/shared": "workspace:*", "effect": "catalog:", "electron": "40.6.0", "electron-updater": "^6.6.2" }, "devDependencies": { - "@t3tools/contracts": "workspace:*", - "@t3tools/shared": "workspace:*", "@types/node": "catalog:", "tsdown": "catalog:", "typescript": "catalog:", diff --git a/apps/marketing/src/lib/releases.ts b/apps/marketing/src/lib/releases.ts index 5f3209acf8..090e043536 100644 --- a/apps/marketing/src/lib/releases.ts +++ b/apps/marketing/src/lib/releases.ts @@ -10,7 +10,7 @@ export interface ReleaseAsset { browser_download_url: string; } -export interface Release { +interface Release { tag_name: string; html_url: string; assets: ReleaseAsset[]; diff --git a/apps/server/integration/OrchestrationEngineHarness.integration.ts b/apps/server/integration/OrchestrationEngineHarness.integration.ts index 152ed1d608..40ac7ac306 100644 --- a/apps/server/integration/OrchestrationEngineHarness.integration.ts +++ b/apps/server/integration/OrchestrationEngineHarness.integration.ts @@ -29,11 +29,9 @@ import { GitCore, type GitCoreShape } from "../src/git/Services/GitCore.ts"; import { TextGeneration, type TextGenerationShape } from "../src/git/Services/TextGeneration.ts"; import { OrchestrationCommandReceiptRepositoryLive } from "../src/persistence/Layers/OrchestrationCommandReceipts.ts"; import { OrchestrationEventStoreLive } from "../src/persistence/Layers/OrchestrationEventStore.ts"; -import { ProjectionCheckpointRepositoryLive } from "../src/persistence/Layers/ProjectionCheckpoints.ts"; import { ProjectionPendingApprovalRepositoryLive } from "../src/persistence/Layers/ProjectionPendingApprovals.ts"; import { ProviderSessionRuntimeRepositoryLive } from 
"../src/persistence/Layers/ProviderSessionRuntime.ts"; -import { makeSqlitePersistenceLive } from "../src/persistence/Layers/Sqlite.ts"; -import { ProjectionCheckpointRepository } from "../src/persistence/Services/ProjectionCheckpoints.ts"; +import { makeSqlitePersistenceLive } from "../src/persistence/Layers/Sqlite.testing.ts"; import { ProjectionPendingApprovalRepository } from "../src/persistence/Services/ProjectionPendingApprovals.ts"; import { ProviderUnsupportedError } from "../src/provider/Errors.ts"; import { ProviderAdapterRegistry } from "../src/provider/Services/ProviderAdapterRegistry.ts"; @@ -70,6 +68,10 @@ import { import { deriveServerPaths, ServerConfig } from "../src/config.ts"; import { WorkspaceEntriesLive } from "../src/workspace/Layers/WorkspaceEntries.ts"; import { WorkspacePathsLive } from "../src/workspace/Layers/WorkspacePaths.ts"; +import { + ProjectionCheckpointRepository, + ProjectionCheckpointRepositoryLive, +} from "./support/ProjectionCheckpoints.ts"; function runGit(cwd: string, args: ReadonlyArray) { return execFileSync("git", args, { @@ -126,7 +128,7 @@ function waitFor( read: Effect.Effect, predicate: (value: A) => boolean, description: string, - timeoutMs = 40_000, + timeoutMs = 10_000, ): Effect.Effect { const RETRY_SIGNAL = "wait_for_retry"; const retryIntervalMs = 10; @@ -499,7 +501,7 @@ export const makeOrchestrationIntegrationHarness = ( yield* shutdown; }); - return { + const harness: OrchestrationIntegrationHarness = { rootDir, workspaceDir, dbPath, @@ -515,5 +517,7 @@ export const makeOrchestrationIntegrationHarness = ( waitForPendingApproval, waitForReceipt, dispose, - } satisfies OrchestrationIntegrationHarness; + }; + + return harness; }); diff --git a/apps/server/integration/orchestrationEngine.integration.test.ts b/apps/server/integration/orchestrationEngine.integration.test.ts index a5cc8f8cc6..d2a09a65c7 100644 --- a/apps/server/integration/orchestrationEngine.integration.test.ts +++ 
b/apps/server/integration/orchestrationEngine.integration.test.ts @@ -27,7 +27,7 @@ import { checkpointRefForThreadTurn } from "../src/checkpointing/Utils.ts"; import type { CheckpointDiffFinalizedReceipt, TurnProcessingQuiescedReceipt, -} from "../src/orchestration/Services/RuntimeReceiptBus.ts"; +} from "../src/orchestration/Services/RuntimeReceiptBus.shared.ts"; import * as NodeServices from "@effect/platform-node/NodeServices"; const asMessageId = (value: string): MessageId => MessageId.makeUnsafe(value); diff --git a/apps/server/integration/providerService.integration.test.ts b/apps/server/integration/providerService.integration.test.ts index ef03a1ab5c..08361067dd 100644 --- a/apps/server/integration/providerService.integration.test.ts +++ b/apps/server/integration/providerService.integration.test.ts @@ -15,7 +15,7 @@ import { } from "../src/provider/Services/ProviderService.ts"; import { ServerSettingsService } from "../src/serverSettings.ts"; import { AnalyticsService } from "../src/telemetry/Services/AnalyticsService.ts"; -import { SqlitePersistenceMemory } from "../src/persistence/Layers/Sqlite.ts"; +import { SqlitePersistenceMemory } from "../src/persistence/Layers/Sqlite.testing.ts"; import { ProviderSessionRuntimeRepositoryLive } from "../src/persistence/Layers/ProviderSessionRuntime.ts"; import { diff --git a/apps/server/src/persistence/Layers/ProjectionCheckpoints.ts b/apps/server/integration/support/ProjectionCheckpoints.ts similarity index 77% rename from apps/server/src/persistence/Layers/ProjectionCheckpoints.ts rename to apps/server/integration/support/ProjectionCheckpoints.ts index 26ef8fed1b..4682b68da9 100644 --- a/apps/server/src/persistence/Layers/ProjectionCheckpoints.ts +++ b/apps/server/integration/support/ProjectionCheckpoints.ts @@ -1,17 +1,55 @@ import { OrchestrationCheckpointFile } from "@t3tools/contracts"; +import { Effect, Layer, Option, Schema, ServiceMap, Struct } from "effect"; +import type { Effect as EffectType } from 
"effect"; import * as SqlClient from "effect/unstable/sql/SqlClient"; import * as SqlSchema from "effect/unstable/sql/SqlSchema"; -import { Effect, Layer, Option, Schema, Struct } from "effect"; - -import { toPersistenceDecodeError, toPersistenceSqlError } from "../Errors.ts"; -import { - DeleteByThreadIdInput, - GetByThreadAndTurnCountInput, - ListByThreadIdInput, - ProjectionCheckpoint, + +import { NonNegativeInt, ThreadId } from "@t3tools/contracts"; +import type { ProjectionRepositoryError } from "../../src/persistence/Errors.ts"; +import { toPersistenceDecodeError, toPersistenceSqlError } from "../../src/persistence/Errors.ts"; +import { ProjectionCheckpoint } from "../../src/persistence/Services/ProjectionCheckpoints.ts"; + +const ListByThreadIdInput = Schema.Struct({ + threadId: ThreadId, +}); +type ListByThreadIdInput = typeof ListByThreadIdInput.Type; + +const GetByThreadAndTurnCountInput = Schema.Struct({ + threadId: ThreadId, + checkpointTurnCount: NonNegativeInt, +}); +type GetByThreadAndTurnCountInput = typeof GetByThreadAndTurnCountInput.Type; + +const DeleteByThreadIdInput = Schema.Struct({ + threadId: ThreadId, +}); +type DeleteByThreadIdInput = typeof DeleteByThreadIdInput.Type; + +interface ProjectionCheckpointRepositoryShape { + readonly upsert: ( + row: typeof ProjectionCheckpoint.Type, + ) => EffectType.Effect; + readonly listByThreadId: ( + input: ListByThreadIdInput, + ) => EffectType.Effect< + ReadonlyArray, + ProjectionRepositoryError + >; + readonly getByThreadAndTurnCount: ( + input: GetByThreadAndTurnCountInput, + ) => EffectType.Effect< + Option.Option, + ProjectionRepositoryError + >; + readonly deleteByThreadId: ( + input: DeleteByThreadIdInput, + ) => EffectType.Effect; +} + +export class ProjectionCheckpointRepository extends ServiceMap.Service< ProjectionCheckpointRepository, - type ProjectionCheckpointRepositoryShape, -} from "../Services/ProjectionCheckpoints.ts"; + ProjectionCheckpointRepositoryShape 
+>()("t3/integration/support/ProjectionCheckpoints/ProjectionCheckpointRepository") {} const ProjectionCheckpointDbRowSchema = ProjectionCheckpoint.mapFields( Struct.assign({ @@ -170,7 +208,7 @@ const makeProjectionCheckpointRepository = Effect.gen(function* () { "ProjectionCheckpointRepository.listByThreadId:decodeRows", ), ), - Effect.map((rows) => rows as ReadonlyArray>), + Effect.map((rows) => rows as ReadonlyArray), ); const getByThreadAndTurnCount: ProjectionCheckpointRepositoryShape["getByThreadAndTurnCount"] = ( @@ -186,8 +224,7 @@ const makeProjectionCheckpointRepository = Effect.gen(function* () { Effect.flatMap((rowOption) => Option.match(rowOption, { onNone: () => Effect.succeed(Option.none()), - onSome: (row) => - Effect.succeed(Option.some(row as Schema.Schema.Type)), + onSome: (row) => Effect.succeed(Option.some(row as typeof ProjectionCheckpoint.Type)), }), ), ); diff --git a/apps/server/package.json b/apps/server/package.json index e59c7c208c..a107da7c36 100644 --- a/apps/server/package.json +++ b/apps/server/package.json @@ -28,6 +28,8 @@ "@effect/platform-node": "catalog:", "@effect/sql-sqlite-bun": "catalog:", "@pierre/diffs": "^1.1.0-beta.16", + "@t3tools/contracts": "workspace:*", + "@t3tools/shared": "workspace:*", "effect": "catalog:", "node-pty": "^1.1.0", "open": "^10.1.0" @@ -35,9 +37,6 @@ "devDependencies": { "@effect/language-service": "catalog:", "@effect/vitest": "catalog:", - "@t3tools/contracts": "workspace:*", - "@t3tools/shared": "workspace:*", - "@t3tools/web": "workspace:*", "@types/bun": "catalog:", "@types/node": "catalog:", "tsdown": "catalog:", diff --git a/apps/server/src/bootstrap.shared.ts b/apps/server/src/bootstrap.shared.ts new file mode 100644 index 0000000000..f8a597f955 --- /dev/null +++ b/apps/server/src/bootstrap.shared.ts @@ -0,0 +1,12 @@ +export function resolveFdPath( + fd: number, + platform: NodeJS.Platform = process.platform, +): string | undefined { + if (platform === "linux") { + return 
`/proc/self/fd/${fd}`; + } + if (platform === "win32") { + return undefined; + } + return `/dev/fd/${fd}`; +} diff --git a/apps/server/src/bootstrap.test.ts b/apps/server/src/bootstrap.test.ts index 3fce6af9c4..3e8b60d5ce 100644 --- a/apps/server/src/bootstrap.test.ts +++ b/apps/server/src/bootstrap.test.ts @@ -10,7 +10,8 @@ import * as Fiber from "effect/Fiber"; import { TestClock } from "effect/testing"; import { vi } from "vitest"; -import { readBootstrapEnvelope, resolveFdPath } from "./bootstrap"; +import { readBootstrapEnvelope } from "./bootstrap"; +import { resolveFdPath } from "./bootstrap.shared"; import { assertNone, assertSome } from "@effect/vitest/utils"; const openSyncInterceptor = vi.hoisted(() => ({ failPath: null as string | null })); diff --git a/apps/server/src/bootstrap.ts b/apps/server/src/bootstrap.ts index 0fb1352268..5bb83d8cfe 100644 --- a/apps/server/src/bootstrap.ts +++ b/apps/server/src/bootstrap.ts @@ -5,6 +5,7 @@ import type { Readable } from "node:stream"; import { Data, Effect, Option, Predicate, Result, Schema } from "effect"; import { decodeJsonResult } from "@t3tools/shared/schemaJson"; +import { resolveFdPath } from "./bootstrap.shared"; class BootstrapError extends Data.TaggedError("BootstrapError")<{ readonly message: string; @@ -158,16 +159,3 @@ const isBootstrapFdPathDuplicationError = Predicate.compose( Predicate.hasProperty("code"), (_) => _.code === "ENXIO" || _.code === "EINVAL" || _.code === "EPERM", ); - -export function resolveFdPath( - fd: number, - platform: NodeJS.Platform = process.platform, -): string | undefined { - if (platform === "linux") { - return `/proc/self/fd/${fd}`; - } - if (platform === "win32") { - return undefined; - } - return `/dev/fd/${fd}`; -} diff --git a/apps/server/src/checkpointing/Diffs.ts b/apps/server/src/checkpointing/Diffs.ts index c2f867b96e..7160fc453b 100644 --- a/apps/server/src/checkpointing/Diffs.ts +++ b/apps/server/src/checkpointing/Diffs.ts @@ -1,6 +1,6 @@ import { 
parsePatchFiles } from "@pierre/diffs"; -export interface TurnDiffFileSummary { +interface TurnDiffFileSummary { readonly path: string; readonly additions: number; readonly deletions: number; diff --git a/apps/server/src/checkpointing/Utils.ts b/apps/server/src/checkpointing/Utils.ts index 3cd92f8510..02169a182d 100644 --- a/apps/server/src/checkpointing/Utils.ts +++ b/apps/server/src/checkpointing/Utils.ts @@ -1,7 +1,7 @@ import { Encoding } from "effect"; import { CheckpointRef, ProjectId, type ThreadId } from "@t3tools/contracts"; -export const CHECKPOINT_REFS_PREFIX = "refs/t3/checkpoints"; +const CHECKPOINT_REFS_PREFIX = "refs/t3/checkpoints"; export function checkpointRefForThreadTurn(threadId: ThreadId, turnCount: number): CheckpointRef { return CheckpointRef.makeUnsafe( diff --git a/apps/server/src/cli-config.test.ts b/apps/server/src/cli-config.test.ts index 038d22e8f3..4eae38c474 100644 --- a/apps/server/src/cli-config.test.ts +++ b/apps/server/src/cli-config.test.ts @@ -6,7 +6,7 @@ import { ConfigProvider, Effect, FileSystem, Layer, Option, Path } from "effect" import { NetService } from "@t3tools/shared/Net"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { deriveServerPaths } from "./config"; -import { resolveServerConfig } from "./cli"; +import { resolveServerConfig } from "./cli.logic"; it.layer(NodeServices.layer)("cli config resolution", (it) => { const defaultObservabilityConfig = { @@ -153,6 +153,68 @@ it.layer(NodeServices.layer)("cli config resolution", (it) => { }), ); + it.effect("uses explicit false CLI boolean flags over env and bootstrap values", () => + Effect.gen(function* () { + const { join } = yield* Path.Path; + const baseDir = join(os.tmpdir(), "t3-cli-config-false-flags-base"); + const fd = yield* openBootstrapFd({ + noBrowser: true, + autoBootstrapProjectFromCwd: true, + logWebSocketEvents: true, + }); + const derivedPaths = yield* deriveServerPaths(baseDir, new URL("http://127.0.0.1:4173")); + + const 
resolved = yield* resolveServerConfig( + { + mode: Option.some("web"), + port: Option.some(8788), + host: Option.some("127.0.0.1"), + baseDir: Option.some(baseDir), + devUrl: Option.some(new URL("http://127.0.0.1:4173")), + noBrowser: Option.some(false), + authToken: Option.none(), + bootstrapFd: Option.none(), + autoBootstrapProjectFromCwd: Option.some(false), + logWebSocketEvents: Option.some(false), + }, + Option.none(), + ).pipe( + Effect.provide( + Layer.mergeAll( + ConfigProvider.layer( + ConfigProvider.fromEnv({ + env: { + T3CODE_BOOTSTRAP_FD: String(fd), + T3CODE_NO_BROWSER: "true", + T3CODE_AUTO_BOOTSTRAP_PROJECT_FROM_CWD: "true", + T3CODE_LOG_WS_EVENTS: "true", + }, + }), + ), + NetService.layer, + ), + ), + ); + + expect(resolved).toEqual({ + logLevel: "Info", + ...defaultObservabilityConfig, + mode: "web", + port: 8788, + cwd: process.cwd(), + baseDir, + ...derivedPaths, + host: "127.0.0.1", + staticDir: undefined, + devUrl: new URL("http://127.0.0.1:4173"), + noBrowser: false, + authToken: undefined, + autoBootstrapProjectFromCwd: false, + logWebSocketEvents: false, + }); + }), + ); + it.effect("uses bootstrap envelope values as fallbacks when flags and env are absent", () => Effect.gen(function* () { const { join } = yield* Path.Path; diff --git a/apps/server/src/cli.logic.ts b/apps/server/src/cli.logic.ts new file mode 100644 index 0000000000..31cfdffa65 --- /dev/null +++ b/apps/server/src/cli.logic.ts @@ -0,0 +1,273 @@ +import { NetService } from "@t3tools/shared/Net"; +import { parsePersistedServerObservabilitySettings } from "@t3tools/shared/serverSettings"; +import { Config, Effect, FileSystem, LogLevel, Option, Path, Schema } from "effect"; + +import { + DEFAULT_PORT, + deriveServerPaths, + ensureServerDirectories, + resolveStaticDir, + RuntimeMode, + type ServerConfigShape, +} from "./config"; +import { readBootstrapEnvelope } from "./bootstrap"; +import { resolveBaseDir } from "./os-jank"; + +export const PortSchema = 
Schema.Int.check(Schema.isBetween({ minimum: 1, maximum: 65535 })); + +const BootstrapEnvelopeSchema = Schema.Struct({ + mode: Schema.optional(RuntimeMode), + port: Schema.optional(PortSchema), + host: Schema.optional(Schema.String), + t3Home: Schema.optional(Schema.String), + devUrl: Schema.optional(Schema.URLFromString), + noBrowser: Schema.optional(Schema.Boolean), + authToken: Schema.optional(Schema.String), + autoBootstrapProjectFromCwd: Schema.optional(Schema.Boolean), + logWebSocketEvents: Schema.optional(Schema.Boolean), + otlpTracesUrl: Schema.optional(Schema.String), + otlpMetricsUrl: Schema.optional(Schema.String), +}); + +const EnvServerConfig = Config.all({ + logLevel: Config.logLevel("T3CODE_LOG_LEVEL").pipe(Config.withDefault("Info")), + traceMinLevel: Config.logLevel("T3CODE_TRACE_MIN_LEVEL").pipe(Config.withDefault("Info")), + traceTimingEnabled: Config.boolean("T3CODE_TRACE_TIMING_ENABLED").pipe(Config.withDefault(true)), + traceFile: Config.string("T3CODE_TRACE_FILE").pipe( + Config.option, + Config.map(Option.getOrUndefined), + ), + traceMaxBytes: Config.int("T3CODE_TRACE_MAX_BYTES").pipe(Config.withDefault(10 * 1024 * 1024)), + traceMaxFiles: Config.int("T3CODE_TRACE_MAX_FILES").pipe(Config.withDefault(10)), + traceBatchWindowMs: Config.int("T3CODE_TRACE_BATCH_WINDOW_MS").pipe(Config.withDefault(200)), + otlpTracesUrl: Config.string("T3CODE_OTLP_TRACES_URL").pipe( + Config.option, + Config.map(Option.getOrUndefined), + ), + otlpMetricsUrl: Config.string("T3CODE_OTLP_METRICS_URL").pipe( + Config.option, + Config.map(Option.getOrUndefined), + ), + otlpExportIntervalMs: Config.int("T3CODE_OTLP_EXPORT_INTERVAL_MS").pipe( + Config.withDefault(10_000), + ), + otlpServiceName: Config.string("T3CODE_OTLP_SERVICE_NAME").pipe(Config.withDefault("t3-server")), + mode: Config.schema(RuntimeMode, "T3CODE_MODE").pipe( + Config.option, + Config.map(Option.getOrUndefined), + ), + port: Config.port("T3CODE_PORT").pipe(Config.option, 
Config.map(Option.getOrUndefined)), + host: Config.string("T3CODE_HOST").pipe(Config.option, Config.map(Option.getOrUndefined)), + t3Home: Config.string("T3CODE_HOME").pipe(Config.option, Config.map(Option.getOrUndefined)), + devUrl: Config.url("VITE_DEV_SERVER_URL").pipe(Config.option, Config.map(Option.getOrUndefined)), + noBrowser: Config.boolean("T3CODE_NO_BROWSER").pipe( + Config.option, + Config.map(Option.getOrUndefined), + ), + authToken: Config.string("T3CODE_AUTH_TOKEN").pipe( + Config.option, + Config.map(Option.getOrUndefined), + ), + bootstrapFd: Config.int("T3CODE_BOOTSTRAP_FD").pipe( + Config.option, + Config.map(Option.getOrUndefined), + ), + autoBootstrapProjectFromCwd: Config.boolean("T3CODE_AUTO_BOOTSTRAP_PROJECT_FROM_CWD").pipe( + Config.option, + Config.map(Option.getOrUndefined), + ), + logWebSocketEvents: Config.boolean("T3CODE_LOG_WS_EVENTS").pipe( + Config.option, + Config.map(Option.getOrUndefined), + ), +}); + +interface CliServerFlags { + readonly mode: Option.Option; + readonly port: Option.Option; + readonly host: Option.Option; + readonly baseDir: Option.Option; + readonly devUrl: Option.Option; + readonly noBrowser: Option.Option; + readonly authToken: Option.Option; + readonly bootstrapFd: Option.Option; + readonly autoBootstrapProjectFromCwd: Option.Option; + readonly logWebSocketEvents: Option.Option; +} + +const resolveBooleanFlag = (flag: Option.Option, envValue: boolean) => + Option.getOrElse(flag, () => envValue); + +const resolveOptionPrecedence = ( + ...values: ReadonlyArray> +): Option.Option => Option.firstSomeOf(values); + +const loadPersistedObservabilitySettings = Effect.fn(function* (settingsPath: string) { + const fs = yield* FileSystem.FileSystem; + const exists = yield* fs.exists(settingsPath).pipe(Effect.orElseSucceed(() => false)); + if (!exists) { + return { otlpTracesUrl: undefined, otlpMetricsUrl: undefined }; + } + + const raw = yield* fs.readFileString(settingsPath).pipe(Effect.orElseSucceed(() => "")); + 
return parsePersistedServerObservabilitySettings(raw); +}); + +export const resolveServerConfig = ( + flags: CliServerFlags, + cliLogLevel: Option.Option, +) => + Effect.gen(function* () { + const { findAvailablePort } = yield* NetService; + const path = yield* Path.Path; + const fs = yield* FileSystem.FileSystem; + const env = yield* EnvServerConfig; + const bootstrapFd = Option.getOrUndefined(flags.bootstrapFd) ?? env.bootstrapFd; + const bootstrapEnvelope = + bootstrapFd !== undefined + ? yield* readBootstrapEnvelope(BootstrapEnvelopeSchema, bootstrapFd) + : Option.none(); + + const mode: RuntimeMode = Option.getOrElse( + resolveOptionPrecedence( + flags.mode, + Option.fromUndefinedOr(env.mode), + Option.flatMap(bootstrapEnvelope, (bootstrap) => Option.fromUndefinedOr(bootstrap.mode)), + ), + () => "web", + ); + + const port = yield* Option.match( + resolveOptionPrecedence( + flags.port, + Option.fromUndefinedOr(env.port), + Option.flatMap(bootstrapEnvelope, (bootstrap) => Option.fromUndefinedOr(bootstrap.port)), + ), + { + onSome: (value) => Effect.succeed(value), + onNone: () => + mode === "desktop" ? Effect.succeed(DEFAULT_PORT) : findAvailablePort(DEFAULT_PORT), + }, + ); + const devUrl = Option.getOrElse( + resolveOptionPrecedence( + flags.devUrl, + Option.fromUndefinedOr(env.devUrl), + Option.flatMap(bootstrapEnvelope, (bootstrap) => Option.fromUndefinedOr(bootstrap.devUrl)), + ), + () => undefined, + ); + const baseDir = yield* resolveBaseDir( + Option.getOrUndefined( + resolveOptionPrecedence( + flags.baseDir, + Option.fromUndefinedOr(env.t3Home), + Option.flatMap(bootstrapEnvelope, (bootstrap) => + Option.fromUndefinedOr(bootstrap.t3Home), + ), + ), + ), + ); + const derivedPaths = yield* deriveServerPaths(baseDir, devUrl); + yield* ensureServerDirectories(derivedPaths); + const persistedObservabilitySettings = yield* loadPersistedObservabilitySettings( + derivedPaths.settingsPath, + ); + const serverTracePath = env.traceFile ?? 
derivedPaths.serverTracePath; + yield* fs.makeDirectory(path.dirname(serverTracePath), { recursive: true }); + const noBrowser = resolveBooleanFlag( + flags.noBrowser, + Option.getOrElse( + resolveOptionPrecedence( + Option.fromUndefinedOr(env.noBrowser), + Option.flatMap(bootstrapEnvelope, (bootstrap) => + Option.fromUndefinedOr(bootstrap.noBrowser), + ), + ), + () => mode === "desktop", + ), + ); + const authToken = Option.getOrUndefined( + resolveOptionPrecedence( + flags.authToken, + Option.fromUndefinedOr(env.authToken), + Option.flatMap(bootstrapEnvelope, (bootstrap) => + Option.fromUndefinedOr(bootstrap.authToken), + ), + ), + ); + const autoBootstrapProjectFromCwd = resolveBooleanFlag( + flags.autoBootstrapProjectFromCwd, + Option.getOrElse( + resolveOptionPrecedence( + Option.fromUndefinedOr(env.autoBootstrapProjectFromCwd), + Option.flatMap(bootstrapEnvelope, (bootstrap) => + Option.fromUndefinedOr(bootstrap.autoBootstrapProjectFromCwd), + ), + ), + () => mode === "web", + ), + ); + const logWebSocketEvents = resolveBooleanFlag( + flags.logWebSocketEvents, + Option.getOrElse( + resolveOptionPrecedence( + Option.fromUndefinedOr(env.logWebSocketEvents), + Option.flatMap(bootstrapEnvelope, (bootstrap) => + Option.fromUndefinedOr(bootstrap.logWebSocketEvents), + ), + ), + () => Boolean(devUrl), + ), + ); + const staticDir = devUrl ? undefined : yield* resolveStaticDir(); + const host = Option.getOrElse( + resolveOptionPrecedence( + flags.host, + Option.fromUndefinedOr(env.host), + Option.flatMap(bootstrapEnvelope, (bootstrap) => Option.fromUndefinedOr(bootstrap.host)), + ), + () => (mode === "desktop" ? 
"127.0.0.1" : undefined), + ); + const logLevel = Option.getOrElse(cliLogLevel, () => env.logLevel); + + return { + logLevel, + traceMinLevel: env.traceMinLevel, + traceTimingEnabled: env.traceTimingEnabled, + traceBatchWindowMs: env.traceBatchWindowMs, + traceMaxBytes: env.traceMaxBytes, + traceMaxFiles: env.traceMaxFiles, + otlpTracesUrl: + env.otlpTracesUrl ?? + Option.getOrUndefined( + Option.flatMap(bootstrapEnvelope, (bootstrap) => + Option.fromUndefinedOr(bootstrap.otlpTracesUrl), + ), + ) ?? + persistedObservabilitySettings.otlpTracesUrl, + otlpMetricsUrl: + env.otlpMetricsUrl ?? + Option.getOrUndefined( + Option.flatMap(bootstrapEnvelope, (bootstrap) => + Option.fromUndefinedOr(bootstrap.otlpMetricsUrl), + ), + ) ?? + persistedObservabilitySettings.otlpMetricsUrl, + otlpExportIntervalMs: env.otlpExportIntervalMs, + otlpServiceName: env.otlpServiceName, + mode, + port, + cwd: process.cwd(), + baseDir, + ...derivedPaths, + serverTracePath, + host, + staticDir, + devUrl, + noBrowser, + authToken, + autoBootstrapProjectFromCwd, + logWebSocketEvents, + } satisfies ServerConfigShape; + }); diff --git a/apps/server/src/cli.ts b/apps/server/src/cli.ts index 09c17278a5..5efc61156a 100644 --- a/apps/server/src/cli.ts +++ b/apps/server/src/cli.ts @@ -1,37 +1,10 @@ -import { NetService } from "@t3tools/shared/Net"; -import { parsePersistedServerObservabilitySettings } from "@t3tools/shared/serverSettings"; -import { Config, Effect, FileSystem, LogLevel, Option, Path, Schema } from "effect"; +import { Effect, Schema } from "effect"; import { Command, Flag, GlobalFlag } from "effect/unstable/cli"; -import { - DEFAULT_PORT, - deriveServerPaths, - ensureServerDirectories, - resolveStaticDir, - ServerConfig, - RuntimeMode, - type ServerConfigShape, -} from "./config"; -import { readBootstrapEnvelope } from "./bootstrap"; -import { resolveBaseDir } from "./os-jank"; +import { ServerConfig, RuntimeMode } from "./config"; +import { PortSchema, resolveServerConfig } from 
"./cli.logic"; import { runServer } from "./server"; -const PortSchema = Schema.Int.check(Schema.isBetween({ minimum: 1, maximum: 65535 })); - -const BootstrapEnvelopeSchema = Schema.Struct({ - mode: Schema.optional(RuntimeMode), - port: Schema.optional(PortSchema), - host: Schema.optional(Schema.String), - t3Home: Schema.optional(Schema.String), - devUrl: Schema.optional(Schema.URLFromString), - noBrowser: Schema.optional(Schema.Boolean), - authToken: Schema.optional(Schema.String), - autoBootstrapProjectFromCwd: Schema.optional(Schema.Boolean), - logWebSocketEvents: Schema.optional(Schema.Boolean), - otlpTracesUrl: Schema.optional(Schema.String), - otlpMetricsUrl: Schema.optional(Schema.String), -}); - const modeFlag = Flag.choice("mode", RuntimeMode.literals).pipe( Flag.withDescription("Runtime mode. `desktop` keeps loopback defaults unless overridden."), Flag.optional, @@ -82,255 +55,6 @@ const logWebSocketEventsFlag = Flag.boolean("log-websocket-events").pipe( Flag.optional, ); -const EnvServerConfig = Config.all({ - logLevel: Config.logLevel("T3CODE_LOG_LEVEL").pipe(Config.withDefault("Info")), - traceMinLevel: Config.logLevel("T3CODE_TRACE_MIN_LEVEL").pipe(Config.withDefault("Info")), - traceTimingEnabled: Config.boolean("T3CODE_TRACE_TIMING_ENABLED").pipe(Config.withDefault(true)), - traceFile: Config.string("T3CODE_TRACE_FILE").pipe( - Config.option, - Config.map(Option.getOrUndefined), - ), - traceMaxBytes: Config.int("T3CODE_TRACE_MAX_BYTES").pipe(Config.withDefault(10 * 1024 * 1024)), - traceMaxFiles: Config.int("T3CODE_TRACE_MAX_FILES").pipe(Config.withDefault(10)), - traceBatchWindowMs: Config.int("T3CODE_TRACE_BATCH_WINDOW_MS").pipe(Config.withDefault(200)), - otlpTracesUrl: Config.string("T3CODE_OTLP_TRACES_URL").pipe( - Config.option, - Config.map(Option.getOrUndefined), - ), - otlpMetricsUrl: Config.string("T3CODE_OTLP_METRICS_URL").pipe( - Config.option, - Config.map(Option.getOrUndefined), - ), - otlpExportIntervalMs: 
Config.int("T3CODE_OTLP_EXPORT_INTERVAL_MS").pipe( - Config.withDefault(10_000), - ), - otlpServiceName: Config.string("T3CODE_OTLP_SERVICE_NAME").pipe(Config.withDefault("t3-server")), - mode: Config.schema(RuntimeMode, "T3CODE_MODE").pipe( - Config.option, - Config.map(Option.getOrUndefined), - ), - port: Config.port("T3CODE_PORT").pipe(Config.option, Config.map(Option.getOrUndefined)), - host: Config.string("T3CODE_HOST").pipe(Config.option, Config.map(Option.getOrUndefined)), - t3Home: Config.string("T3CODE_HOME").pipe(Config.option, Config.map(Option.getOrUndefined)), - devUrl: Config.url("VITE_DEV_SERVER_URL").pipe(Config.option, Config.map(Option.getOrUndefined)), - noBrowser: Config.boolean("T3CODE_NO_BROWSER").pipe( - Config.option, - Config.map(Option.getOrUndefined), - ), - authToken: Config.string("T3CODE_AUTH_TOKEN").pipe( - Config.option, - Config.map(Option.getOrUndefined), - ), - bootstrapFd: Config.int("T3CODE_BOOTSTRAP_FD").pipe( - Config.option, - Config.map(Option.getOrUndefined), - ), - autoBootstrapProjectFromCwd: Config.boolean("T3CODE_AUTO_BOOTSTRAP_PROJECT_FROM_CWD").pipe( - Config.option, - Config.map(Option.getOrUndefined), - ), - logWebSocketEvents: Config.boolean("T3CODE_LOG_WS_EVENTS").pipe( - Config.option, - Config.map(Option.getOrUndefined), - ), -}); - -interface CliServerFlags { - readonly mode: Option.Option; - readonly port: Option.Option; - readonly host: Option.Option; - readonly baseDir: Option.Option; - readonly devUrl: Option.Option; - readonly noBrowser: Option.Option; - readonly authToken: Option.Option; - readonly bootstrapFd: Option.Option; - readonly autoBootstrapProjectFromCwd: Option.Option; - readonly logWebSocketEvents: Option.Option; -} - -const resolveBooleanFlag = (flag: Option.Option, envValue: boolean) => - Option.getOrElse(Option.filter(flag, Boolean), () => envValue); - -const resolveOptionPrecedence = ( - ...values: ReadonlyArray> -): Option.Option => Option.firstSomeOf(values); - -const 
loadPersistedObservabilitySettings = Effect.fn(function* (settingsPath: string) { - const fs = yield* FileSystem.FileSystem; - const exists = yield* fs.exists(settingsPath).pipe(Effect.orElseSucceed(() => false)); - if (!exists) { - return { otlpTracesUrl: undefined, otlpMetricsUrl: undefined }; - } - - const raw = yield* fs.readFileString(settingsPath).pipe(Effect.orElseSucceed(() => "")); - return parsePersistedServerObservabilitySettings(raw); -}); - -export const resolveServerConfig = ( - flags: CliServerFlags, - cliLogLevel: Option.Option, -) => - Effect.gen(function* () { - const { findAvailablePort } = yield* NetService; - const path = yield* Path.Path; - const fs = yield* FileSystem.FileSystem; - const env = yield* EnvServerConfig; - const bootstrapFd = Option.getOrUndefined(flags.bootstrapFd) ?? env.bootstrapFd; - const bootstrapEnvelope = - bootstrapFd !== undefined - ? yield* readBootstrapEnvelope(BootstrapEnvelopeSchema, bootstrapFd) - : Option.none(); - - const mode: RuntimeMode = Option.getOrElse( - resolveOptionPrecedence( - flags.mode, - Option.fromUndefinedOr(env.mode), - Option.flatMap(bootstrapEnvelope, (bootstrap) => Option.fromUndefinedOr(bootstrap.mode)), - ), - () => "web", - ); - - const port = yield* Option.match( - resolveOptionPrecedence( - flags.port, - Option.fromUndefinedOr(env.port), - Option.flatMap(bootstrapEnvelope, (bootstrap) => Option.fromUndefinedOr(bootstrap.port)), - ), - { - onSome: (value) => Effect.succeed(value), - onNone: () => { - if (mode === "desktop") { - return Effect.succeed(DEFAULT_PORT); - } - return findAvailablePort(DEFAULT_PORT); - }, - }, - ); - const devUrl = Option.getOrElse( - resolveOptionPrecedence( - flags.devUrl, - Option.fromUndefinedOr(env.devUrl), - Option.flatMap(bootstrapEnvelope, (bootstrap) => Option.fromUndefinedOr(bootstrap.devUrl)), - ), - () => undefined, - ); - const baseDir = yield* resolveBaseDir( - Option.getOrUndefined( - resolveOptionPrecedence( - flags.baseDir, - 
Option.fromUndefinedOr(env.t3Home), - Option.flatMap(bootstrapEnvelope, (bootstrap) => - Option.fromUndefinedOr(bootstrap.t3Home), - ), - ), - ), - ); - const derivedPaths = yield* deriveServerPaths(baseDir, devUrl); - yield* ensureServerDirectories(derivedPaths); - const persistedObservabilitySettings = yield* loadPersistedObservabilitySettings( - derivedPaths.settingsPath, - ); - const serverTracePath = env.traceFile ?? derivedPaths.serverTracePath; - yield* fs.makeDirectory(path.dirname(serverTracePath), { recursive: true }); - const noBrowser = resolveBooleanFlag( - flags.noBrowser, - Option.getOrElse( - resolveOptionPrecedence( - Option.fromUndefinedOr(env.noBrowser), - Option.flatMap(bootstrapEnvelope, (bootstrap) => - Option.fromUndefinedOr(bootstrap.noBrowser), - ), - ), - () => mode === "desktop", - ), - ); - const authToken = Option.getOrUndefined( - resolveOptionPrecedence( - flags.authToken, - Option.fromUndefinedOr(env.authToken), - Option.flatMap(bootstrapEnvelope, (bootstrap) => - Option.fromUndefinedOr(bootstrap.authToken), - ), - ), - ); - const autoBootstrapProjectFromCwd = resolveBooleanFlag( - flags.autoBootstrapProjectFromCwd, - Option.getOrElse( - resolveOptionPrecedence( - Option.fromUndefinedOr(env.autoBootstrapProjectFromCwd), - Option.flatMap(bootstrapEnvelope, (bootstrap) => - Option.fromUndefinedOr(bootstrap.autoBootstrapProjectFromCwd), - ), - ), - () => mode === "web", - ), - ); - const logWebSocketEvents = resolveBooleanFlag( - flags.logWebSocketEvents, - Option.getOrElse( - resolveOptionPrecedence( - Option.fromUndefinedOr(env.logWebSocketEvents), - Option.flatMap(bootstrapEnvelope, (bootstrap) => - Option.fromUndefinedOr(bootstrap.logWebSocketEvents), - ), - ), - () => Boolean(devUrl), - ), - ); - const staticDir = devUrl ? 
undefined : yield* resolveStaticDir(); - const host = Option.getOrElse( - resolveOptionPrecedence( - flags.host, - Option.fromUndefinedOr(env.host), - Option.flatMap(bootstrapEnvelope, (bootstrap) => Option.fromUndefinedOr(bootstrap.host)), - ), - () => (mode === "desktop" ? "127.0.0.1" : undefined), - ); - const logLevel = Option.getOrElse(cliLogLevel, () => env.logLevel); - - const config: ServerConfigShape = { - logLevel, - traceMinLevel: env.traceMinLevel, - traceTimingEnabled: env.traceTimingEnabled, - traceBatchWindowMs: env.traceBatchWindowMs, - traceMaxBytes: env.traceMaxBytes, - traceMaxFiles: env.traceMaxFiles, - otlpTracesUrl: - env.otlpTracesUrl ?? - Option.getOrUndefined( - Option.flatMap(bootstrapEnvelope, (bootstrap) => - Option.fromUndefinedOr(bootstrap.otlpTracesUrl), - ), - ) ?? - persistedObservabilitySettings.otlpTracesUrl, - otlpMetricsUrl: - env.otlpMetricsUrl ?? - Option.getOrUndefined( - Option.flatMap(bootstrapEnvelope, (bootstrap) => - Option.fromUndefinedOr(bootstrap.otlpMetricsUrl), - ), - ) ?? 
- persistedObservabilitySettings.otlpMetricsUrl, - otlpExportIntervalMs: env.otlpExportIntervalMs, - otlpServiceName: env.otlpServiceName, - mode, - port, - cwd: process.cwd(), - baseDir, - ...derivedPaths, - serverTracePath, - host, - staticDir, - devUrl, - noBrowser, - authToken, - autoBootstrapProjectFromCwd, - logWebSocketEvents, - }; - - return config; - }); - const commandFlags = { mode: modeFlag, port: portFlag, diff --git a/apps/server/src/codexAppServerManager.shared.ts b/apps/server/src/codexAppServerManager.shared.ts new file mode 100644 index 0000000000..bc64113cc4 --- /dev/null +++ b/apps/server/src/codexAppServerManager.shared.ts @@ -0,0 +1,210 @@ +import { type ProviderInteractionMode, ThreadId } from "@t3tools/contracts"; +import { normalizeModelSlug } from "@t3tools/shared/model"; + +export interface CodexAppServerSendTurnInput { + readonly threadId: ThreadId; + readonly input?: string; + readonly attachments?: ReadonlyArray<{ type: "image"; url: string }>; + readonly model?: string; + readonly serviceTier?: string | null; + readonly effort?: string; + readonly interactionMode?: ProviderInteractionMode; +} + +const ANSI_ESCAPE_CHAR = String.fromCharCode(27); +const ANSI_ESCAPE_REGEX = new RegExp(`${ANSI_ESCAPE_CHAR}\\[[0-9;]*m`, "g"); +const CODEX_STDERR_LOG_REGEX = + /^\d{4}-\d{2}-\d{2}T\S+\s+(TRACE|DEBUG|INFO|WARN|ERROR)\s+\S+:\s+(.*)$/; +const BENIGN_ERROR_LOG_SNIPPETS = [ + "state db missing rollout path for thread", + "state db record_discrepancy: find_thread_path_by_id_str_in_subdir, falling_back", +]; +const RECOVERABLE_THREAD_RESUME_ERROR_SNIPPETS = [ + "not found", + "missing thread", + "no such thread", + "unknown thread", + "does not exist", +]; + +export const CODEX_PLAN_MODE_DEVELOPER_INSTRUCTIONS = `# Plan Mode (Conversational) + +You work in 3 phases, and you should *chat your way* to a great plan before finalizing it. 
A great plan is very detailed-intent- and implementation-wise-so that it can be handed to another engineer or agent to be implemented right away. It must be **decision complete**, where the implementer does not need to make any decisions. + +## Mode rules (strict) + +You are in **Plan Mode** until a developer message explicitly ends it. + +Plan Mode is not changed by user intent, tone, or imperative language. If a user asks for execution while still in Plan Mode, treat it as a request to **plan the execution**, not perform it. + +## Plan Mode vs update_plan tool + +Plan Mode is a collaboration mode that can involve requesting user input and eventually issuing a \`\` block. + +Separately, \`update_plan\` is a checklist/progress/TODOs tool; it does not enter or exit Plan Mode. Do not confuse it with Plan mode or try to use it while in Plan mode. If you try to use \`update_plan\` in Plan mode, it will return an error. + +## Execution vs. mutation in Plan Mode + +You may explore and execute **non-mutating** actions that improve the plan. You must not perform **mutating** actions. + +### Allowed (non-mutating, plan-improving) + +Actions that gather truth, reduce ambiguity, or validate feasibility without changing repo-tracked state. Examples: + +* Reading or searching files, configs, schemas, types, manifests, and docs +* Static analysis, inspection, and repo exploration +* Dry-run style commands when they do not edit repo-tracked files +* Tests, builds, or checks that may write to caches or build artifacts (for example, \`target/\`, \`.cache/\`, or snapshots) so long as they do not edit repo-tracked files + +### Not allowed (mutating, plan-executing) + +Actions that implement the plan or change repo-tracked state. 
Examples: + +* Editing or writing files +* Running formatters or linters that rewrite files +* Applying patches, migrations, or codegen that updates repo-tracked files +* Side-effectful commands whose purpose is to carry out the plan rather than refine it + +When in doubt: if the action would reasonably be described as "doing the work" rather than "planning the work," do not do it. + +## PHASE 1 - Ground in the environment (explore first, ask second) + +Begin by grounding yourself in the actual environment. Eliminate unknowns in the prompt by discovering facts, not by asking the user. Resolve all questions that can be answered through exploration or inspection. Identify missing or ambiguous details only if they cannot be derived from the environment. Silent exploration between turns is allowed and encouraged. + +Before asking the user any question, perform at least one targeted non-mutating exploration pass (for example: search relevant files, inspect likely entrypoints/configs, confirm current implementation shape), unless no local environment/repo is available. + +Exception: you may ask clarifying questions about the user's prompt before exploring, ONLY if there are obvious ambiguities or contradictions in the prompt itself. However, if ambiguity might be resolved by exploring, always prefer exploring first. + +Do not ask questions that can be answered from the repo or system (for example, "where is this struct?" or "which UI component should we use?" when exploration can make it clear). Only ask once you have exhausted reasonable non-mutating exploration. + +## PHASE 2 - Intent chat (what they actually want) + +* Keep asking until you can clearly state: goal + success criteria, audience, in/out of scope, constraints, current state, and the key preferences/tradeoffs. +* Bias toward questions over guessing: if any high-impact ambiguity remains, do NOT plan yet-ask. 
+ +## PHASE 3 - Implementation chat (what/how we'll build) + +* Once intent is stable, keep asking until the spec is decision complete: approach, interfaces (APIs/schemas/I/O), data flow, edge cases/failure modes, testing + acceptance criteria, rollout/monitoring, and any migrations/compat constraints. + +## Asking questions + +Critical rules: + +* Strongly prefer using the \`request_user_input\` tool to ask any questions. +* Offer only meaningful multiple-choice options; don't include filler choices that are obviously wrong or irrelevant. +* In rare cases where an unavoidable, important question can't be expressed with reasonable multiple-choice options (due to extreme ambiguity), you may ask it directly without the tool. + +You SHOULD ask many questions, but each question must: + +* materially change the spec/plan, OR +* confirm/lock an assumption, OR +* choose between meaningful tradeoffs. +* not be answerable by non-mutating commands. + +Use the \`request_user_input\` tool only for decisions that materially change the plan, for confirming important assumptions, or for information that cannot be discovered via non-mutating exploration. + +## Two kinds of unknowns (treat differently) + +1. **Discoverable facts** (repo/system truth): explore first. + + * Before asking, run targeted searches and check likely sources of truth (configs/manifests/entrypoints/schemas/types/constants). + * Ask only if: multiple plausible candidates; nothing found but you need a missing identifier/context; or ambiguity is actually product intent. + * If asking, present concrete candidates (paths/service names) + recommend one. + * Never ask questions you can answer from your environment (e.g., "where is this struct"). + +2. **Preferences/tradeoffs** (not discoverable): ask early. + + * These are intent or implementation preferences that cannot be derived from exploration. + * Provide 2-4 mutually exclusive options + a recommended default. 
+ * If unanswered, proceed with the recommended option and record it as an assumption in the final plan. + +## Finalization rule + +Only output the final plan when it is decision complete and leaves no decisions to the implementer. + +When you present the official plan, wrap it in a \`\` block so the client can render it specially: + +1) The opening tag must be on its own line. +2) Start the plan content on the next line (no text on the same line as the tag). +3) The closing tag must be on its own line. +4) Use Markdown inside the block. +5) Keep the tags exactly as \`\` and \`\` (do not translate or rename them), even if the plan content is in another language. + +Example: + + +plan content + + +plan content should be human and agent digestible. The final plan must be plan-only and include: + +* A clear title +* A brief summary section +* Important changes or additions to public APIs/interfaces/types +* Test cases and scenarios +* Explicit assumptions and defaults chosen where needed + +Do not ask "should I proceed?" in the final output. The user can easily switch out of Plan mode and request implementation if you have included a \`\` block in your response. Alternatively, they can decide to stay in Plan mode and continue refining the plan. + +Only produce at most one \`\` block per turn, and only when you are presenting a complete spec. +`; + +export const CODEX_DEFAULT_MODE_DEVELOPER_INSTRUCTIONS = `# Collaboration Mode: Default + +You are now in Default mode. Any previous instructions for other modes (e.g. Plan mode) are no longer active. + +Your active mode changes only when new developer instructions with a different \`...\` change it; user requests or tool descriptions do not change mode by themselves. Known mode names are Default and Plan. + +## request_user_input availability + +The \`request_user_input\` tool is unavailable in Default mode. If you call it while in Default mode, it will return an error. 
+ +In Default mode, strongly prefer making reasonable assumptions and executing the user's request rather than stopping to ask questions. If you absolutely must ask a question because the answer cannot be discovered from local context and a reasonable assumption would be risky, ask the user directly with a concise plain-text question. Never write a multiple choice question as a textual assistant message. +`; + +export function normalizeCodexModelSlug( + model: string | undefined | null, + preferredId?: string, +): string | undefined { + const normalized = normalizeModelSlug(model); + if (!normalized) { + return undefined; + } + + if (preferredId?.endsWith("-codex") && preferredId !== normalized) { + return preferredId; + } + + return normalized; +} + +export function classifyCodexStderrLine(rawLine: string): { message: string } | null { + const line = rawLine.replaceAll(ANSI_ESCAPE_REGEX, "").trim(); + if (!line) { + return null; + } + + const match = line.match(CODEX_STDERR_LOG_REGEX); + if (match) { + const level = match[1]; + if (level && level !== "ERROR") { + return null; + } + + const isBenignError = BENIGN_ERROR_LOG_SNIPPETS.some((snippet) => line.includes(snippet)); + if (isBenignError) { + return null; + } + } + + return { message: line }; +} + +export function isRecoverableThreadResumeError(error: unknown): boolean { + const message = (error instanceof Error ? 
error.message : String(error)).toLowerCase(); + if (!message.includes("thread/resume")) { + return false; + } + + return RECOVERABLE_THREAD_RESUME_ERROR_SNIPPETS.some((snippet) => message.includes(snippet)); +} diff --git a/apps/server/src/codexAppServerManager.test.ts b/apps/server/src/codexAppServerManager.test.ts index 2f740a7407..cb0a636a37 100644 --- a/apps/server/src/codexAppServerManager.test.ts +++ b/apps/server/src/codexAppServerManager.test.ts @@ -5,17 +5,17 @@ import os from "node:os"; import path from "node:path"; import { ApprovalRequestId, ThreadId } from "@t3tools/contracts"; +import { CodexAppServerManager } from "./codexAppServerManager"; import { buildCodexInitializeParams, CODEX_DEFAULT_MODE_DEVELOPER_INSTRUCTIONS, CODEX_PLAN_MODE_DEVELOPER_INSTRUCTIONS, - CodexAppServerManager, classifyCodexStderrLine, isRecoverableThreadResumeError, normalizeCodexModelSlug, readCodexAccountSnapshot, resolveCodexModelForAccount, -} from "./codexAppServerManager"; +} from "./codexAppServerManager.testing"; const asThreadId = (value: string): ThreadId => ThreadId.makeUnsafe(value); diff --git a/apps/server/src/codexAppServerManager.testing.ts b/apps/server/src/codexAppServerManager.testing.ts new file mode 100644 index 0000000000..82db91a7e8 --- /dev/null +++ b/apps/server/src/codexAppServerManager.testing.ts @@ -0,0 +1,10 @@ +export { + CODEX_DEFAULT_MODE_DEVELOPER_INSTRUCTIONS, + CODEX_PLAN_MODE_DEVELOPER_INSTRUCTIONS, + classifyCodexStderrLine, + isRecoverableThreadResumeError, + normalizeCodexModelSlug, + type CodexAppServerSendTurnInput, +} from "./codexAppServerManager.shared"; +export { buildCodexInitializeParams } from "./provider/codexAppServer"; +export { readCodexAccountSnapshot, resolveCodexModelForAccount } from "./provider/codexAccount"; diff --git a/apps/server/src/codexAppServerManager.ts b/apps/server/src/codexAppServerManager.ts index 3145038647..abe66f8b91 100644 --- a/apps/server/src/codexAppServerManager.ts +++ 
b/apps/server/src/codexAppServerManager.ts @@ -16,9 +16,7 @@ import { type ProviderSession, type ProviderTurnStartResult, RuntimeMode, - ProviderInteractionMode, } from "@t3tools/contracts"; -import { normalizeModelSlug } from "@t3tools/shared/model"; import { Effect, ServiceMap } from "effect"; import { @@ -32,9 +30,14 @@ import { type CodexAccountSnapshot, } from "./provider/codexAccount"; import { buildCodexInitializeParams, killCodexChildProcess } from "./provider/codexAppServer"; - -export { buildCodexInitializeParams } from "./provider/codexAppServer"; -export { readCodexAccountSnapshot, resolveCodexModelForAccount } from "./provider/codexAccount"; +import { + CODEX_DEFAULT_MODE_DEVELOPER_INSTRUCTIONS, + CODEX_PLAN_MODE_DEVELOPER_INSTRUCTIONS, + classifyCodexStderrLine, + isRecoverableThreadResumeError, + normalizeCodexModelSlug, + type CodexAppServerSendTurnInput, +} from "./codexAppServerManager.shared"; type PendingRequestKey = string; @@ -105,16 +108,6 @@ interface JsonRpcNotification { params?: unknown; } -export interface CodexAppServerSendTurnInput { - readonly threadId: ThreadId; - readonly input?: string; - readonly attachments?: ReadonlyArray<{ type: "image"; url: string }>; - readonly model?: string; - readonly serviceTier?: string | null; - readonly effort?: string; - readonly interactionMode?: ProviderInteractionMode; -} - export interface CodexAppServerStartSessionInput { readonly threadId: ThreadId; readonly provider?: "codex"; @@ -127,168 +120,18 @@ export interface CodexAppServerStartSessionInput { readonly runtimeMode: RuntimeMode; } -export interface CodexThreadTurnSnapshot { +interface CodexThreadTurnSnapshot { id: TurnId; items: unknown[]; } -export interface CodexThreadSnapshot { +interface CodexThreadSnapshot { threadId: string; turns: CodexThreadTurnSnapshot[]; } const CODEX_VERSION_CHECK_TIMEOUT_MS = 4_000; -const ANSI_ESCAPE_CHAR = String.fromCharCode(27); -const ANSI_ESCAPE_REGEX = new RegExp(`${ANSI_ESCAPE_CHAR}\\[[0-9;]*m`, "g"); 
-const CODEX_STDERR_LOG_REGEX = - /^\d{4}-\d{2}-\d{2}T\S+\s+(TRACE|DEBUG|INFO|WARN|ERROR)\s+\S+:\s+(.*)$/; -const BENIGN_ERROR_LOG_SNIPPETS = [ - "state db missing rollout path for thread", - "state db record_discrepancy: find_thread_path_by_id_str_in_subdir, falling_back", -]; -const RECOVERABLE_THREAD_RESUME_ERROR_SNIPPETS = [ - "not found", - "missing thread", - "no such thread", - "unknown thread", - "does not exist", -]; -export const CODEX_PLAN_MODE_DEVELOPER_INSTRUCTIONS = `# Plan Mode (Conversational) - -You work in 3 phases, and you should *chat your way* to a great plan before finalizing it. A great plan is very detailed-intent- and implementation-wise-so that it can be handed to another engineer or agent to be implemented right away. It must be **decision complete**, where the implementer does not need to make any decisions. - -## Mode rules (strict) - -You are in **Plan Mode** until a developer message explicitly ends it. - -Plan Mode is not changed by user intent, tone, or imperative language. If a user asks for execution while still in Plan Mode, treat it as a request to **plan the execution**, not perform it. - -## Plan Mode vs update_plan tool - -Plan Mode is a collaboration mode that can involve requesting user input and eventually issuing a \`\` block. - -Separately, \`update_plan\` is a checklist/progress/TODOs tool; it does not enter or exit Plan Mode. Do not confuse it with Plan mode or try to use it while in Plan mode. If you try to use \`update_plan\` in Plan mode, it will return an error. - -## Execution vs. mutation in Plan Mode - -You may explore and execute **non-mutating** actions that improve the plan. You must not perform **mutating** actions. - -### Allowed (non-mutating, plan-improving) - -Actions that gather truth, reduce ambiguity, or validate feasibility without changing repo-tracked state. 
Examples: - -* Reading or searching files, configs, schemas, types, manifests, and docs -* Static analysis, inspection, and repo exploration -* Dry-run style commands when they do not edit repo-tracked files -* Tests, builds, or checks that may write to caches or build artifacts (for example, \`target/\`, \`.cache/\`, or snapshots) so long as they do not edit repo-tracked files - -### Not allowed (mutating, plan-executing) - -Actions that implement the plan or change repo-tracked state. Examples: - -* Editing or writing files -* Running formatters or linters that rewrite files -* Applying patches, migrations, or codegen that updates repo-tracked files -* Side-effectful commands whose purpose is to carry out the plan rather than refine it - -When in doubt: if the action would reasonably be described as "doing the work" rather than "planning the work," do not do it. - -## PHASE 1 - Ground in the environment (explore first, ask second) - -Begin by grounding yourself in the actual environment. Eliminate unknowns in the prompt by discovering facts, not by asking the user. Resolve all questions that can be answered through exploration or inspection. Identify missing or ambiguous details only if they cannot be derived from the environment. Silent exploration between turns is allowed and encouraged. - -Before asking the user any question, perform at least one targeted non-mutating exploration pass (for example: search relevant files, inspect likely entrypoints/configs, confirm current implementation shape), unless no local environment/repo is available. - -Exception: you may ask clarifying questions about the user's prompt before exploring, ONLY if there are obvious ambiguities or contradictions in the prompt itself. However, if ambiguity might be resolved by exploring, always prefer exploring first. - -Do not ask questions that can be answered from the repo or system (for example, "where is this struct?" or "which UI component should we use?" 
when exploration can make it clear). Only ask once you have exhausted reasonable non-mutating exploration. - -## PHASE 2 - Intent chat (what they actually want) - -* Keep asking until you can clearly state: goal + success criteria, audience, in/out of scope, constraints, current state, and the key preferences/tradeoffs. -* Bias toward questions over guessing: if any high-impact ambiguity remains, do NOT plan yet-ask. - -## PHASE 3 - Implementation chat (what/how we'll build) - -* Once intent is stable, keep asking until the spec is decision complete: approach, interfaces (APIs/schemas/I/O), data flow, edge cases/failure modes, testing + acceptance criteria, rollout/monitoring, and any migrations/compat constraints. - -## Asking questions - -Critical rules: - -* Strongly prefer using the \`request_user_input\` tool to ask any questions. -* Offer only meaningful multiple-choice options; don't include filler choices that are obviously wrong or irrelevant. -* In rare cases where an unavoidable, important question can't be expressed with reasonable multiple-choice options (due to extreme ambiguity), you may ask it directly without the tool. - -You SHOULD ask many questions, but each question must: - -* materially change the spec/plan, OR -* confirm/lock an assumption, OR -* choose between meaningful tradeoffs. -* not be answerable by non-mutating commands. - -Use the \`request_user_input\` tool only for decisions that materially change the plan, for confirming important assumptions, or for information that cannot be discovered via non-mutating exploration. - -## Two kinds of unknowns (treat differently) - -1. **Discoverable facts** (repo/system truth): explore first. - - * Before asking, run targeted searches and check likely sources of truth (configs/manifests/entrypoints/schemas/types/constants). - * Ask only if: multiple plausible candidates; nothing found but you need a missing identifier/context; or ambiguity is actually product intent. 
- * If asking, present concrete candidates (paths/service names) + recommend one. - * Never ask questions you can answer from your environment (e.g., "where is this struct"). - -2. **Preferences/tradeoffs** (not discoverable): ask early. - - * These are intent or implementation preferences that cannot be derived from exploration. - * Provide 2-4 mutually exclusive options + a recommended default. - * If unanswered, proceed with the recommended option and record it as an assumption in the final plan. - -## Finalization rule - -Only output the final plan when it is decision complete and leaves no decisions to the implementer. - -When you present the official plan, wrap it in a \`\` block so the client can render it specially: - -1) The opening tag must be on its own line. -2) Start the plan content on the next line (no text on the same line as the tag). -3) The closing tag must be on its own line. -4) Use Markdown inside the block. -5) Keep the tags exactly as \`\` and \`\` (do not translate or rename them), even if the plan content is in another language. - -Example: - - -plan content - - -plan content should be human and agent digestible. The final plan must be plan-only and include: - -* A clear title -* A brief summary section -* Important changes or additions to public APIs/interfaces/types -* Test cases and scenarios -* Explicit assumptions and defaults chosen where needed - -Do not ask "should I proceed?" in the final output. The user can easily switch out of Plan mode and request implementation if you have included a \`\` block in your response. Alternatively, they can decide to stay in Plan mode and continue refining the plan. - -Only produce at most one \`\` block per turn, and only when you are presenting a complete spec. -`; - -export const CODEX_DEFAULT_MODE_DEVELOPER_INSTRUCTIONS = `# Collaboration Mode: Default - -You are now in Default mode. Any previous instructions for other modes (e.g. Plan mode) are no longer active. 
- -Your active mode changes only when new developer instructions with a different \`...\` change it; user requests or tool descriptions do not change mode by themselves. Known mode names are Default and Plan. - -## request_user_input availability - -The \`request_user_input\` tool is unavailable in Default mode. If you call it while in Default mode, it will return an error. - -In Default mode, strongly prefer making reasonable assumptions and executing the user's request rather than stopping to ask questions. If you absolutely must ask a question because the answer cannot be discovered from local context and a reasonable assumption would be risky, ask the user directly with a concise plain-text question. Never write a multiple choice question as a textual assistant message. -`; - function mapCodexRuntimeMode(runtimeMode: RuntimeMode): { readonly approvalPolicy: "on-request" | "never"; readonly sandbox: "workspace-write" | "danger-full-access"; @@ -315,22 +158,6 @@ function killChildTree(child: ChildProcessWithoutNullStreams): void { killCodexChildProcess(child); } -export function normalizeCodexModelSlug( - model: string | undefined | null, - preferredId?: string, -): string | undefined { - const normalized = normalizeModelSlug(model); - if (!normalized) { - return undefined; - } - - if (preferredId?.endsWith("-codex") && preferredId !== normalized) { - return preferredId; - } - - return normalized; -} - function buildCodexCollaborationMode(input: { readonly interactionMode?: "default" | "plan"; readonly model?: string; @@ -394,38 +221,7 @@ function toCodexUserInputAnswers( ); } -export function classifyCodexStderrLine(rawLine: string): { message: string } | null { - const line = rawLine.replaceAll(ANSI_ESCAPE_REGEX, "").trim(); - if (!line) { - return null; - } - - const match = line.match(CODEX_STDERR_LOG_REGEX); - if (match) { - const level = match[1]; - if (level && level !== "ERROR") { - return null; - } - - const isBenignError = 
BENIGN_ERROR_LOG_SNIPPETS.some((snippet) => line.includes(snippet)); - if (isBenignError) { - return null; - } - } - - return { message: line }; -} - -export function isRecoverableThreadResumeError(error: unknown): boolean { - const message = (error instanceof Error ? error.message : String(error)).toLowerCase(); - if (!message.includes("thread/resume")) { - return false; - } - - return RECOVERABLE_THREAD_RESUME_ERROR_SNIPPETS.some((snippet) => message.includes(snippet)); -} - -export interface CodexAppServerManagerEvents { +interface CodexAppServerManagerEvents { event: [event: ProviderEvent]; } diff --git a/apps/server/src/git/Layers/GitCore.shared.ts b/apps/server/src/git/Layers/GitCore.shared.ts new file mode 100644 index 0000000000..8105b4bada --- /dev/null +++ b/apps/server/src/git/Layers/GitCore.shared.ts @@ -0,0 +1,2086 @@ +import { + Cache, + Data, + Duration, + Effect, + Exit, + FileSystem, + Option, + Path, + PlatformError, + Ref, + Result, + Schema, + Scope, + Semaphore, + Stream, +} from "effect"; +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; + +import { GitCommandError, type GitBranch } from "@t3tools/contracts"; +import { dedupeRemoteBranchesWithLocalMatches } from "@t3tools/shared/git"; +import { + type ExecuteGitProgress, + type GitCommitOptions, + type GitCoreShape, + type ExecuteGitInput, + type ExecuteGitResult, +} from "../Services/GitCore.ts"; +import { + parseRemoteNames, + parseRemoteNamesInGitOrder, + parseRemoteRefWithRemoteNames, +} from "../remoteRefs.ts"; +import { ServerConfig } from "../../config.ts"; +import { decodeJsonResult } from "@t3tools/shared/schemaJson"; + +const DEFAULT_TIMEOUT_MS = 30_000; +const DEFAULT_MAX_OUTPUT_BYTES = 1_000_000; +const OUTPUT_TRUNCATED_MARKER = "\n\n[truncated]"; +const PREPARED_COMMIT_PATCH_MAX_OUTPUT_BYTES = 49_000; +const RANGE_COMMIT_SUMMARY_MAX_OUTPUT_BYTES = 19_000; +const RANGE_DIFF_SUMMARY_MAX_OUTPUT_BYTES = 19_000; +const RANGE_DIFF_PATCH_MAX_OUTPUT_BYTES = 
59_000; +const WORKSPACE_FILES_MAX_OUTPUT_BYTES = 16 * 1024 * 1024; +const GIT_CHECK_IGNORE_MAX_STDIN_BYTES = 256 * 1024; +const STATUS_UPSTREAM_REFRESH_INTERVAL = Duration.seconds(15); +const STATUS_UPSTREAM_REFRESH_TIMEOUT = Duration.seconds(5); +const STATUS_UPSTREAM_REFRESH_FAILURE_COOLDOWN = Duration.seconds(5); +const STATUS_UPSTREAM_REFRESH_CACHE_CAPACITY = 2_048; +const DEFAULT_BASE_BRANCH_CANDIDATES = ["main", "master"] as const; +const GIT_LIST_BRANCHES_DEFAULT_LIMIT = 100; + +type TraceTailState = { + processedChars: number; + remainder: string; +}; + +class StatusUpstreamRefreshCacheKey extends Data.Class<{ + gitCommonDir: string; + upstreamRef: string; + remoteName: string; + upstreamBranch: string; +}> {} + +interface ExecuteGitOptions { + stdin?: string | undefined; + timeoutMs?: number | undefined; + allowNonZeroExit?: boolean | undefined; + fallbackErrorMessage?: string | undefined; + maxOutputBytes?: number | undefined; + truncateOutputAtMaxBytes?: boolean | undefined; + progress?: ExecuteGitProgress | undefined; +} + +function parseBranchAb(value: string): { ahead: number; behind: number } { + const match = value.match(/^\+(\d+)\s+-(\d+)$/); + if (!match) return { ahead: 0, behind: 0 }; + return { + ahead: Number(match[1] ?? "0"), + behind: Number(match[2] ?? "0"), + }; +} + +function parseNumstatEntries( + stdout: string, +): Array<{ path: string; insertions: number; deletions: number }> { + const entries: Array<{ path: string; insertions: number; deletions: number }> = []; + for (const line of stdout.split(/\r?\n/g)) { + if (line.trim().length === 0) continue; + const [addedRaw, deletedRaw, ...pathParts] = line.split("\t"); + const rawPath = + pathParts.length > 1 ? (pathParts.at(-1) ?? "").trim() : pathParts.join("\t").trim(); + if (rawPath.length === 0) continue; + const added = Number.parseInt(addedRaw ?? "0", 10); + const deleted = Number.parseInt(deletedRaw ?? 
"0", 10); + const renameArrowIndex = rawPath.indexOf(" => "); + const normalizedPath = + renameArrowIndex >= 0 ? rawPath.slice(renameArrowIndex + " => ".length).trim() : rawPath; + entries.push({ + path: normalizedPath.length > 0 ? normalizedPath : rawPath, + insertions: Number.isFinite(added) ? added : 0, + deletions: Number.isFinite(deleted) ? deleted : 0, + }); + } + return entries; +} + +function splitNullSeparatedPaths(input: string, truncated: boolean): string[] { + const parts = input.split("\0"); + if (parts.length === 0) return []; + + if (truncated && parts[parts.length - 1]?.length) { + parts.pop(); + } + + return parts.filter((value) => value.length > 0); +} + +function chunkPathsForGitCheckIgnore(relativePaths: readonly string[]): string[][] { + const chunks: string[][] = []; + let chunk: string[] = []; + let chunkBytes = 0; + + for (const relativePath of relativePaths) { + const relativePathBytes = Buffer.byteLength(relativePath) + 1; + if (chunk.length > 0 && chunkBytes + relativePathBytes > GIT_CHECK_IGNORE_MAX_STDIN_BYTES) { + chunks.push(chunk); + chunk = []; + chunkBytes = 0; + } + + chunk.push(relativePath); + chunkBytes += relativePathBytes; + + if (chunkBytes >= GIT_CHECK_IGNORE_MAX_STDIN_BYTES) { + chunks.push(chunk); + chunk = []; + chunkBytes = 0; + } + } + + if (chunk.length > 0) { + chunks.push(chunk); + } + + return chunks; +} + +function parsePorcelainPath(line: string): string | null { + if (line.startsWith("? ") || line.startsWith("! ")) { + const simple = line.slice(2).trim(); + return simple.length > 0 ? simple : null; + } + + if (!(line.startsWith("1 ") || line.startsWith("2 ") || line.startsWith("u "))) { + return null; + } + + const tabIndex = line.indexOf("\t"); + if (tabIndex >= 0) { + const fromTab = line.slice(tabIndex + 1); + const [filePath] = fromTab.split("\t"); + return filePath?.trim().length ? filePath.trim() : null; + } + + const parts = line.trim().split(/\s+/g); + const filePath = parts.at(-1) ?? 
""; + return filePath.length > 0 ? filePath : null; +} + +function parseBranchLine(line: string): { name: string; current: boolean } | null { + const trimmed = line.trim(); + if (trimmed.length === 0) return null; + + const name = trimmed.replace(/^[*+]\s+/, ""); + // Exclude symbolic refs like: "origin/HEAD -> origin/main". + // Exclude detached HEAD pseudo-refs like: "(HEAD detached at origin/main)". + if (name.includes(" -> ") || name.startsWith("(")) return null; + + return { + name, + current: trimmed.startsWith("* "), + }; +} + +function filterBranchesForListQuery( + branches: ReadonlyArray, + query?: string, +): ReadonlyArray { + if (!query) { + return branches; + } + + const normalizedQuery = query.toLowerCase(); + return branches.filter((branch) => branch.name.toLowerCase().includes(normalizedQuery)); +} + +function paginateBranches(input: { + branches: ReadonlyArray; + cursor?: number | undefined; + limit?: number | undefined; +}): { + branches: ReadonlyArray; + nextCursor: number | null; + totalCount: number; +} { + const cursor = input.cursor ?? 0; + const limit = input.limit ?? GIT_LIST_BRANCHES_DEFAULT_LIMIT; + const totalCount = input.branches.length; + const branches = input.branches.slice(cursor, cursor + limit); + const nextCursor = cursor + branches.length < totalCount ? cursor + branches.length : null; + + return { + branches, + nextCursor, + totalCount, + }; +} + +function sanitizeRemoteName(value: string): string { + const sanitized = value + .trim() + .replace(/[^A-Za-z0-9._-]+/g, "-") + .replace(/^-+|-+$/g, ""); + return sanitized.length > 0 ? 
sanitized : "fork"; +} + +function normalizeRemoteUrl(value: string): string { + return value + .trim() + .replace(/\/+$/g, "") + .replace(/\.git$/i, "") + .toLowerCase(); +} + +function parseRemoteFetchUrls(stdout: string): Map { + const remotes = new Map(); + for (const line of stdout.split("\n")) { + const trimmed = line.trim(); + if (trimmed.length === 0) continue; + const match = /^(\S+)\s+(\S+)\s+\((fetch|push)\)$/.exec(trimmed); + if (!match) continue; + const [, remoteName = "", remoteUrl = "", direction = ""] = match; + if (direction !== "fetch" || remoteName.length === 0 || remoteUrl.length === 0) { + continue; + } + remotes.set(remoteName, remoteUrl); + } + return remotes; +} + +function parseUpstreamRefWithRemoteNames( + upstreamRef: string, + remoteNames: ReadonlyArray, +): { upstreamRef: string; remoteName: string; upstreamBranch: string } | null { + const parsed = parseRemoteRefWithRemoteNames(upstreamRef, remoteNames); + if (!parsed) { + return null; + } + + return { + upstreamRef, + remoteName: parsed.remoteName, + upstreamBranch: parsed.branchName, + }; +} + +function parseUpstreamRefByFirstSeparator( + upstreamRef: string, +): { upstreamRef: string; remoteName: string; upstreamBranch: string } | null { + const separatorIndex = upstreamRef.indexOf("/"); + if (separatorIndex <= 0 || separatorIndex === upstreamRef.length - 1) { + return null; + } + + const remoteName = upstreamRef.slice(0, separatorIndex).trim(); + const upstreamBranch = upstreamRef.slice(separatorIndex + 1).trim(); + if (remoteName.length === 0 || upstreamBranch.length === 0) { + return null; + } + + return { + upstreamRef, + remoteName, + upstreamBranch, + }; +} + +function parseTrackingBranchByUpstreamRef(stdout: string, upstreamRef: string): string | null { + for (const line of stdout.split("\n")) { + const trimmedLine = line.trim(); + if (trimmedLine.length === 0) { + continue; + } + const [branchNameRaw, upstreamBranchRaw = ""] = trimmedLine.split("\t"); + const branchName = 
branchNameRaw?.trim() ?? ""; + const upstreamBranch = upstreamBranchRaw.trim(); + if (branchName.length === 0 || upstreamBranch.length === 0) { + continue; + } + if (upstreamBranch === upstreamRef) { + return branchName; + } + } + + return null; +} + +function deriveLocalBranchNameFromRemoteRef(branchName: string): string | null { + const separatorIndex = branchName.indexOf("/"); + if (separatorIndex <= 0 || separatorIndex === branchName.length - 1) { + return null; + } + const localBranch = branchName.slice(separatorIndex + 1).trim(); + return localBranch.length > 0 ? localBranch : null; +} + +function commandLabel(args: readonly string[]): string { + return `git ${args.join(" ")}`; +} + +function parseDefaultBranchFromRemoteHeadRef(value: string, remoteName: string): string | null { + const trimmed = value.trim(); + const prefix = `refs/remotes/${remoteName}/`; + if (!trimmed.startsWith(prefix)) { + return null; + } + const branch = trimmed.slice(prefix.length).trim(); + return branch.length > 0 ? branch : null; +} + +function createGitCommandError( + operation: string, + cwd: string, + args: readonly string[], + detail: string, + cause?: unknown, +): GitCommandError { + return new GitCommandError({ + operation, + command: commandLabel(args), + cwd, + detail, + ...(cause !== undefined ? { cause } : {}), + }); +} + +function quoteGitCommand(args: ReadonlyArray): string { + return `git ${args.join(" ")}`; +} + +function toGitCommandError( + input: Pick, + detail: string, +) { + return (cause: unknown) => + Schema.is(GitCommandError)(cause) + ? cause + : new GitCommandError({ + operation: input.operation, + command: quoteGitCommand(input.args), + cwd: input.cwd, + detail: `${cause instanceof Error && cause.message.length > 0 ? cause.message : "Unknown error"} - ${detail}`, + ...(cause !== undefined ? 
{ cause } : {}),
        });
}

// NOTE(review): several generic type arguments in this section were lost in
// transit; the ones written below are reconstructed from usage — confirm
// against the original declarations before relying on them.

/** Handle returned by createTrace2Monitor: env vars to inject and a flush op. */
interface Trace2Monitor {
  readonly env: NodeJS.ProcessEnv;
  readonly flush: Effect.Effect<void>;
}

/**
 * Derives a stable key for correlating trace2 child_start/child_exit records:
 * prefers the numeric/string child_id, falling back to a non-empty hook_name.
 */
function trace2ChildKey(record: Record<string, unknown>): string | null {
  const childId = record.child_id;
  if (typeof childId === "number" || typeof childId === "string") {
    return String(childId);
  }
  const hookName = record.hook_name;
  return typeof hookName === "string" && hookName.trim().length > 0 ? hookName.trim() : null;
}

// Loose schema for one GIT_TRACE2_EVENT JSON record; fields are narrowed ad hoc.
const Trace2Record = Schema.Record(Schema.String, Schema.Unknown);

/**
 * Sets up hook-progress monitoring for a git invocation via GIT_TRACE2_EVENT:
 * git writes JSON trace records to a scoped temp file, which is tailed on
 * filesystem events; hook child_start/child_exit records are forwarded to the
 * progress callbacks. Returns the env to pass to git plus a manual flush.
 * When no hook callbacks are requested, this is a no-op.
 */
const createTrace2Monitor = Effect.fn("createTrace2Monitor")(function* (
  input: Pick<ExecuteGitInput, "args" | "cwd">,
  progress: ExecuteGitProgress | undefined,
): Effect.fn.Return<
  Trace2Monitor,
  PlatformError.PlatformError,
  Scope.Scope | FileSystem.FileSystem | Path.Path
> {
  if (!progress?.onHookStarted && !progress?.onHookFinished) {
    return {
      env: {},
      flush: Effect.void,
    };
  }

  const fs = yield* FileSystem.FileSystem;
  const path = yield* Path.Path;
  // Scoped temp file: cleaned up automatically when the surrounding scope closes.
  const traceFilePath = yield* fs.makeTempFileScoped({
    prefix: `t3code-git-trace2-${process.pid}-`,
    suffix: ".json",
  });
  const hookStartByChildKey = new Map<string, { hookName: string; startedAtMs: number }>();
  // Tail state: how many chars of the trace file were consumed, plus the
  // trailing partial line carried over between reads.
  const traceTailState = yield* Ref.make({
    processedChars: 0,
    remainder: "",
  });

  const handleTraceLine = Effect.fn("handleTraceLine")(function* (line: string) {
    const trimmedLine = line.trim();
    if (trimmedLine.length === 0) {
      return;
    }

    const traceRecord = decodeJsonResult(Trace2Record)(trimmedLine);
    if (Result.isFailure(traceRecord)) {
      yield* Effect.logDebug(
        `GitCore.trace2: failed to parse trace line for ${quoteGitCommand(input.args)} in ${input.cwd}`,
        traceRecord.failure,
      );
      return;
    }

    // Only hook child processes are of interest here.
    if (traceRecord.success.child_class !== "hook") {
      return;
    }

    const event = traceRecord.success.event;
    const childKey = trace2ChildKey(traceRecord.success);
    if (childKey === null) {
      return;
    }
    const started = hookStartByChildKey.get(childKey);
    // child_exit records may omit hook_name; fall back to the start record's name.
    const hookNameFromEvent =
      typeof traceRecord.success.hook_name === "string" ? traceRecord.success.hook_name.trim() : "";
    const hookName = hookNameFromEvent.length > 0 ? hookNameFromEvent : (started?.hookName ?? "");
    if (hookName.length === 0) {
      return;
    }

    if (event === "child_start") {
      hookStartByChildKey.set(childKey, { hookName, startedAtMs: Date.now() });
      if (progress.onHookStarted) {
        yield* progress.onHookStarted(hookName);
      }
      return;
    }

    if (event === "child_exit") {
      hookStartByChildKey.delete(childKey);
      if (progress.onHookFinished) {
        const code = traceRecord.success.code;
        yield* progress.onHookFinished({
          hookName: started?.hookName ?? hookName,
          exitCode: typeof code === "number" && Number.isInteger(code) ? code : null,
          durationMs: started ? Math.max(0, Date.now() - started.startedAtMs) : null,
        });
      }
    }
  });

  // Serialize delta reads so overlapping watch events cannot interleave
  // partial-line state updates.
  const deltaMutex = yield* Semaphore.make(1);
  const readTraceDelta = deltaMutex.withPermit(
    fs.readFileString(traceFilePath).pipe(
      Effect.flatMap((contents) =>
        Effect.uninterruptible(
          Ref.modify(traceTailState, ({ processedChars, remainder }) => {
            if (contents.length <= processedChars) {
              return [[], { processedChars, remainder }];
            }

            const appended = contents.slice(processedChars);
            const combined = remainder + appended;
            const lines = combined.split("\n");
            // The last split segment may be an incomplete line; carry it over.
            const nextRemainder = lines.pop() ?? "";

            return [
              lines.map((line) => line.replace(/\r$/, "")),
              {
                processedChars: contents.length,
                remainder: nextRemainder,
              },
            ];
          }).pipe(
            Effect.flatMap((lines) => Effect.forEach(lines, handleTraceLine, { discard: true })),
          ),
        ),
      ),
      Effect.ignore({ log: true }),
    ),
  );
  const traceFileName = path.basename(traceFilePath);
  // Watchers may report the path in several forms; match on basename too.
  yield* Stream.runForEach(fs.watch(traceFilePath), (event) => {
    const eventPath = event.path;
    const isTargetTraceEvent =
      eventPath === traceFilePath ||
      eventPath === traceFileName ||
      path.basename(eventPath) === traceFileName;
    if (!isTargetTraceEvent) return Effect.void;
    return readTraceDelta;
  }).pipe(Effect.ignoreCause({ log: true }), Effect.forkScoped);

  // On scope close: drain any unread delta and process a trailing partial line.
  const finalizeTrace2Monitor = Effect.fn("finalizeTrace2Monitor")(function* () {
    yield* readTraceDelta;
    const finalLine = yield* Ref.modify(traceTailState, ({ processedChars, remainder }) => [
      remainder.trim(),
      {
        processedChars,
        remainder: "",
      },
    ]);
    if (finalLine.length > 0) {
      yield* handleTraceLine(finalLine);
    }
  });

  yield* Effect.addFinalizer(finalizeTrace2Monitor);

  return {
    env: {
      GIT_TRACE2_EVENT: traceFilePath,
    },
    flush: readTraceDelta,
  };
});

/**
 * Drains a byte stream from a git child process into text while enforcing a
 * byte budget. Depending on `truncateOutputAtMaxBytes`, overflow either
 * truncates the captured text or fails with GitCommandError. Complete lines
 * (CR stripped) are forwarded to `onLine` as they arrive.
 */
const collectOutput = Effect.fn("collectOutput")(function* (
  input: Pick<ExecuteGitInput, "operation" | "args" | "cwd">,
  stream: Stream.Stream<Uint8Array>,
  maxOutputBytes: number,
  truncateOutputAtMaxBytes: boolean,
  onLine: ((line: string) => Effect.Effect<void>) | undefined,
): Effect.fn.Return<{ readonly text: string; readonly truncated: boolean }, GitCommandError> {
  const decoder = new TextDecoder();
  let bytes = 0;
  let text = "";
  let lineBuffer = "";
  let truncated = false;

  const emitCompleteLines = Effect.fn("emitCompleteLines")(function* (flush: boolean) {
    let newlineIndex = lineBuffer.indexOf("\n");
    while (newlineIndex >= 0) {
      const line = lineBuffer.slice(0, newlineIndex).replace(/\r$/, "");
      lineBuffer = lineBuffer.slice(newlineIndex + 1);
      if (line.length > 0 && onLine) {
        yield* onLine(line);
      }
      newlineIndex = lineBuffer.indexOf("\n");
    }

    if (flush) {
      const trailing = lineBuffer.replace(/\r$/, "");
      lineBuffer = "";
      if (trailing.length > 0 && onLine) {
        yield* onLine(trailing);
      }
    }
  });

  const processChunk = Effect.fn("processChunk")(function* (chunk: Uint8Array) {
    // Once truncating, further chunks are silently dropped.
    if (truncateOutputAtMaxBytes && truncated) {
      return;
    }
    const nextBytes = bytes + chunk.byteLength;
    // In strict mode, exceeding the budget is a hard failure.
    if (!truncateOutputAtMaxBytes && nextBytes > maxOutputBytes) {
      return yield* new GitCommandError({
        operation: input.operation,
        command: quoteGitCommand(input.args),
        cwd: input.cwd,
        detail: `${quoteGitCommand(input.args)} output exceeded ${maxOutputBytes} bytes and was truncated.`,
      });
    }

    // In truncating mode, keep only the bytes that fit the remaining budget.
    const chunkToDecode =
      truncateOutputAtMaxBytes && nextBytes > maxOutputBytes
        ? chunk.subarray(0, Math.max(0, maxOutputBytes - bytes))
        : chunk;
    bytes += chunkToDecode.byteLength;
    truncated = truncateOutputAtMaxBytes && nextBytes > maxOutputBytes;

    // stream:true keeps multi-byte sequences split across chunks intact;
    // the final (truncating) decode flushes the decoder.
    const decoded = decoder.decode(chunkToDecode, { stream: !truncated });
    text += decoded;
    lineBuffer += decoded;
    yield* emitCompleteLines(false);
  });

  yield* Stream.runForEach(stream, processChunk).pipe(
    Effect.mapError(toGitCommandError(input, "output stream failed.")),
  );

  const remainder = truncated ?
"" : decoder.decode(); + text += remainder; + lineBuffer += remainder; + yield* emitCompleteLines(true); + return { + text, + truncated, + }; +}); + +export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { + executeOverride?: GitCoreShape["execute"]; +}) { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const { worktreesDir } = yield* ServerConfig; + + let execute: GitCoreShape["execute"]; + + if (options?.executeOverride) { + execute = options.executeOverride; + } else { + const commandSpawner = yield* ChildProcessSpawner.ChildProcessSpawner; + execute = Effect.fnUntraced(function* (input) { + const commandInput = { + ...input, + args: [...input.args], + } as const; + const timeoutMs = input.timeoutMs ?? DEFAULT_TIMEOUT_MS; + const maxOutputBytes = input.maxOutputBytes ?? DEFAULT_MAX_OUTPUT_BYTES; + const truncateOutputAtMaxBytes = input.truncateOutputAtMaxBytes ?? false; + + const runGitCommand = Effect.fn("runGitCommand")(function* () { + const trace2Monitor = yield* createTrace2Monitor(commandInput, input.progress).pipe( + Effect.provideService(Path.Path, path), + Effect.provideService(FileSystem.FileSystem, fileSystem), + Effect.mapError(toGitCommandError(commandInput, "failed to create trace2 monitor.")), + ); + const child = yield* commandSpawner + .spawn( + ChildProcess.make("git", commandInput.args, { + cwd: commandInput.cwd, + env: { + ...process.env, + ...input.env, + ...trace2Monitor.env, + }, + }), + ) + .pipe(Effect.mapError(toGitCommandError(commandInput, "failed to spawn."))); + + const [stdout, stderr, exitCode] = yield* Effect.all( + [ + collectOutput( + commandInput, + child.stdout, + maxOutputBytes, + truncateOutputAtMaxBytes, + input.progress?.onStdoutLine, + ), + collectOutput( + commandInput, + child.stderr, + maxOutputBytes, + truncateOutputAtMaxBytes, + input.progress?.onStderrLine, + ), + child.exitCode.pipe( + Effect.map((value) => Number(value)), + 
Effect.mapError(toGitCommandError(commandInput, "failed to report exit code.")), + ), + input.stdin === undefined + ? Effect.void + : Stream.run(Stream.encodeText(Stream.make(input.stdin)), child.stdin).pipe( + Effect.mapError(toGitCommandError(commandInput, "failed to write stdin.")), + ), + ], + { concurrency: "unbounded" }, + ).pipe(Effect.map(([stdout, stderr, exitCode]) => [stdout, stderr, exitCode] as const)); + yield* trace2Monitor.flush; + + if (!input.allowNonZeroExit && exitCode !== 0) { + const trimmedStderr = stderr.text.trim(); + return yield* new GitCommandError({ + operation: commandInput.operation, + command: quoteGitCommand(commandInput.args), + cwd: commandInput.cwd, + detail: + trimmedStderr.length > 0 + ? `${quoteGitCommand(commandInput.args)} failed: ${trimmedStderr}` + : `${quoteGitCommand(commandInput.args)} failed with code ${exitCode}.`, + }); + } + + return { + code: exitCode, + stdout: stdout.text, + stderr: stderr.text, + stdoutTruncated: stdout.truncated, + stderrTruncated: stderr.truncated, + } satisfies ExecuteGitResult; + }); + + return yield* runGitCommand().pipe( + Effect.scoped, + Effect.timeoutOption(timeoutMs), + Effect.flatMap((result) => + Option.match(result, { + onNone: () => + Effect.fail( + new GitCommandError({ + operation: commandInput.operation, + command: quoteGitCommand(commandInput.args), + cwd: commandInput.cwd, + detail: `${quoteGitCommand(commandInput.args)} timed out.`, + }), + ), + onSome: Effect.succeed, + }), + ), + ); + }); + } + + const executeGit = ( + operation: string, + cwd: string, + args: readonly string[], + options: ExecuteGitOptions = {}, + ): Effect.Effect => + execute({ + operation, + cwd, + args, + ...(options.stdin !== undefined ? { stdin: options.stdin } : {}), + allowNonZeroExit: true, + ...(options.timeoutMs !== undefined ? { timeoutMs: options.timeoutMs } : {}), + ...(options.maxOutputBytes !== undefined ? 
{ maxOutputBytes: options.maxOutputBytes } : {}), + ...(options.truncateOutputAtMaxBytes !== undefined + ? { truncateOutputAtMaxBytes: options.truncateOutputAtMaxBytes } + : {}), + ...(options.progress ? { progress: options.progress } : {}), + }).pipe( + Effect.flatMap((result) => { + if (options.allowNonZeroExit || result.code === 0) { + return Effect.succeed(result); + } + const stderr = result.stderr.trim(); + if (stderr.length > 0) { + return Effect.fail(createGitCommandError(operation, cwd, args, stderr)); + } + if (options.fallbackErrorMessage) { + return Effect.fail( + createGitCommandError(operation, cwd, args, options.fallbackErrorMessage), + ); + } + return Effect.fail( + createGitCommandError( + operation, + cwd, + args, + `${commandLabel(args)} failed: code=${result.code ?? "null"}`, + ), + ); + }), + ); + + const runGit = ( + operation: string, + cwd: string, + args: readonly string[], + allowNonZeroExit = false, + ): Effect.Effect => + executeGit(operation, cwd, args, { allowNonZeroExit }).pipe(Effect.asVoid); + + const runGitStdout = ( + operation: string, + cwd: string, + args: readonly string[], + allowNonZeroExit = false, + ): Effect.Effect => + executeGit(operation, cwd, args, { allowNonZeroExit }).pipe( + Effect.map((result) => result.stdout), + ); + + const runGitStdoutWithOptions = ( + operation: string, + cwd: string, + args: readonly string[], + options: ExecuteGitOptions = {}, + ): Effect.Effect => + executeGit(operation, cwd, args, options).pipe( + Effect.map((result) => + result.stdoutTruncated ? 
`${result.stdout}${OUTPUT_TRUNCATED_MARKER}` : result.stdout, + ), + ); + + const branchExists = (cwd: string, branch: string): Effect.Effect => + executeGit( + "GitCore.branchExists", + cwd, + ["show-ref", "--verify", "--quiet", `refs/heads/${branch}`], + { + allowNonZeroExit: true, + timeoutMs: 5_000, + }, + ).pipe(Effect.map((result) => result.code === 0)); + + const resolveAvailableBranchName = Effect.fn("resolveAvailableBranchName")(function* ( + cwd: string, + desiredBranch: string, + ) { + const isDesiredTaken = yield* branchExists(cwd, desiredBranch); + if (!isDesiredTaken) { + return desiredBranch; + } + + for (let suffix = 1; suffix <= 100; suffix += 1) { + const candidate = `${desiredBranch}-${suffix}`; + const isCandidateTaken = yield* branchExists(cwd, candidate); + if (!isCandidateTaken) { + return candidate; + } + } + + return yield* createGitCommandError( + "GitCore.renameBranch", + cwd, + ["branch", "-m", "--", desiredBranch], + `Could not find an available branch name for '${desiredBranch}'.`, + ); + }); + + const resolveCurrentUpstream = Effect.fn("resolveCurrentUpstream")(function* (cwd: string) { + const upstreamRef = yield* runGitStdout( + "GitCore.resolveCurrentUpstream", + cwd, + ["rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{upstream}"], + true, + ).pipe(Effect.map((stdout) => stdout.trim())); + + if (upstreamRef.length === 0 || upstreamRef === "@{upstream}") { + return null; + } + + const remoteNames = yield* runGitStdout("GitCore.listRemoteNames", cwd, ["remote"]).pipe( + Effect.map(parseRemoteNames), + Effect.catch(() => Effect.succeed>([])), + ); + return ( + parseUpstreamRefWithRemoteNames(upstreamRef, remoteNames) ?? 
+ parseUpstreamRefByFirstSeparator(upstreamRef) + ); + }); + + const fetchUpstreamRefForStatus = ( + gitCommonDir: string, + upstream: { upstreamRef: string; remoteName: string; upstreamBranch: string }, + ): Effect.Effect => { + const refspec = `+refs/heads/${upstream.upstreamBranch}:refs/remotes/${upstream.upstreamRef}`; + const fetchCwd = + path.basename(gitCommonDir) === ".git" ? path.dirname(gitCommonDir) : gitCommonDir; + return executeGit( + "GitCore.fetchUpstreamRefForStatus", + fetchCwd, + ["--git-dir", gitCommonDir, "fetch", "--quiet", "--no-tags", upstream.remoteName, refspec], + { + allowNonZeroExit: true, + timeoutMs: Duration.toMillis(STATUS_UPSTREAM_REFRESH_TIMEOUT), + }, + ).pipe(Effect.asVoid); + }; + + const resolveGitCommonDir = Effect.fn("resolveGitCommonDir")(function* (cwd: string) { + const gitCommonDir = yield* runGitStdout("GitCore.resolveGitCommonDir", cwd, [ + "rev-parse", + "--git-common-dir", + ]).pipe(Effect.map((stdout) => stdout.trim())); + return path.isAbsolute(gitCommonDir) ? gitCommonDir : path.resolve(cwd, gitCommonDir); + }); + + const refreshStatusUpstreamCacheEntry = Effect.fn("refreshStatusUpstreamCacheEntry")(function* ( + cacheKey: StatusUpstreamRefreshCacheKey, + ) { + yield* fetchUpstreamRefForStatus(cacheKey.gitCommonDir, { + upstreamRef: cacheKey.upstreamRef, + remoteName: cacheKey.remoteName, + upstreamBranch: cacheKey.upstreamBranch, + }); + return true as const; + }); + + const statusUpstreamRefreshCache = yield* Cache.makeWith({ + capacity: STATUS_UPSTREAM_REFRESH_CACHE_CAPACITY, + lookup: refreshStatusUpstreamCacheEntry, + // Keep successful refreshes warm and briefly back off failed refreshes to avoid retry storms. + timeToLive: (exit) => + Exit.isSuccess(exit) + ? 
STATUS_UPSTREAM_REFRESH_INTERVAL + : STATUS_UPSTREAM_REFRESH_FAILURE_COOLDOWN, + }); + + const refreshStatusUpstreamIfStale = Effect.fn("refreshStatusUpstreamIfStale")(function* ( + cwd: string, + ) { + const upstream = yield* resolveCurrentUpstream(cwd); + if (!upstream) return; + const gitCommonDir = yield* resolveGitCommonDir(cwd); + yield* Cache.get( + statusUpstreamRefreshCache, + new StatusUpstreamRefreshCacheKey({ + gitCommonDir, + upstreamRef: upstream.upstreamRef, + remoteName: upstream.remoteName, + upstreamBranch: upstream.upstreamBranch, + }), + ); + }); + + const resolveDefaultBranchName = ( + cwd: string, + remoteName: string, + ): Effect.Effect => + executeGit( + "GitCore.resolveDefaultBranchName", + cwd, + ["symbolic-ref", `refs/remotes/${remoteName}/HEAD`], + { allowNonZeroExit: true }, + ).pipe( + Effect.map((result) => { + if (result.code !== 0) { + return null; + } + return parseDefaultBranchFromRemoteHeadRef(result.stdout, remoteName); + }), + ); + + const remoteBranchExists = ( + cwd: string, + remoteName: string, + branch: string, + ): Effect.Effect => + executeGit( + "GitCore.remoteBranchExists", + cwd, + ["show-ref", "--verify", "--quiet", `refs/remotes/${remoteName}/${branch}`], + { + allowNonZeroExit: true, + }, + ).pipe(Effect.map((result) => result.code === 0)); + + const originRemoteExists = (cwd: string): Effect.Effect => + executeGit("GitCore.originRemoteExists", cwd, ["remote", "get-url", "origin"], { + allowNonZeroExit: true, + }).pipe(Effect.map((result) => result.code === 0)); + + const listRemoteNames = (cwd: string): Effect.Effect, GitCommandError> => + runGitStdout("GitCore.listRemoteNames", cwd, ["remote"]).pipe( + Effect.map(parseRemoteNamesInGitOrder), + ); + + const resolvePrimaryRemoteName = Effect.fn("resolvePrimaryRemoteName")(function* (cwd: string) { + if (yield* originRemoteExists(cwd)) { + return "origin"; + } + const remotes = yield* listRemoteNames(cwd); + const [firstRemote] = remotes; + if (firstRemote) { + 
return firstRemote; + } + return yield* createGitCommandError( + "GitCore.resolvePrimaryRemoteName", + cwd, + ["remote"], + "No git remote is configured for this repository.", + ); + }); + + const resolvePushRemoteName = Effect.fn("resolvePushRemoteName")(function* ( + cwd: string, + branch: string, + ) { + const branchPushRemote = yield* runGitStdout( + "GitCore.resolvePushRemoteName.branchPushRemote", + cwd, + ["config", "--get", `branch.${branch}.pushRemote`], + true, + ).pipe(Effect.map((stdout) => stdout.trim())); + if (branchPushRemote.length > 0) { + return branchPushRemote; + } + + const pushDefaultRemote = yield* runGitStdout( + "GitCore.resolvePushRemoteName.remotePushDefault", + cwd, + ["config", "--get", "remote.pushDefault"], + true, + ).pipe(Effect.map((stdout) => stdout.trim())); + if (pushDefaultRemote.length > 0) { + return pushDefaultRemote; + } + + return yield* resolvePrimaryRemoteName(cwd).pipe(Effect.catch(() => Effect.succeed(null))); + }); + + const ensureRemote: GitCoreShape["ensureRemote"] = Effect.fn("ensureRemote")(function* (input) { + const preferredName = sanitizeRemoteName(input.preferredName); + const normalizedTargetUrl = normalizeRemoteUrl(input.url); + const remoteFetchUrls = yield* runGitStdout("GitCore.ensureRemote.listRemoteUrls", input.cwd, [ + "remote", + "-v", + ]).pipe(Effect.map((stdout) => parseRemoteFetchUrls(stdout))); + + for (const [remoteName, remoteUrl] of remoteFetchUrls.entries()) { + if (normalizeRemoteUrl(remoteUrl) === normalizedTargetUrl) { + return remoteName; + } + } + + let remoteName = preferredName; + let suffix = 1; + while (remoteFetchUrls.has(remoteName)) { + remoteName = `${preferredName}-${suffix}`; + suffix += 1; + } + + yield* runGit("GitCore.ensureRemote.add", input.cwd, ["remote", "add", remoteName, input.url]); + return remoteName; + }); + + const resolveBaseBranchForNoUpstream = Effect.fn("resolveBaseBranchForNoUpstream")(function* ( + cwd: string, + branch: string, + ) { + const 
configuredBaseBranch = yield* runGitStdout( + "GitCore.resolveBaseBranchForNoUpstream.config", + cwd, + ["config", "--get", `branch.${branch}.gh-merge-base`], + true, + ).pipe(Effect.map((stdout) => stdout.trim())); + + const primaryRemoteName = yield* resolvePrimaryRemoteName(cwd).pipe( + Effect.catch(() => Effect.succeed(null)), + ); + const defaultBranch = + primaryRemoteName === null ? null : yield* resolveDefaultBranchName(cwd, primaryRemoteName); + const candidates = [ + configuredBaseBranch.length > 0 ? configuredBaseBranch : null, + defaultBranch, + ...DEFAULT_BASE_BRANCH_CANDIDATES, + ]; + + for (const candidate of candidates) { + if (!candidate) { + continue; + } + + const remotePrefix = + primaryRemoteName && primaryRemoteName !== "origin" ? `${primaryRemoteName}/` : null; + const normalizedCandidate = candidate.startsWith("origin/") + ? candidate.slice("origin/".length) + : remotePrefix && candidate.startsWith(remotePrefix) + ? candidate.slice(remotePrefix.length) + : candidate; + if (normalizedCandidate.length === 0 || normalizedCandidate === branch) { + continue; + } + + if (yield* branchExists(cwd, normalizedCandidate)) { + return normalizedCandidate; + } + + if ( + primaryRemoteName && + (yield* remoteBranchExists(cwd, primaryRemoteName, normalizedCandidate)) + ) { + return `${primaryRemoteName}/${normalizedCandidate}`; + } + } + + return null; + }); + + const computeAheadCountAgainstBase = Effect.fn("computeAheadCountAgainstBase")(function* ( + cwd: string, + branch: string, + ) { + const baseBranch = yield* resolveBaseBranchForNoUpstream(cwd, branch); + if (!baseBranch) { + return 0; + } + + const result = yield* executeGit( + "GitCore.computeAheadCountAgainstBase", + cwd, + ["rev-list", "--count", `${baseBranch}..HEAD`], + { allowNonZeroExit: true }, + ); + if (result.code !== 0) { + return 0; + } + + const parsed = Number.parseInt(result.stdout.trim(), 10); + return Number.isFinite(parsed) ? 
Math.max(0, parsed) : 0; + }); + + const readBranchRecency = Effect.fn("readBranchRecency")(function* (cwd: string) { + const branchRecency = yield* executeGit( + "GitCore.readBranchRecency", + cwd, + [ + "for-each-ref", + "--format=%(refname:short)%09%(committerdate:unix)", + "refs/heads", + "refs/remotes", + ], + { + timeoutMs: 15_000, + allowNonZeroExit: true, + }, + ); + + const branchLastCommit = new Map(); + if (branchRecency.code !== 0) { + return branchLastCommit; + } + + for (const line of branchRecency.stdout.split("\n")) { + if (line.length === 0) { + continue; + } + const [name, lastCommitRaw] = line.split("\t"); + if (!name) { + continue; + } + const lastCommit = Number.parseInt(lastCommitRaw ?? "0", 10); + branchLastCommit.set(name, Number.isFinite(lastCommit) ? lastCommit : 0); + } + + return branchLastCommit; + }); + + const statusDetails: GitCoreShape["statusDetails"] = Effect.fn("statusDetails")(function* (cwd) { + yield* refreshStatusUpstreamIfStale(cwd).pipe(Effect.ignoreCause({ log: true })); + + const statusResult = yield* executeGit( + "GitCore.statusDetails.status", + cwd, + ["status", "--porcelain=2", "--branch"], + { + allowNonZeroExit: true, + }, + ); + + if (statusResult.code !== 0) { + const stderr = statusResult.stderr.trim(); + return yield* createGitCommandError( + "GitCore.statusDetails.status", + cwd, + ["status", "--porcelain=2", "--branch"], + stderr || "git status failed", + ); + } + + const [unstagedNumstatStdout, stagedNumstatStdout, defaultRefResult, hasOriginRemote] = + yield* Effect.all( + [ + runGitStdout("GitCore.statusDetails.unstagedNumstat", cwd, ["diff", "--numstat"]), + runGitStdout("GitCore.statusDetails.stagedNumstat", cwd, [ + "diff", + "--cached", + "--numstat", + ]), + executeGit( + "GitCore.statusDetails.defaultRef", + cwd, + ["symbolic-ref", "refs/remotes/origin/HEAD"], + { + allowNonZeroExit: true, + }, + ), + originRemoteExists(cwd).pipe(Effect.catch(() => Effect.succeed(false))), + ], + { concurrency: 
"unbounded" }, + ); + const statusStdout = statusResult.stdout; + const defaultBranch = + defaultRefResult.code === 0 + ? defaultRefResult.stdout.trim().replace(/^refs\/remotes\/origin\//, "") + : null; + + let branch: string | null = null; + let upstreamRef: string | null = null; + let aheadCount = 0; + let behindCount = 0; + let hasWorkingTreeChanges = false; + const changedFilesWithoutNumstat = new Set(); + + for (const line of statusStdout.split(/\r?\n/g)) { + if (line.startsWith("# branch.head ")) { + const value = line.slice("# branch.head ".length).trim(); + branch = value.startsWith("(") ? null : value; + continue; + } + if (line.startsWith("# branch.upstream ")) { + const value = line.slice("# branch.upstream ".length).trim(); + upstreamRef = value.length > 0 ? value : null; + continue; + } + if (line.startsWith("# branch.ab ")) { + const value = line.slice("# branch.ab ".length).trim(); + const parsed = parseBranchAb(value); + aheadCount = parsed.ahead; + behindCount = parsed.behind; + continue; + } + if (line.trim().length > 0 && !line.startsWith("#")) { + hasWorkingTreeChanges = true; + const pathValue = parsePorcelainPath(line); + if (pathValue) changedFilesWithoutNumstat.add(pathValue); + } + } + + if (!upstreamRef && branch) { + aheadCount = yield* computeAheadCountAgainstBase(cwd, branch).pipe( + Effect.catch(() => Effect.succeed(0)), + ); + behindCount = 0; + } + + const stagedEntries = parseNumstatEntries(stagedNumstatStdout); + const unstagedEntries = parseNumstatEntries(unstagedNumstatStdout); + const fileStatMap = new Map(); + for (const entry of [...stagedEntries, ...unstagedEntries]) { + const existing = fileStatMap.get(entry.path) ?? 
{ insertions: 0, deletions: 0 }; + existing.insertions += entry.insertions; + existing.deletions += entry.deletions; + fileStatMap.set(entry.path, existing); + } + + let insertions = 0; + let deletions = 0; + const files = Array.from(fileStatMap.entries()) + .map(([filePath, stat]) => { + insertions += stat.insertions; + deletions += stat.deletions; + return { path: filePath, insertions: stat.insertions, deletions: stat.deletions }; + }) + .toSorted((a, b) => a.path.localeCompare(b.path)); + + for (const filePath of changedFilesWithoutNumstat) { + if (fileStatMap.has(filePath)) continue; + files.push({ path: filePath, insertions: 0, deletions: 0 }); + } + files.sort((a, b) => a.path.localeCompare(b.path)); + + return { + isRepo: true, + hasOriginRemote, + isDefaultBranch: + branch !== null && + (branch === defaultBranch || + (defaultBranch === null && (branch === "main" || branch === "master"))), + branch, + upstreamRef, + hasWorkingTreeChanges, + workingTree: { + files, + insertions, + deletions, + }, + hasUpstream: upstreamRef !== null, + aheadCount, + behindCount, + }; + }); + + const status: GitCoreShape["status"] = (input) => + statusDetails(input.cwd).pipe( + Effect.map((details) => ({ + isRepo: details.isRepo, + hasOriginRemote: details.hasOriginRemote, + isDefaultBranch: details.isDefaultBranch, + branch: details.branch, + hasWorkingTreeChanges: details.hasWorkingTreeChanges, + workingTree: details.workingTree, + hasUpstream: details.hasUpstream, + aheadCount: details.aheadCount, + behindCount: details.behindCount, + pr: null, + })), + ); + + const prepareCommitContext: GitCoreShape["prepareCommitContext"] = Effect.fn( + "prepareCommitContext", + )(function* (cwd, filePaths) { + if (filePaths && filePaths.length > 0) { + yield* runGit("GitCore.prepareCommitContext.reset", cwd, ["reset"]).pipe( + Effect.catch(() => Effect.void), + ); + yield* runGit("GitCore.prepareCommitContext.addSelected", cwd, [ + "add", + "-A", + "--", + ...filePaths, + ]); + } else { + 
yield* runGit("GitCore.prepareCommitContext.addAll", cwd, ["add", "-A"]); + } + + const stagedSummary = yield* runGitStdout("GitCore.prepareCommitContext.stagedSummary", cwd, [ + "diff", + "--cached", + "--name-status", + ]).pipe(Effect.map((stdout) => stdout.trim())); + if (stagedSummary.length === 0) { + return null; + } + + const stagedPatch = yield* runGitStdoutWithOptions( + "GitCore.prepareCommitContext.stagedPatch", + cwd, + ["diff", "--cached", "--patch", "--minimal"], + { + maxOutputBytes: PREPARED_COMMIT_PATCH_MAX_OUTPUT_BYTES, + truncateOutputAtMaxBytes: true, + }, + ); + + return { + stagedSummary, + stagedPatch, + }; + }); + + const commit: GitCoreShape["commit"] = Effect.fn("commit")(function* ( + cwd, + subject, + body, + options?: GitCommitOptions, + ) { + const args = ["commit", "-m", subject]; + const trimmedBody = body.trim(); + if (trimmedBody.length > 0) { + args.push("-m", trimmedBody); + } + const progress = + options?.progress?.onOutputLine === undefined + ? options?.progress + : { + ...options.progress, + onStdoutLine: (line: string) => + options.progress?.onOutputLine?.({ stream: "stdout", text: line }) ?? Effect.void, + onStderrLine: (line: string) => + options.progress?.onOutputLine?.({ stream: "stderr", text: line }) ?? Effect.void, + }; + yield* executeGit("GitCore.commit.commit", cwd, args, { + ...(options?.timeoutMs !== undefined ? { timeoutMs: options.timeoutMs } : {}), + ...(progress ? { progress } : {}), + }).pipe(Effect.asVoid); + const commitSha = yield* runGitStdout("GitCore.commit.revParseHead", cwd, [ + "rev-parse", + "HEAD", + ]).pipe(Effect.map((stdout) => stdout.trim())); + + return { commitSha }; + }); + + const pushCurrentBranch: GitCoreShape["pushCurrentBranch"] = Effect.fn("pushCurrentBranch")( + function* (cwd, fallbackBranch) { + const details = yield* statusDetails(cwd); + const branch = details.branch ?? 
fallbackBranch; + if (!branch) { + return yield* createGitCommandError( + "GitCore.pushCurrentBranch", + cwd, + ["push"], + "Cannot push from detached HEAD.", + ); + } + + const hasNoLocalDelta = details.aheadCount === 0 && details.behindCount === 0; + if (hasNoLocalDelta) { + if (details.hasUpstream) { + return { + status: "skipped_up_to_date" as const, + branch, + ...(details.upstreamRef ? { upstreamBranch: details.upstreamRef } : {}), + }; + } + + const comparableBaseBranch = yield* resolveBaseBranchForNoUpstream(cwd, branch).pipe( + Effect.catch(() => Effect.succeed(null)), + ); + if (comparableBaseBranch) { + const publishRemoteName = yield* resolvePushRemoteName(cwd, branch).pipe( + Effect.catch(() => Effect.succeed(null)), + ); + if (!publishRemoteName) { + return { + status: "skipped_up_to_date" as const, + branch, + }; + } + + const hasRemoteBranch = yield* remoteBranchExists(cwd, publishRemoteName, branch).pipe( + Effect.catch(() => Effect.succeed(false)), + ); + if (hasRemoteBranch) { + return { + status: "skipped_up_to_date" as const, + branch, + }; + } + } + } + + if (!details.hasUpstream) { + const publishRemoteName = yield* resolvePushRemoteName(cwd, branch); + if (!publishRemoteName) { + return yield* createGitCommandError( + "GitCore.pushCurrentBranch", + cwd, + ["push"], + "Cannot push because no git remote is configured for this repository.", + ); + } + yield* runGit("GitCore.pushCurrentBranch.pushWithUpstream", cwd, [ + "push", + "-u", + publishRemoteName, + branch, + ]); + return { + status: "pushed" as const, + branch, + upstreamBranch: `${publishRemoteName}/${branch}`, + setUpstream: true, + }; + } + + const currentUpstream = yield* resolveCurrentUpstream(cwd).pipe( + Effect.catch(() => Effect.succeed(null)), + ); + if (currentUpstream) { + yield* runGit("GitCore.pushCurrentBranch.pushUpstream", cwd, [ + "push", + currentUpstream.remoteName, + `HEAD:${currentUpstream.upstreamBranch}`, + ]); + return { + status: "pushed" as const, + branch, + 
upstreamBranch: currentUpstream.upstreamRef, + setUpstream: false, + }; + } + + yield* runGit("GitCore.pushCurrentBranch.push", cwd, ["push"]); + return { + status: "pushed" as const, + branch, + ...(details.upstreamRef ? { upstreamBranch: details.upstreamRef } : {}), + setUpstream: false, + }; + }, + ); + + const pullCurrentBranch: GitCoreShape["pullCurrentBranch"] = Effect.fn("pullCurrentBranch")( + function* (cwd) { + const details = yield* statusDetails(cwd); + const branch = details.branch; + if (!branch) { + return yield* createGitCommandError( + "GitCore.pullCurrentBranch", + cwd, + ["pull", "--ff-only"], + "Cannot pull from detached HEAD.", + ); + } + if (!details.hasUpstream) { + return yield* createGitCommandError( + "GitCore.pullCurrentBranch", + cwd, + ["pull", "--ff-only"], + "Current branch has no upstream configured. Push with upstream first.", + ); + } + const beforeSha = yield* runGitStdout( + "GitCore.pullCurrentBranch.beforeSha", + cwd, + ["rev-parse", "HEAD"], + true, + ).pipe(Effect.map((stdout) => stdout.trim())); + yield* executeGit("GitCore.pullCurrentBranch.pull", cwd, ["pull", "--ff-only"], { + timeoutMs: 30_000, + fallbackErrorMessage: "git pull failed", + }); + const afterSha = yield* runGitStdout( + "GitCore.pullCurrentBranch.afterSha", + cwd, + ["rev-parse", "HEAD"], + true, + ).pipe(Effect.map((stdout) => stdout.trim())); + + const refreshed = yield* statusDetails(cwd); + return { + status: beforeSha.length > 0 && beforeSha === afterSha ? 
"skipped_up_to_date" : "pulled", + branch, + upstreamBranch: refreshed.upstreamRef, + }; + }, + ); + + const readRangeContext: GitCoreShape["readRangeContext"] = Effect.fn("readRangeContext")( + function* (cwd, baseBranch) { + const range = `${baseBranch}..HEAD`; + const [commitSummary, diffSummary, diffPatch] = yield* Effect.all( + [ + runGitStdoutWithOptions( + "GitCore.readRangeContext.log", + cwd, + ["log", "--oneline", range], + { + maxOutputBytes: RANGE_COMMIT_SUMMARY_MAX_OUTPUT_BYTES, + truncateOutputAtMaxBytes: true, + }, + ), + runGitStdoutWithOptions( + "GitCore.readRangeContext.diffStat", + cwd, + ["diff", "--stat", range], + { + maxOutputBytes: RANGE_DIFF_SUMMARY_MAX_OUTPUT_BYTES, + truncateOutputAtMaxBytes: true, + }, + ), + runGitStdoutWithOptions( + "GitCore.readRangeContext.diffPatch", + cwd, + ["diff", "--patch", "--minimal", range], + { + maxOutputBytes: RANGE_DIFF_PATCH_MAX_OUTPUT_BYTES, + truncateOutputAtMaxBytes: true, + }, + ), + ], + { concurrency: "unbounded" }, + ); + + return { + commitSummary, + diffSummary, + diffPatch, + }; + }, + ); + + const readConfigValue: GitCoreShape["readConfigValue"] = (cwd, key) => + runGitStdout("GitCore.readConfigValue", cwd, ["config", "--get", key], true).pipe( + Effect.map((stdout) => stdout.trim()), + Effect.map((trimmed) => (trimmed.length > 0 ? 
trimmed : null)), + ); + + const isInsideWorkTree: GitCoreShape["isInsideWorkTree"] = (cwd) => + executeGit("GitCore.isInsideWorkTree", cwd, ["rev-parse", "--is-inside-work-tree"], { + allowNonZeroExit: true, + timeoutMs: 5_000, + maxOutputBytes: 4_096, + }).pipe(Effect.map((result) => result.code === 0 && result.stdout.trim() === "true")); + + const listWorkspaceFiles: GitCoreShape["listWorkspaceFiles"] = (cwd) => + executeGit( + "GitCore.listWorkspaceFiles", + cwd, + ["ls-files", "--cached", "--others", "--exclude-standard", "-z"], + { + allowNonZeroExit: true, + timeoutMs: 20_000, + maxOutputBytes: WORKSPACE_FILES_MAX_OUTPUT_BYTES, + truncateOutputAtMaxBytes: true, + }, + ).pipe( + Effect.flatMap((result) => + result.code === 0 + ? Effect.succeed({ + paths: splitNullSeparatedPaths(result.stdout, result.stdoutTruncated), + truncated: result.stdoutTruncated, + }) + : Effect.fail( + createGitCommandError( + "GitCore.listWorkspaceFiles", + cwd, + ["ls-files", "--cached", "--others", "--exclude-standard", "-z"], + result.stderr.trim().length > 0 ? result.stderr.trim() : "git ls-files failed", + ), + ), + ), + ); + + const filterIgnoredPaths: GitCoreShape["filterIgnoredPaths"] = (cwd, relativePaths) => + Effect.gen(function* () { + if (relativePaths.length === 0) { + return relativePaths; + } + + const ignoredPaths = new Set(); + const chunks = chunkPathsForGitCheckIgnore(relativePaths); + + for (const chunk of chunks) { + const result = yield* executeGit( + "GitCore.filterIgnoredPaths", + cwd, + ["check-ignore", "--no-index", "-z", "--stdin"], + { + stdin: `${chunk.join("\0")}\0`, + allowNonZeroExit: true, + timeoutMs: 20_000, + maxOutputBytes: WORKSPACE_FILES_MAX_OUTPUT_BYTES, + truncateOutputAtMaxBytes: true, + }, + ); + + if (result.code !== 0 && result.code !== 1) { + return yield* createGitCommandError( + "GitCore.filterIgnoredPaths", + cwd, + ["check-ignore", "--no-index", "-z", "--stdin"], + result.stderr.trim().length > 0 ? 
result.stderr.trim() : "git check-ignore failed", + ); + } + + for (const ignoredPath of splitNullSeparatedPaths(result.stdout, result.stdoutTruncated)) { + ignoredPaths.add(ignoredPath); + } + } + + if (ignoredPaths.size === 0) { + return relativePaths; + } + + return relativePaths.filter((relativePath) => !ignoredPaths.has(relativePath)); + }); + + const listBranches: GitCoreShape["listBranches"] = Effect.fn("listBranches")(function* (input) { + const branchRecencyPromise = readBranchRecency(input.cwd).pipe( + Effect.catch(() => Effect.succeed(new Map())), + ); + const localBranchResult = yield* executeGit( + "GitCore.listBranches.branchNoColor", + input.cwd, + ["branch", "--no-color", "--no-column"], + { + timeoutMs: 10_000, + allowNonZeroExit: true, + }, + ); + + if (localBranchResult.code !== 0) { + const stderr = localBranchResult.stderr.trim(); + if (stderr.toLowerCase().includes("not a git repository")) { + return { + branches: [], + isRepo: false, + hasOriginRemote: false, + nextCursor: null, + totalCount: 0, + }; + } + return yield* createGitCommandError( + "GitCore.listBranches", + input.cwd, + ["branch", "--no-color", "--no-column"], + stderr || "git branch failed", + ); + } + + const remoteBranchResultEffect = executeGit( + "GitCore.listBranches.remoteBranches", + input.cwd, + ["branch", "--no-color", "--no-column", "--remotes"], + { + timeoutMs: 10_000, + allowNonZeroExit: true, + }, + ).pipe( + Effect.catch((error) => + Effect.logWarning( + `GitCore.listBranches: remote branch lookup failed for ${input.cwd}: ${error.message}. Falling back to an empty remote branch list.`, + ).pipe(Effect.as({ code: 1, stdout: "", stderr: "" })), + ), + ); + + const remoteNamesResultEffect = executeGit( + "GitCore.listBranches.remoteNames", + input.cwd, + ["remote"], + { + timeoutMs: 5_000, + allowNonZeroExit: true, + }, + ).pipe( + Effect.catch((error) => + Effect.logWarning( + `GitCore.listBranches: remote name lookup failed for ${input.cwd}: ${error.message}. 
Falling back to an empty remote name list.`, + ).pipe(Effect.as({ code: 1, stdout: "", stderr: "" })), + ), + ); + + const [defaultRef, worktreeList, remoteBranchResult, remoteNamesResult, branchLastCommit] = + yield* Effect.all( + [ + executeGit( + "GitCore.listBranches.defaultRef", + input.cwd, + ["symbolic-ref", "refs/remotes/origin/HEAD"], + { + timeoutMs: 5_000, + allowNonZeroExit: true, + }, + ), + executeGit( + "GitCore.listBranches.worktreeList", + input.cwd, + ["worktree", "list", "--porcelain"], + { + timeoutMs: 5_000, + allowNonZeroExit: true, + }, + ), + remoteBranchResultEffect, + remoteNamesResultEffect, + branchRecencyPromise, + ], + { concurrency: "unbounded" }, + ); + + const remoteNames = + remoteNamesResult.code === 0 ? parseRemoteNames(remoteNamesResult.stdout) : []; + if (remoteBranchResult.code !== 0 && remoteBranchResult.stderr.trim().length > 0) { + yield* Effect.logWarning( + `GitCore.listBranches: remote branch lookup returned code ${remoteBranchResult.code} for ${input.cwd}: ${remoteBranchResult.stderr.trim()}. Falling back to an empty remote branch list.`, + ); + } + if (remoteNamesResult.code !== 0 && remoteNamesResult.stderr.trim().length > 0) { + yield* Effect.logWarning( + `GitCore.listBranches: remote name lookup returned code ${remoteNamesResult.code} for ${input.cwd}: ${remoteNamesResult.stderr.trim()}. Falling back to an empty remote name list.`, + ); + } + + const defaultBranch = + defaultRef.code === 0 + ? defaultRef.stdout.trim().replace(/^refs\/remotes\/origin\//, "") + : null; + + const worktreeMap = new Map(); + if (worktreeList.code === 0) { + let currentPath: string | null = null; + for (const line of worktreeList.stdout.split("\n")) { + if (line.startsWith("worktree ")) { + const candidatePath = line.slice("worktree ".length); + const exists = yield* fileSystem.stat(candidatePath).pipe( + Effect.map(() => true), + Effect.catch(() => Effect.succeed(false)), + ); + currentPath = exists ? 
candidatePath : null; + } else if (line.startsWith("branch refs/heads/") && currentPath) { + worktreeMap.set(line.slice("branch refs/heads/".length), currentPath); + } else if (line === "") { + currentPath = null; + } + } + } + + const localBranches = localBranchResult.stdout + .split("\n") + .map(parseBranchLine) + .filter((branch): branch is { name: string; current: boolean } => branch !== null) + .map((branch) => ({ + name: branch.name, + current: branch.current, + isRemote: false, + isDefault: branch.name === defaultBranch, + worktreePath: worktreeMap.get(branch.name) ?? null, + })) + .toSorted((a, b) => { + const aPriority = a.current ? 0 : a.isDefault ? 1 : 2; + const bPriority = b.current ? 0 : b.isDefault ? 1 : 2; + if (aPriority !== bPriority) return aPriority - bPriority; + + const aLastCommit = branchLastCommit.get(a.name) ?? 0; + const bLastCommit = branchLastCommit.get(b.name) ?? 0; + if (aLastCommit !== bLastCommit) return bLastCommit - aLastCommit; + return a.name.localeCompare(b.name); + }); + + const remoteBranches = + remoteBranchResult.code === 0 + ? remoteBranchResult.stdout + .split("\n") + .map(parseBranchLine) + .filter((branch): branch is { name: string; current: boolean } => branch !== null) + .map((branch) => { + const parsedRemoteRef = parseRemoteRefWithRemoteNames(branch.name, remoteNames); + const remoteBranch: { + name: string; + current: boolean; + isRemote: boolean; + remoteName?: string; + isDefault: boolean; + worktreePath: string | null; + } = { + name: branch.name, + current: false, + isRemote: true, + isDefault: false, + worktreePath: null, + }; + if (parsedRemoteRef) { + remoteBranch.remoteName = parsedRemoteRef.remoteName; + } + return remoteBranch; + }) + .toSorted((a, b) => { + const aLastCommit = branchLastCommit.get(a.name) ?? 0; + const bLastCommit = branchLastCommit.get(b.name) ?? 
0; + if (aLastCommit !== bLastCommit) return bLastCommit - aLastCommit; + return a.name.localeCompare(b.name); + }) + : []; + + const branches = paginateBranches({ + branches: filterBranchesForListQuery( + dedupeRemoteBranchesWithLocalMatches([...localBranches, ...remoteBranches]), + input.query, + ), + cursor: input.cursor, + limit: input.limit, + }); + + return { + branches: [...branches.branches], + isRepo: true, + hasOriginRemote: remoteNames.includes("origin"), + nextCursor: branches.nextCursor, + totalCount: branches.totalCount, + }; + }); + + const createWorktree: GitCoreShape["createWorktree"] = Effect.fn("createWorktree")( + function* (input) { + const targetBranch = input.newBranch ?? input.branch; + const sanitizedBranch = targetBranch.replace(/\//g, "-"); + const repoName = path.basename(input.cwd); + const worktreePath = input.path ?? path.join(worktreesDir, repoName, sanitizedBranch); + const args = input.newBranch + ? ["worktree", "add", "-b", input.newBranch, worktreePath, input.branch] + : ["worktree", "add", worktreePath, input.branch]; + + yield* executeGit("GitCore.createWorktree", input.cwd, args, { + fallbackErrorMessage: "git worktree add failed", + }); + + return { + worktree: { + path: worktreePath, + branch: targetBranch, + }, + }; + }, + ); + + const fetchPullRequestBranch: GitCoreShape["fetchPullRequestBranch"] = Effect.fn( + "fetchPullRequestBranch", + )(function* (input) { + const remoteName = yield* resolvePrimaryRemoteName(input.cwd); + yield* executeGit( + "GitCore.fetchPullRequestBranch", + input.cwd, + [ + "fetch", + "--quiet", + "--no-tags", + remoteName, + `+refs/pull/${input.prNumber}/head:refs/heads/${input.branch}`, + ], + { + fallbackErrorMessage: "git fetch pull request branch failed", + }, + ); + }); + + const fetchRemoteBranch: GitCoreShape["fetchRemoteBranch"] = Effect.fn("fetchRemoteBranch")( + function* (input) { + yield* runGit("GitCore.fetchRemoteBranch.fetch", input.cwd, [ + "fetch", + "--quiet", + "--no-tags", + 
input.remoteName, + `+refs/heads/${input.remoteBranch}:refs/remotes/${input.remoteName}/${input.remoteBranch}`, + ]); + + const localBranchAlreadyExists = yield* branchExists(input.cwd, input.localBranch); + const targetRef = `${input.remoteName}/${input.remoteBranch}`; + yield* runGit( + "GitCore.fetchRemoteBranch.materialize", + input.cwd, + localBranchAlreadyExists + ? ["branch", "--force", input.localBranch, targetRef] + : ["branch", input.localBranch, targetRef], + ); + }, + ); + + const setBranchUpstream: GitCoreShape["setBranchUpstream"] = (input) => + runGit("GitCore.setBranchUpstream", input.cwd, [ + "branch", + "--set-upstream-to", + `${input.remoteName}/${input.remoteBranch}`, + input.branch, + ]); + + const removeWorktree: GitCoreShape["removeWorktree"] = Effect.fn("removeWorktree")( + function* (input) { + const args = ["worktree", "remove"]; + if (input.force) { + args.push("--force"); + } + args.push(input.path); + yield* executeGit("GitCore.removeWorktree", input.cwd, args, { + timeoutMs: 15_000, + fallbackErrorMessage: "git worktree remove failed", + }).pipe( + Effect.mapError((error) => + createGitCommandError( + "GitCore.removeWorktree", + input.cwd, + args, + `${commandLabel(args)} failed (cwd: ${input.cwd}): ${error instanceof Error ? 
error.message : String(error)}`, + error, + ), + ), + ); + }, + ); + + const renameBranch: GitCoreShape["renameBranch"] = Effect.fn("renameBranch")(function* (input) { + if (input.oldBranch === input.newBranch) { + return { branch: input.newBranch }; + } + const targetBranch = yield* resolveAvailableBranchName(input.cwd, input.newBranch); + + yield* executeGit( + "GitCore.renameBranch", + input.cwd, + ["branch", "-m", "--", input.oldBranch, targetBranch], + { + timeoutMs: 10_000, + fallbackErrorMessage: "git branch rename failed", + }, + ); + + return { branch: targetBranch }; + }); + + const createBranch: GitCoreShape["createBranch"] = (input) => + executeGit("GitCore.createBranch", input.cwd, ["branch", input.branch], { + timeoutMs: 10_000, + fallbackErrorMessage: "git branch create failed", + }).pipe(Effect.asVoid); + + const checkoutBranch: GitCoreShape["checkoutBranch"] = Effect.fn("checkoutBranch")( + function* (input) { + const [localInputExists, remoteExists] = yield* Effect.all( + [ + executeGit( + "GitCore.checkoutBranch.localInputExists", + input.cwd, + ["show-ref", "--verify", "--quiet", `refs/heads/${input.branch}`], + { + timeoutMs: 5_000, + allowNonZeroExit: true, + }, + ).pipe(Effect.map((result) => result.code === 0)), + executeGit( + "GitCore.checkoutBranch.remoteExists", + input.cwd, + ["show-ref", "--verify", "--quiet", `refs/remotes/${input.branch}`], + { + timeoutMs: 5_000, + allowNonZeroExit: true, + }, + ).pipe(Effect.map((result) => result.code === 0)), + ], + { concurrency: "unbounded" }, + ); + + const localTrackingBranch = remoteExists + ? yield* executeGit( + "GitCore.checkoutBranch.localTrackingBranch", + input.cwd, + ["for-each-ref", "--format=%(refname:short)\t%(upstream:short)", "refs/heads"], + { + timeoutMs: 5_000, + allowNonZeroExit: true, + }, + ).pipe( + Effect.map((result) => + result.code === 0 + ? 
parseTrackingBranchByUpstreamRef(result.stdout, input.branch) + : null, + ), + ) + : null; + + const localTrackedBranchCandidate = deriveLocalBranchNameFromRemoteRef(input.branch); + const localTrackedBranchTargetExists = + remoteExists && localTrackedBranchCandidate + ? yield* executeGit( + "GitCore.checkoutBranch.localTrackedBranchTargetExists", + input.cwd, + ["show-ref", "--verify", "--quiet", `refs/heads/${localTrackedBranchCandidate}`], + { + timeoutMs: 5_000, + allowNonZeroExit: true, + }, + ).pipe(Effect.map((result) => result.code === 0)) + : false; + + const checkoutArgs = localInputExists + ? ["checkout", input.branch] + : remoteExists && !localTrackingBranch && localTrackedBranchTargetExists + ? ["checkout", input.branch] + : remoteExists && !localTrackingBranch + ? ["checkout", "--track", input.branch] + : remoteExists && localTrackingBranch + ? ["checkout", localTrackingBranch] + : ["checkout", input.branch]; + + yield* executeGit("GitCore.checkoutBranch.checkout", input.cwd, checkoutArgs, { + timeoutMs: 10_000, + fallbackErrorMessage: "git checkout failed", + }); + }, + ); + + const initRepo: GitCoreShape["initRepo"] = (input) => + executeGit("GitCore.initRepo", input.cwd, ["init"], { + timeoutMs: 10_000, + fallbackErrorMessage: "git init failed", + }).pipe(Effect.asVoid); + + const listLocalBranchNames: GitCoreShape["listLocalBranchNames"] = (cwd) => + runGitStdout("GitCore.listLocalBranchNames", cwd, [ + "branch", + "--list", + "--no-column", + "--format=%(refname:short)", + ]).pipe( + Effect.map((stdout) => + stdout + .split("\n") + .map((line) => line.trim()) + .filter((line) => line.length > 0), + ), + ); + + return { + execute, + status, + statusDetails, + prepareCommitContext, + commit, + pushCurrentBranch, + pullCurrentBranch, + readRangeContext, + readConfigValue, + isInsideWorkTree, + listWorkspaceFiles, + filterIgnoredPaths, + listBranches, + createWorktree, + fetchPullRequestBranch, + ensureRemote, + fetchRemoteBranch, + 
setBranchUpstream, + removeWorktree, + renameBranch, + createBranch, + checkoutBranch, + initRepo, + listLocalBranchNames, + } satisfies GitCoreShape; +}); diff --git a/apps/server/src/git/Layers/GitCore.test.ts b/apps/server/src/git/Layers/GitCore.test.ts index 5e4416d8b9..06049995c6 100644 --- a/apps/server/src/git/Layers/GitCore.test.ts +++ b/apps/server/src/git/Layers/GitCore.test.ts @@ -6,7 +6,8 @@ import { it } from "@effect/vitest"; import { Effect, FileSystem, Layer, PlatformError, Scope } from "effect"; import { describe, expect, vi } from "vitest"; -import { GitCoreLive, makeGitCore } from "./GitCore.ts"; +import { GitCoreLive } from "./GitCore.ts"; +import { makeGitCore } from "./GitCore.shared.ts"; import { GitCore, type GitCoreShape } from "../Services/GitCore.ts"; import { GitCommandError } from "@t3tools/contracts"; import { type ProcessRunResult, runProcess } from "../../processRunner.ts"; diff --git a/apps/server/src/git/Layers/GitCore.ts b/apps/server/src/git/Layers/GitCore.ts index 1178a4b67e..8b324417c8 100644 --- a/apps/server/src/git/Layers/GitCore.ts +++ b/apps/server/src/git/Layers/GitCore.ts @@ -1,2133 +1,6 @@ -import { - Cache, - Data, - Duration, - Effect, - Exit, - FileSystem, - Layer, - Option, - Path, - PlatformError, - Ref, - Result, - Schema, - Scope, - Semaphore, - Stream, -} from "effect"; -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; +import { Layer } from "effect"; -import { GitCommandError, type GitBranch } from "@t3tools/contracts"; -import { dedupeRemoteBranchesWithLocalMatches } from "@t3tools/shared/git"; -import { compactTraceAttributes } from "../../observability/Attributes.ts"; -import { gitCommandDuration, gitCommandsTotal, withMetrics } from "../../observability/Metrics.ts"; -import { - GitCore, - type ExecuteGitProgress, - type GitCommitOptions, - type GitCoreShape, - type ExecuteGitInput, - type ExecuteGitResult, -} from "../Services/GitCore.ts"; -import { - parseRemoteNames, - 
parseRemoteNamesInGitOrder, - parseRemoteRefWithRemoteNames, -} from "../remoteRefs.ts"; -import { ServerConfig } from "../../config.ts"; -import { decodeJsonResult } from "@t3tools/shared/schemaJson"; - -const DEFAULT_TIMEOUT_MS = 30_000; -const DEFAULT_MAX_OUTPUT_BYTES = 1_000_000; -const OUTPUT_TRUNCATED_MARKER = "\n\n[truncated]"; -const PREPARED_COMMIT_PATCH_MAX_OUTPUT_BYTES = 49_000; -const RANGE_COMMIT_SUMMARY_MAX_OUTPUT_BYTES = 19_000; -const RANGE_DIFF_SUMMARY_MAX_OUTPUT_BYTES = 19_000; -const RANGE_DIFF_PATCH_MAX_OUTPUT_BYTES = 59_000; -const WORKSPACE_FILES_MAX_OUTPUT_BYTES = 16 * 1024 * 1024; -const GIT_CHECK_IGNORE_MAX_STDIN_BYTES = 256 * 1024; -const STATUS_UPSTREAM_REFRESH_INTERVAL = Duration.seconds(15); -const STATUS_UPSTREAM_REFRESH_TIMEOUT = Duration.seconds(5); -const STATUS_UPSTREAM_REFRESH_FAILURE_COOLDOWN = Duration.seconds(5); -const STATUS_UPSTREAM_REFRESH_CACHE_CAPACITY = 2_048; -const DEFAULT_BASE_BRANCH_CANDIDATES = ["main", "master"] as const; -const GIT_LIST_BRANCHES_DEFAULT_LIMIT = 100; - -type TraceTailState = { - processedChars: number; - remainder: string; -}; - -class StatusUpstreamRefreshCacheKey extends Data.Class<{ - gitCommonDir: string; - upstreamRef: string; - remoteName: string; - upstreamBranch: string; -}> {} - -interface ExecuteGitOptions { - stdin?: string | undefined; - timeoutMs?: number | undefined; - allowNonZeroExit?: boolean | undefined; - fallbackErrorMessage?: string | undefined; - maxOutputBytes?: number | undefined; - truncateOutputAtMaxBytes?: boolean | undefined; - progress?: ExecuteGitProgress | undefined; -} - -function parseBranchAb(value: string): { ahead: number; behind: number } { - const match = value.match(/^\+(\d+)\s+-(\d+)$/); - if (!match) return { ahead: 0, behind: 0 }; - return { - ahead: Number(match[1] ?? "0"), - behind: Number(match[2] ?? 
"0"), - }; -} - -function parseNumstatEntries( - stdout: string, -): Array<{ path: string; insertions: number; deletions: number }> { - const entries: Array<{ path: string; insertions: number; deletions: number }> = []; - for (const line of stdout.split(/\r?\n/g)) { - if (line.trim().length === 0) continue; - const [addedRaw, deletedRaw, ...pathParts] = line.split("\t"); - const rawPath = - pathParts.length > 1 ? (pathParts.at(-1) ?? "").trim() : pathParts.join("\t").trim(); - if (rawPath.length === 0) continue; - const added = Number.parseInt(addedRaw ?? "0", 10); - const deleted = Number.parseInt(deletedRaw ?? "0", 10); - const renameArrowIndex = rawPath.indexOf(" => "); - const normalizedPath = - renameArrowIndex >= 0 ? rawPath.slice(renameArrowIndex + " => ".length).trim() : rawPath; - entries.push({ - path: normalizedPath.length > 0 ? normalizedPath : rawPath, - insertions: Number.isFinite(added) ? added : 0, - deletions: Number.isFinite(deleted) ? deleted : 0, - }); - } - return entries; -} - -function splitNullSeparatedPaths(input: string, truncated: boolean): string[] { - const parts = input.split("\0"); - if (parts.length === 0) return []; - - if (truncated && parts[parts.length - 1]?.length) { - parts.pop(); - } - - return parts.filter((value) => value.length > 0); -} - -function chunkPathsForGitCheckIgnore(relativePaths: readonly string[]): string[][] { - const chunks: string[][] = []; - let chunk: string[] = []; - let chunkBytes = 0; - - for (const relativePath of relativePaths) { - const relativePathBytes = Buffer.byteLength(relativePath) + 1; - if (chunk.length > 0 && chunkBytes + relativePathBytes > GIT_CHECK_IGNORE_MAX_STDIN_BYTES) { - chunks.push(chunk); - chunk = []; - chunkBytes = 0; - } - - chunk.push(relativePath); - chunkBytes += relativePathBytes; - - if (chunkBytes >= GIT_CHECK_IGNORE_MAX_STDIN_BYTES) { - chunks.push(chunk); - chunk = []; - chunkBytes = 0; - } - } - - if (chunk.length > 0) { - chunks.push(chunk); - } - - return chunks; -} - 
-function parsePorcelainPath(line: string): string | null { - if (line.startsWith("? ") || line.startsWith("! ")) { - const simple = line.slice(2).trim(); - return simple.length > 0 ? simple : null; - } - - if (!(line.startsWith("1 ") || line.startsWith("2 ") || line.startsWith("u "))) { - return null; - } - - const tabIndex = line.indexOf("\t"); - if (tabIndex >= 0) { - const fromTab = line.slice(tabIndex + 1); - const [filePath] = fromTab.split("\t"); - return filePath?.trim().length ? filePath.trim() : null; - } - - const parts = line.trim().split(/\s+/g); - const filePath = parts.at(-1) ?? ""; - return filePath.length > 0 ? filePath : null; -} - -function parseBranchLine(line: string): { name: string; current: boolean } | null { - const trimmed = line.trim(); - if (trimmed.length === 0) return null; - - const name = trimmed.replace(/^[*+]\s+/, ""); - // Exclude symbolic refs like: "origin/HEAD -> origin/main". - // Exclude detached HEAD pseudo-refs like: "(HEAD detached at origin/main)". - if (name.includes(" -> ") || name.startsWith("(")) return null; - - return { - name, - current: trimmed.startsWith("* "), - }; -} - -function filterBranchesForListQuery( - branches: ReadonlyArray, - query?: string, -): ReadonlyArray { - if (!query) { - return branches; - } - - const normalizedQuery = query.toLowerCase(); - return branches.filter((branch) => branch.name.toLowerCase().includes(normalizedQuery)); -} - -function paginateBranches(input: { - branches: ReadonlyArray; - cursor?: number | undefined; - limit?: number | undefined; -}): { - branches: ReadonlyArray; - nextCursor: number | null; - totalCount: number; -} { - const cursor = input.cursor ?? 0; - const limit = input.limit ?? GIT_LIST_BRANCHES_DEFAULT_LIMIT; - const totalCount = input.branches.length; - const branches = input.branches.slice(cursor, cursor + limit); - const nextCursor = cursor + branches.length < totalCount ? 
cursor + branches.length : null; - - return { - branches, - nextCursor, - totalCount, - }; -} - -function sanitizeRemoteName(value: string): string { - const sanitized = value - .trim() - .replace(/[^A-Za-z0-9._-]+/g, "-") - .replace(/^-+|-+$/g, ""); - return sanitized.length > 0 ? sanitized : "fork"; -} - -function normalizeRemoteUrl(value: string): string { - return value - .trim() - .replace(/\/+$/g, "") - .replace(/\.git$/i, "") - .toLowerCase(); -} - -function parseRemoteFetchUrls(stdout: string): Map { - const remotes = new Map(); - for (const line of stdout.split("\n")) { - const trimmed = line.trim(); - if (trimmed.length === 0) continue; - const match = /^(\S+)\s+(\S+)\s+\((fetch|push)\)$/.exec(trimmed); - if (!match) continue; - const [, remoteName = "", remoteUrl = "", direction = ""] = match; - if (direction !== "fetch" || remoteName.length === 0 || remoteUrl.length === 0) { - continue; - } - remotes.set(remoteName, remoteUrl); - } - return remotes; -} - -function parseUpstreamRefWithRemoteNames( - upstreamRef: string, - remoteNames: ReadonlyArray, -): { upstreamRef: string; remoteName: string; upstreamBranch: string } | null { - const parsed = parseRemoteRefWithRemoteNames(upstreamRef, remoteNames); - if (!parsed) { - return null; - } - - return { - upstreamRef, - remoteName: parsed.remoteName, - upstreamBranch: parsed.branchName, - }; -} - -function parseUpstreamRefByFirstSeparator( - upstreamRef: string, -): { upstreamRef: string; remoteName: string; upstreamBranch: string } | null { - const separatorIndex = upstreamRef.indexOf("/"); - if (separatorIndex <= 0 || separatorIndex === upstreamRef.length - 1) { - return null; - } - - const remoteName = upstreamRef.slice(0, separatorIndex).trim(); - const upstreamBranch = upstreamRef.slice(separatorIndex + 1).trim(); - if (remoteName.length === 0 || upstreamBranch.length === 0) { - return null; - } - - return { - upstreamRef, - remoteName, - upstreamBranch, - }; -} - -function 
parseTrackingBranchByUpstreamRef(stdout: string, upstreamRef: string): string | null { - for (const line of stdout.split("\n")) { - const trimmedLine = line.trim(); - if (trimmedLine.length === 0) { - continue; - } - const [branchNameRaw, upstreamBranchRaw = ""] = trimmedLine.split("\t"); - const branchName = branchNameRaw?.trim() ?? ""; - const upstreamBranch = upstreamBranchRaw.trim(); - if (branchName.length === 0 || upstreamBranch.length === 0) { - continue; - } - if (upstreamBranch === upstreamRef) { - return branchName; - } - } - - return null; -} - -function deriveLocalBranchNameFromRemoteRef(branchName: string): string | null { - const separatorIndex = branchName.indexOf("/"); - if (separatorIndex <= 0 || separatorIndex === branchName.length - 1) { - return null; - } - const localBranch = branchName.slice(separatorIndex + 1).trim(); - return localBranch.length > 0 ? localBranch : null; -} - -function commandLabel(args: readonly string[]): string { - return `git ${args.join(" ")}`; -} - -function parseDefaultBranchFromRemoteHeadRef(value: string, remoteName: string): string | null { - const trimmed = value.trim(); - const prefix = `refs/remotes/${remoteName}/`; - if (!trimmed.startsWith(prefix)) { - return null; - } - const branch = trimmed.slice(prefix.length).trim(); - return branch.length > 0 ? branch : null; -} - -function createGitCommandError( - operation: string, - cwd: string, - args: readonly string[], - detail: string, - cause?: unknown, -): GitCommandError { - return new GitCommandError({ - operation, - command: commandLabel(args), - cwd, - detail, - ...(cause !== undefined ? { cause } : {}), - }); -} - -function quoteGitCommand(args: ReadonlyArray): string { - return `git ${args.join(" ")}`; -} - -function toGitCommandError( - input: Pick, - detail: string, -) { - return (cause: unknown) => - Schema.is(GitCommandError)(cause) - ? 
cause - : new GitCommandError({ - operation: input.operation, - command: quoteGitCommand(input.args), - cwd: input.cwd, - detail: `${cause instanceof Error && cause.message.length > 0 ? cause.message : "Unknown error"} - ${detail}`, - ...(cause !== undefined ? { cause } : {}), - }); -} - -interface Trace2Monitor { - readonly env: NodeJS.ProcessEnv; - readonly flush: Effect.Effect; -} - -const nowUnixNano = (): bigint => BigInt(Date.now()) * 1_000_000n; - -const addCurrentSpanEvent = (name: string, attributes: Record) => - Effect.currentSpan.pipe( - Effect.tap((span) => - Effect.sync(() => { - span.event(name, nowUnixNano(), compactTraceAttributes(attributes)); - }), - ), - Effect.catch(() => Effect.void), - ); - -function trace2ChildKey(record: Record): string | null { - const childId = record.child_id; - if (typeof childId === "number" || typeof childId === "string") { - return String(childId); - } - const hookName = record.hook_name; - return typeof hookName === "string" && hookName.trim().length > 0 ? 
hookName.trim() : null; -} - -const Trace2Record = Schema.Record(Schema.String, Schema.Unknown); - -const createTrace2Monitor = Effect.fn("createTrace2Monitor")(function* ( - input: Pick, - progress: ExecuteGitProgress | undefined, -): Effect.fn.Return< - Trace2Monitor, - PlatformError.PlatformError, - Scope.Scope | FileSystem.FileSystem | Path.Path -> { - if (!progress?.onHookStarted && !progress?.onHookFinished) { - return { - env: {}, - flush: Effect.void, - }; - } - - const fs = yield* FileSystem.FileSystem; - const path = yield* Path.Path; - const traceFilePath = yield* fs.makeTempFileScoped({ - prefix: `t3code-git-trace2-${process.pid}-`, - suffix: ".json", - }); - const hookStartByChildKey = new Map(); - const traceTailState = yield* Ref.make({ - processedChars: 0, - remainder: "", - }); - - const handleTraceLine = Effect.fn("handleTraceLine")(function* (line: string) { - const trimmedLine = line.trim(); - if (trimmedLine.length === 0) { - return; - } - - const traceRecord = decodeJsonResult(Trace2Record)(trimmedLine); - if (Result.isFailure(traceRecord)) { - yield* Effect.logDebug( - `GitCore.trace2: failed to parse trace line for ${quoteGitCommand(input.args)} in ${input.cwd}`, - traceRecord.failure, - ); - return; - } - - if (traceRecord.success.child_class !== "hook") { - return; - } - - const event = traceRecord.success.event; - const childKey = trace2ChildKey(traceRecord.success); - if (childKey === null) { - return; - } - const started = hookStartByChildKey.get(childKey); - const hookNameFromEvent = - typeof traceRecord.success.hook_name === "string" ? traceRecord.success.hook_name.trim() : ""; - const hookName = hookNameFromEvent.length > 0 ? hookNameFromEvent : (started?.hookName ?? 
""); - if (hookName.length === 0) { - return; - } - - if (event === "child_start") { - hookStartByChildKey.set(childKey, { hookName, startedAtMs: Date.now() }); - yield* addCurrentSpanEvent("git.hook.started", { - hookName, - }); - if (progress.onHookStarted) { - yield* progress.onHookStarted(hookName); - } - return; - } - - if (event === "child_exit") { - hookStartByChildKey.delete(childKey); - const code = traceRecord.success.code; - const exitCode = typeof code === "number" && Number.isInteger(code) ? code : null; - const durationMs = started ? Math.max(0, Date.now() - started.startedAtMs) : null; - yield* addCurrentSpanEvent("git.hook.finished", { - hookName: started?.hookName ?? hookName, - exitCode, - durationMs, - }); - if (progress.onHookFinished) { - yield* progress.onHookFinished({ - hookName: started?.hookName ?? hookName, - exitCode, - durationMs, - }); - } - } - }); - - const deltaMutex = yield* Semaphore.make(1); - const readTraceDelta = deltaMutex.withPermit( - fs.readFileString(traceFilePath).pipe( - Effect.flatMap((contents) => - Effect.uninterruptible( - Ref.modify(traceTailState, ({ processedChars, remainder }) => { - if (contents.length <= processedChars) { - return [[], { processedChars, remainder }]; - } - - const appended = contents.slice(processedChars); - const combined = remainder + appended; - const lines = combined.split("\n"); - const nextRemainder = lines.pop() ?? 
""; - - return [ - lines.map((line) => line.replace(/\r$/, "")), - { - processedChars: contents.length, - remainder: nextRemainder, - }, - ]; - }).pipe( - Effect.flatMap((lines) => Effect.forEach(lines, handleTraceLine, { discard: true })), - ), - ), - ), - Effect.ignore({ log: true }), - ), - ); - const traceFileName = path.basename(traceFilePath); - yield* Stream.runForEach(fs.watch(traceFilePath), (event) => { - const eventPath = event.path; - const isTargetTraceEvent = - eventPath === traceFilePath || - eventPath === traceFileName || - path.basename(eventPath) === traceFileName; - if (!isTargetTraceEvent) return Effect.void; - return readTraceDelta; - }).pipe(Effect.ignoreCause({ log: true }), Effect.forkScoped); - - const finalizeTrace2Monitor = Effect.fn("finalizeTrace2Monitor")(function* () { - yield* readTraceDelta; - const finalLine = yield* Ref.modify(traceTailState, ({ processedChars, remainder }) => [ - remainder.trim(), - { - processedChars, - remainder: "", - }, - ]); - if (finalLine.length > 0) { - yield* handleTraceLine(finalLine); - } - }); - - yield* Effect.addFinalizer(finalizeTrace2Monitor); - - return { - env: { - GIT_TRACE2_EVENT: traceFilePath, - }, - flush: readTraceDelta, - }; -}); - -const collectOutput = Effect.fn("collectOutput")(function* ( - input: Pick, - stream: Stream.Stream, - maxOutputBytes: number, - truncateOutputAtMaxBytes: boolean, - onLine: ((line: string) => Effect.Effect) | undefined, -): Effect.fn.Return<{ readonly text: string; readonly truncated: boolean }, GitCommandError> { - const decoder = new TextDecoder(); - let bytes = 0; - let text = ""; - let lineBuffer = ""; - let truncated = false; - - const emitCompleteLines = Effect.fn("emitCompleteLines")(function* (flush: boolean) { - let newlineIndex = lineBuffer.indexOf("\n"); - while (newlineIndex >= 0) { - const line = lineBuffer.slice(0, newlineIndex).replace(/\r$/, ""); - lineBuffer = lineBuffer.slice(newlineIndex + 1); - if (line.length > 0 && onLine) { - yield* 
onLine(line); - } - newlineIndex = lineBuffer.indexOf("\n"); - } - - if (flush) { - const trailing = lineBuffer.replace(/\r$/, ""); - lineBuffer = ""; - if (trailing.length > 0 && onLine) { - yield* onLine(trailing); - } - } - }); - - const processChunk = Effect.fn("processChunk")(function* (chunk: Uint8Array) { - if (truncateOutputAtMaxBytes && truncated) { - return; - } - const nextBytes = bytes + chunk.byteLength; - if (!truncateOutputAtMaxBytes && nextBytes > maxOutputBytes) { - return yield* new GitCommandError({ - operation: input.operation, - command: quoteGitCommand(input.args), - cwd: input.cwd, - detail: `${quoteGitCommand(input.args)} output exceeded ${maxOutputBytes} bytes and was truncated.`, - }); - } - - const chunkToDecode = - truncateOutputAtMaxBytes && nextBytes > maxOutputBytes - ? chunk.subarray(0, Math.max(0, maxOutputBytes - bytes)) - : chunk; - bytes += chunkToDecode.byteLength; - truncated = truncateOutputAtMaxBytes && nextBytes > maxOutputBytes; - - const decoded = decoder.decode(chunkToDecode, { stream: !truncated }); - text += decoded; - lineBuffer += decoded; - yield* emitCompleteLines(false); - }); - - yield* Stream.runForEach(stream, processChunk).pipe( - Effect.mapError(toGitCommandError(input, "output stream failed.")), - ); - - const remainder = truncated ? 
"" : decoder.decode(); - text += remainder; - lineBuffer += remainder; - yield* emitCompleteLines(true); - return { - text, - truncated, - }; -}); - -export const makeGitCore = Effect.fn("makeGitCore")(function* (options?: { - executeOverride?: GitCoreShape["execute"]; -}) { - const fileSystem = yield* FileSystem.FileSystem; - const path = yield* Path.Path; - const { worktreesDir } = yield* ServerConfig; - - let executeRaw: GitCoreShape["execute"]; - - if (options?.executeOverride) { - executeRaw = options.executeOverride; - } else { - const commandSpawner = yield* ChildProcessSpawner.ChildProcessSpawner; - executeRaw = Effect.fnUntraced(function* (input) { - const commandInput = { - ...input, - args: [...input.args], - } as const; - const timeoutMs = input.timeoutMs ?? DEFAULT_TIMEOUT_MS; - const maxOutputBytes = input.maxOutputBytes ?? DEFAULT_MAX_OUTPUT_BYTES; - const truncateOutputAtMaxBytes = input.truncateOutputAtMaxBytes ?? false; - - const runGitCommand = Effect.fn("runGitCommand")(function* () { - const trace2Monitor = yield* createTrace2Monitor(commandInput, input.progress).pipe( - Effect.provideService(Path.Path, path), - Effect.provideService(FileSystem.FileSystem, fileSystem), - Effect.mapError(toGitCommandError(commandInput, "failed to create trace2 monitor.")), - ); - const child = yield* commandSpawner - .spawn( - ChildProcess.make("git", commandInput.args, { - cwd: commandInput.cwd, - env: { - ...process.env, - ...input.env, - ...trace2Monitor.env, - }, - }), - ) - .pipe(Effect.mapError(toGitCommandError(commandInput, "failed to spawn."))); - - const [stdout, stderr, exitCode] = yield* Effect.all( - [ - collectOutput( - commandInput, - child.stdout, - maxOutputBytes, - truncateOutputAtMaxBytes, - input.progress?.onStdoutLine, - ), - collectOutput( - commandInput, - child.stderr, - maxOutputBytes, - truncateOutputAtMaxBytes, - input.progress?.onStderrLine, - ), - child.exitCode.pipe( - Effect.map((value) => Number(value)), - 
Effect.mapError(toGitCommandError(commandInput, "failed to report exit code.")), - ), - input.stdin === undefined - ? Effect.void - : Stream.run(Stream.encodeText(Stream.make(input.stdin)), child.stdin).pipe( - Effect.mapError(toGitCommandError(commandInput, "failed to write stdin.")), - ), - ], - { concurrency: "unbounded" }, - ).pipe(Effect.map(([stdout, stderr, exitCode]) => [stdout, stderr, exitCode] as const)); - yield* trace2Monitor.flush; - - if (!input.allowNonZeroExit && exitCode !== 0) { - const trimmedStderr = stderr.text.trim(); - return yield* new GitCommandError({ - operation: commandInput.operation, - command: quoteGitCommand(commandInput.args), - cwd: commandInput.cwd, - detail: - trimmedStderr.length > 0 - ? `${quoteGitCommand(commandInput.args)} failed: ${trimmedStderr}` - : `${quoteGitCommand(commandInput.args)} failed with code ${exitCode}.`, - }); - } - - return { - code: exitCode, - stdout: stdout.text, - stderr: stderr.text, - stdoutTruncated: stdout.truncated, - stderrTruncated: stderr.truncated, - } satisfies ExecuteGitResult; - }); - - return yield* runGitCommand().pipe( - Effect.scoped, - Effect.timeoutOption(timeoutMs), - Effect.flatMap((result) => - Option.match(result, { - onNone: () => - Effect.fail( - new GitCommandError({ - operation: commandInput.operation, - command: quoteGitCommand(commandInput.args), - cwd: commandInput.cwd, - detail: `${quoteGitCommand(commandInput.args)} timed out.`, - }), - ), - onSome: Effect.succeed, - }), - ), - ); - }); - } - - const execute: GitCoreShape["execute"] = (input) => - executeRaw(input).pipe( - withMetrics({ - counter: gitCommandsTotal, - timer: gitCommandDuration, - attributes: { - operation: input.operation, - }, - }), - Effect.withSpan(input.operation, { - kind: "client", - attributes: { - "git.operation": input.operation, - "git.cwd": input.cwd, - "git.args_count": input.args.length, - }, - }), - ); - - const executeGit = ( - operation: string, - cwd: string, - args: readonly string[], - 
options: ExecuteGitOptions = {}, - ): Effect.Effect => - execute({ - operation, - cwd, - args, - ...(options.stdin !== undefined ? { stdin: options.stdin } : {}), - allowNonZeroExit: true, - ...(options.timeoutMs !== undefined ? { timeoutMs: options.timeoutMs } : {}), - ...(options.maxOutputBytes !== undefined ? { maxOutputBytes: options.maxOutputBytes } : {}), - ...(options.truncateOutputAtMaxBytes !== undefined - ? { truncateOutputAtMaxBytes: options.truncateOutputAtMaxBytes } - : {}), - ...(options.progress ? { progress: options.progress } : {}), - }).pipe( - Effect.flatMap((result) => { - if (options.allowNonZeroExit || result.code === 0) { - return Effect.succeed(result); - } - const stderr = result.stderr.trim(); - if (stderr.length > 0) { - return Effect.fail(createGitCommandError(operation, cwd, args, stderr)); - } - if (options.fallbackErrorMessage) { - return Effect.fail( - createGitCommandError(operation, cwd, args, options.fallbackErrorMessage), - ); - } - return Effect.fail( - createGitCommandError( - operation, - cwd, - args, - `${commandLabel(args)} failed: code=${result.code ?? "null"}`, - ), - ); - }), - ); - - const runGit = ( - operation: string, - cwd: string, - args: readonly string[], - allowNonZeroExit = false, - ): Effect.Effect => - executeGit(operation, cwd, args, { allowNonZeroExit }).pipe(Effect.asVoid); - - const runGitStdout = ( - operation: string, - cwd: string, - args: readonly string[], - allowNonZeroExit = false, - ): Effect.Effect => - executeGit(operation, cwd, args, { allowNonZeroExit }).pipe( - Effect.map((result) => result.stdout), - ); - - const runGitStdoutWithOptions = ( - operation: string, - cwd: string, - args: readonly string[], - options: ExecuteGitOptions = {}, - ): Effect.Effect => - executeGit(operation, cwd, args, options).pipe( - Effect.map((result) => - result.stdoutTruncated ? 
`${result.stdout}${OUTPUT_TRUNCATED_MARKER}` : result.stdout, - ), - ); - - const branchExists = (cwd: string, branch: string): Effect.Effect => - executeGit( - "GitCore.branchExists", - cwd, - ["show-ref", "--verify", "--quiet", `refs/heads/${branch}`], - { - allowNonZeroExit: true, - timeoutMs: 5_000, - }, - ).pipe(Effect.map((result) => result.code === 0)); - - const resolveAvailableBranchName = Effect.fn("resolveAvailableBranchName")(function* ( - cwd: string, - desiredBranch: string, - ) { - const isDesiredTaken = yield* branchExists(cwd, desiredBranch); - if (!isDesiredTaken) { - return desiredBranch; - } - - for (let suffix = 1; suffix <= 100; suffix += 1) { - const candidate = `${desiredBranch}-${suffix}`; - const isCandidateTaken = yield* branchExists(cwd, candidate); - if (!isCandidateTaken) { - return candidate; - } - } - - return yield* createGitCommandError( - "GitCore.renameBranch", - cwd, - ["branch", "-m", "--", desiredBranch], - `Could not find an available branch name for '${desiredBranch}'.`, - ); - }); - - const resolveCurrentUpstream = Effect.fn("resolveCurrentUpstream")(function* (cwd: string) { - const upstreamRef = yield* runGitStdout( - "GitCore.resolveCurrentUpstream", - cwd, - ["rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{upstream}"], - true, - ).pipe(Effect.map((stdout) => stdout.trim())); - - if (upstreamRef.length === 0 || upstreamRef === "@{upstream}") { - return null; - } - - const remoteNames = yield* runGitStdout("GitCore.listRemoteNames", cwd, ["remote"]).pipe( - Effect.map(parseRemoteNames), - Effect.catch(() => Effect.succeed>([])), - ); - return ( - parseUpstreamRefWithRemoteNames(upstreamRef, remoteNames) ?? 
- parseUpstreamRefByFirstSeparator(upstreamRef) - ); - }); - - const fetchUpstreamRefForStatus = ( - gitCommonDir: string, - upstream: { upstreamRef: string; remoteName: string; upstreamBranch: string }, - ): Effect.Effect => { - const refspec = `+refs/heads/${upstream.upstreamBranch}:refs/remotes/${upstream.upstreamRef}`; - const fetchCwd = - path.basename(gitCommonDir) === ".git" ? path.dirname(gitCommonDir) : gitCommonDir; - return executeGit( - "GitCore.fetchUpstreamRefForStatus", - fetchCwd, - ["--git-dir", gitCommonDir, "fetch", "--quiet", "--no-tags", upstream.remoteName, refspec], - { - allowNonZeroExit: true, - timeoutMs: Duration.toMillis(STATUS_UPSTREAM_REFRESH_TIMEOUT), - }, - ).pipe(Effect.asVoid); - }; - - const resolveGitCommonDir = Effect.fn("resolveGitCommonDir")(function* (cwd: string) { - const gitCommonDir = yield* runGitStdout("GitCore.resolveGitCommonDir", cwd, [ - "rev-parse", - "--git-common-dir", - ]).pipe(Effect.map((stdout) => stdout.trim())); - return path.isAbsolute(gitCommonDir) ? gitCommonDir : path.resolve(cwd, gitCommonDir); - }); - - const refreshStatusUpstreamCacheEntry = Effect.fn("refreshStatusUpstreamCacheEntry")(function* ( - cacheKey: StatusUpstreamRefreshCacheKey, - ) { - yield* fetchUpstreamRefForStatus(cacheKey.gitCommonDir, { - upstreamRef: cacheKey.upstreamRef, - remoteName: cacheKey.remoteName, - upstreamBranch: cacheKey.upstreamBranch, - }); - return true as const; - }); - - const statusUpstreamRefreshCache = yield* Cache.makeWith({ - capacity: STATUS_UPSTREAM_REFRESH_CACHE_CAPACITY, - lookup: refreshStatusUpstreamCacheEntry, - // Keep successful refreshes warm and briefly back off failed refreshes to avoid retry storms. - timeToLive: (exit) => - Exit.isSuccess(exit) - ? 
STATUS_UPSTREAM_REFRESH_INTERVAL - : STATUS_UPSTREAM_REFRESH_FAILURE_COOLDOWN, - }); - - const refreshStatusUpstreamIfStale = Effect.fn("refreshStatusUpstreamIfStale")(function* ( - cwd: string, - ) { - const upstream = yield* resolveCurrentUpstream(cwd); - if (!upstream) return; - const gitCommonDir = yield* resolveGitCommonDir(cwd); - yield* Cache.get( - statusUpstreamRefreshCache, - new StatusUpstreamRefreshCacheKey({ - gitCommonDir, - upstreamRef: upstream.upstreamRef, - remoteName: upstream.remoteName, - upstreamBranch: upstream.upstreamBranch, - }), - ); - }); - - const resolveDefaultBranchName = ( - cwd: string, - remoteName: string, - ): Effect.Effect => - executeGit( - "GitCore.resolveDefaultBranchName", - cwd, - ["symbolic-ref", `refs/remotes/${remoteName}/HEAD`], - { allowNonZeroExit: true }, - ).pipe( - Effect.map((result) => { - if (result.code !== 0) { - return null; - } - return parseDefaultBranchFromRemoteHeadRef(result.stdout, remoteName); - }), - ); - - const remoteBranchExists = ( - cwd: string, - remoteName: string, - branch: string, - ): Effect.Effect => - executeGit( - "GitCore.remoteBranchExists", - cwd, - ["show-ref", "--verify", "--quiet", `refs/remotes/${remoteName}/${branch}`], - { - allowNonZeroExit: true, - }, - ).pipe(Effect.map((result) => result.code === 0)); - - const originRemoteExists = (cwd: string): Effect.Effect => - executeGit("GitCore.originRemoteExists", cwd, ["remote", "get-url", "origin"], { - allowNonZeroExit: true, - }).pipe(Effect.map((result) => result.code === 0)); - - const listRemoteNames = (cwd: string): Effect.Effect, GitCommandError> => - runGitStdout("GitCore.listRemoteNames", cwd, ["remote"]).pipe( - Effect.map(parseRemoteNamesInGitOrder), - ); - - const resolvePrimaryRemoteName = Effect.fn("resolvePrimaryRemoteName")(function* (cwd: string) { - if (yield* originRemoteExists(cwd)) { - return "origin"; - } - const remotes = yield* listRemoteNames(cwd); - const [firstRemote] = remotes; - if (firstRemote) { - 
return firstRemote; - } - return yield* createGitCommandError( - "GitCore.resolvePrimaryRemoteName", - cwd, - ["remote"], - "No git remote is configured for this repository.", - ); - }); - - const resolvePushRemoteName = Effect.fn("resolvePushRemoteName")(function* ( - cwd: string, - branch: string, - ) { - const branchPushRemote = yield* runGitStdout( - "GitCore.resolvePushRemoteName.branchPushRemote", - cwd, - ["config", "--get", `branch.${branch}.pushRemote`], - true, - ).pipe(Effect.map((stdout) => stdout.trim())); - if (branchPushRemote.length > 0) { - return branchPushRemote; - } - - const pushDefaultRemote = yield* runGitStdout( - "GitCore.resolvePushRemoteName.remotePushDefault", - cwd, - ["config", "--get", "remote.pushDefault"], - true, - ).pipe(Effect.map((stdout) => stdout.trim())); - if (pushDefaultRemote.length > 0) { - return pushDefaultRemote; - } - - return yield* resolvePrimaryRemoteName(cwd).pipe(Effect.catch(() => Effect.succeed(null))); - }); - - const ensureRemote: GitCoreShape["ensureRemote"] = Effect.fn("ensureRemote")(function* (input) { - const preferredName = sanitizeRemoteName(input.preferredName); - const normalizedTargetUrl = normalizeRemoteUrl(input.url); - const remoteFetchUrls = yield* runGitStdout("GitCore.ensureRemote.listRemoteUrls", input.cwd, [ - "remote", - "-v", - ]).pipe(Effect.map((stdout) => parseRemoteFetchUrls(stdout))); - - for (const [remoteName, remoteUrl] of remoteFetchUrls.entries()) { - if (normalizeRemoteUrl(remoteUrl) === normalizedTargetUrl) { - return remoteName; - } - } - - let remoteName = preferredName; - let suffix = 1; - while (remoteFetchUrls.has(remoteName)) { - remoteName = `${preferredName}-${suffix}`; - suffix += 1; - } - - yield* runGit("GitCore.ensureRemote.add", input.cwd, ["remote", "add", remoteName, input.url]); - return remoteName; - }); - - const resolveBaseBranchForNoUpstream = Effect.fn("resolveBaseBranchForNoUpstream")(function* ( - cwd: string, - branch: string, - ) { - const 
configuredBaseBranch = yield* runGitStdout( - "GitCore.resolveBaseBranchForNoUpstream.config", - cwd, - ["config", "--get", `branch.${branch}.gh-merge-base`], - true, - ).pipe(Effect.map((stdout) => stdout.trim())); - - const primaryRemoteName = yield* resolvePrimaryRemoteName(cwd).pipe( - Effect.catch(() => Effect.succeed(null)), - ); - const defaultBranch = - primaryRemoteName === null ? null : yield* resolveDefaultBranchName(cwd, primaryRemoteName); - const candidates = [ - configuredBaseBranch.length > 0 ? configuredBaseBranch : null, - defaultBranch, - ...DEFAULT_BASE_BRANCH_CANDIDATES, - ]; - - for (const candidate of candidates) { - if (!candidate) { - continue; - } - - const remotePrefix = - primaryRemoteName && primaryRemoteName !== "origin" ? `${primaryRemoteName}/` : null; - const normalizedCandidate = candidate.startsWith("origin/") - ? candidate.slice("origin/".length) - : remotePrefix && candidate.startsWith(remotePrefix) - ? candidate.slice(remotePrefix.length) - : candidate; - if (normalizedCandidate.length === 0 || normalizedCandidate === branch) { - continue; - } - - if (yield* branchExists(cwd, normalizedCandidate)) { - return normalizedCandidate; - } - - if ( - primaryRemoteName && - (yield* remoteBranchExists(cwd, primaryRemoteName, normalizedCandidate)) - ) { - return `${primaryRemoteName}/${normalizedCandidate}`; - } - } - - return null; - }); - - const computeAheadCountAgainstBase = Effect.fn("computeAheadCountAgainstBase")(function* ( - cwd: string, - branch: string, - ) { - const baseBranch = yield* resolveBaseBranchForNoUpstream(cwd, branch); - if (!baseBranch) { - return 0; - } - - const result = yield* executeGit( - "GitCore.computeAheadCountAgainstBase", - cwd, - ["rev-list", "--count", `${baseBranch}..HEAD`], - { allowNonZeroExit: true }, - ); - if (result.code !== 0) { - return 0; - } - - const parsed = Number.parseInt(result.stdout.trim(), 10); - return Number.isFinite(parsed) ? 
Math.max(0, parsed) : 0; - }); - - const readBranchRecency = Effect.fn("readBranchRecency")(function* (cwd: string) { - const branchRecency = yield* executeGit( - "GitCore.readBranchRecency", - cwd, - [ - "for-each-ref", - "--format=%(refname:short)%09%(committerdate:unix)", - "refs/heads", - "refs/remotes", - ], - { - timeoutMs: 15_000, - allowNonZeroExit: true, - }, - ); - - const branchLastCommit = new Map(); - if (branchRecency.code !== 0) { - return branchLastCommit; - } - - for (const line of branchRecency.stdout.split("\n")) { - if (line.length === 0) { - continue; - } - const [name, lastCommitRaw] = line.split("\t"); - if (!name) { - continue; - } - const lastCommit = Number.parseInt(lastCommitRaw ?? "0", 10); - branchLastCommit.set(name, Number.isFinite(lastCommit) ? lastCommit : 0); - } - - return branchLastCommit; - }); - - const statusDetails: GitCoreShape["statusDetails"] = Effect.fn("statusDetails")(function* (cwd) { - yield* refreshStatusUpstreamIfStale(cwd).pipe(Effect.ignoreCause({ log: true })); - - const statusResult = yield* executeGit( - "GitCore.statusDetails.status", - cwd, - ["status", "--porcelain=2", "--branch"], - { - allowNonZeroExit: true, - }, - ); - - if (statusResult.code !== 0) { - const stderr = statusResult.stderr.trim(); - return yield* createGitCommandError( - "GitCore.statusDetails.status", - cwd, - ["status", "--porcelain=2", "--branch"], - stderr || "git status failed", - ); - } - - const [unstagedNumstatStdout, stagedNumstatStdout, defaultRefResult, hasOriginRemote] = - yield* Effect.all( - [ - runGitStdout("GitCore.statusDetails.unstagedNumstat", cwd, ["diff", "--numstat"]), - runGitStdout("GitCore.statusDetails.stagedNumstat", cwd, [ - "diff", - "--cached", - "--numstat", - ]), - executeGit( - "GitCore.statusDetails.defaultRef", - cwd, - ["symbolic-ref", "refs/remotes/origin/HEAD"], - { - allowNonZeroExit: true, - }, - ), - originRemoteExists(cwd).pipe(Effect.catch(() => Effect.succeed(false))), - ], - { concurrency: 
"unbounded" }, - ); - const statusStdout = statusResult.stdout; - const defaultBranch = - defaultRefResult.code === 0 - ? defaultRefResult.stdout.trim().replace(/^refs\/remotes\/origin\//, "") - : null; - - let branch: string | null = null; - let upstreamRef: string | null = null; - let aheadCount = 0; - let behindCount = 0; - let hasWorkingTreeChanges = false; - const changedFilesWithoutNumstat = new Set(); - - for (const line of statusStdout.split(/\r?\n/g)) { - if (line.startsWith("# branch.head ")) { - const value = line.slice("# branch.head ".length).trim(); - branch = value.startsWith("(") ? null : value; - continue; - } - if (line.startsWith("# branch.upstream ")) { - const value = line.slice("# branch.upstream ".length).trim(); - upstreamRef = value.length > 0 ? value : null; - continue; - } - if (line.startsWith("# branch.ab ")) { - const value = line.slice("# branch.ab ".length).trim(); - const parsed = parseBranchAb(value); - aheadCount = parsed.ahead; - behindCount = parsed.behind; - continue; - } - if (line.trim().length > 0 && !line.startsWith("#")) { - hasWorkingTreeChanges = true; - const pathValue = parsePorcelainPath(line); - if (pathValue) changedFilesWithoutNumstat.add(pathValue); - } - } - - if (!upstreamRef && branch) { - aheadCount = yield* computeAheadCountAgainstBase(cwd, branch).pipe( - Effect.catch(() => Effect.succeed(0)), - ); - behindCount = 0; - } - - const stagedEntries = parseNumstatEntries(stagedNumstatStdout); - const unstagedEntries = parseNumstatEntries(unstagedNumstatStdout); - const fileStatMap = new Map(); - for (const entry of [...stagedEntries, ...unstagedEntries]) { - const existing = fileStatMap.get(entry.path) ?? 
{ insertions: 0, deletions: 0 }; - existing.insertions += entry.insertions; - existing.deletions += entry.deletions; - fileStatMap.set(entry.path, existing); - } - - let insertions = 0; - let deletions = 0; - const files = Array.from(fileStatMap.entries()) - .map(([filePath, stat]) => { - insertions += stat.insertions; - deletions += stat.deletions; - return { path: filePath, insertions: stat.insertions, deletions: stat.deletions }; - }) - .toSorted((a, b) => a.path.localeCompare(b.path)); - - for (const filePath of changedFilesWithoutNumstat) { - if (fileStatMap.has(filePath)) continue; - files.push({ path: filePath, insertions: 0, deletions: 0 }); - } - files.sort((a, b) => a.path.localeCompare(b.path)); - - return { - isRepo: true, - hasOriginRemote, - isDefaultBranch: - branch !== null && - (branch === defaultBranch || - (defaultBranch === null && (branch === "main" || branch === "master"))), - branch, - upstreamRef, - hasWorkingTreeChanges, - workingTree: { - files, - insertions, - deletions, - }, - hasUpstream: upstreamRef !== null, - aheadCount, - behindCount, - }; - }); - - const status: GitCoreShape["status"] = (input) => - statusDetails(input.cwd).pipe( - Effect.map((details) => ({ - isRepo: details.isRepo, - hasOriginRemote: details.hasOriginRemote, - isDefaultBranch: details.isDefaultBranch, - branch: details.branch, - hasWorkingTreeChanges: details.hasWorkingTreeChanges, - workingTree: details.workingTree, - hasUpstream: details.hasUpstream, - aheadCount: details.aheadCount, - behindCount: details.behindCount, - pr: null, - })), - ); - - const prepareCommitContext: GitCoreShape["prepareCommitContext"] = Effect.fn( - "prepareCommitContext", - )(function* (cwd, filePaths) { - if (filePaths && filePaths.length > 0) { - yield* runGit("GitCore.prepareCommitContext.reset", cwd, ["reset"]).pipe( - Effect.catch(() => Effect.void), - ); - yield* runGit("GitCore.prepareCommitContext.addSelected", cwd, [ - "add", - "-A", - "--", - ...filePaths, - ]); - } else { - 
yield* runGit("GitCore.prepareCommitContext.addAll", cwd, ["add", "-A"]); - } - - const stagedSummary = yield* runGitStdout("GitCore.prepareCommitContext.stagedSummary", cwd, [ - "diff", - "--cached", - "--name-status", - ]).pipe(Effect.map((stdout) => stdout.trim())); - if (stagedSummary.length === 0) { - return null; - } - - const stagedPatch = yield* runGitStdoutWithOptions( - "GitCore.prepareCommitContext.stagedPatch", - cwd, - ["diff", "--cached", "--patch", "--minimal"], - { - maxOutputBytes: PREPARED_COMMIT_PATCH_MAX_OUTPUT_BYTES, - truncateOutputAtMaxBytes: true, - }, - ); - - return { - stagedSummary, - stagedPatch, - }; - }); - - const commit: GitCoreShape["commit"] = Effect.fn("commit")(function* ( - cwd, - subject, - body, - options?: GitCommitOptions, - ) { - const args = ["commit", "-m", subject]; - const trimmedBody = body.trim(); - if (trimmedBody.length > 0) { - args.push("-m", trimmedBody); - } - const progress = - options?.progress?.onOutputLine === undefined - ? options?.progress - : { - ...options.progress, - onStdoutLine: (line: string) => - options.progress?.onOutputLine?.({ stream: "stdout", text: line }) ?? Effect.void, - onStderrLine: (line: string) => - options.progress?.onOutputLine?.({ stream: "stderr", text: line }) ?? Effect.void, - }; - yield* executeGit("GitCore.commit.commit", cwd, args, { - ...(options?.timeoutMs !== undefined ? { timeoutMs: options.timeoutMs } : {}), - ...(progress ? { progress } : {}), - }).pipe(Effect.asVoid); - const commitSha = yield* runGitStdout("GitCore.commit.revParseHead", cwd, [ - "rev-parse", - "HEAD", - ]).pipe(Effect.map((stdout) => stdout.trim())); - - return { commitSha }; - }); - - const pushCurrentBranch: GitCoreShape["pushCurrentBranch"] = Effect.fn("pushCurrentBranch")( - function* (cwd, fallbackBranch) { - const details = yield* statusDetails(cwd); - const branch = details.branch ?? 
fallbackBranch; - if (!branch) { - return yield* createGitCommandError( - "GitCore.pushCurrentBranch", - cwd, - ["push"], - "Cannot push from detached HEAD.", - ); - } - - const hasNoLocalDelta = details.aheadCount === 0 && details.behindCount === 0; - if (hasNoLocalDelta) { - if (details.hasUpstream) { - return { - status: "skipped_up_to_date" as const, - branch, - ...(details.upstreamRef ? { upstreamBranch: details.upstreamRef } : {}), - }; - } - - const comparableBaseBranch = yield* resolveBaseBranchForNoUpstream(cwd, branch).pipe( - Effect.catch(() => Effect.succeed(null)), - ); - if (comparableBaseBranch) { - const publishRemoteName = yield* resolvePushRemoteName(cwd, branch).pipe( - Effect.catch(() => Effect.succeed(null)), - ); - if (!publishRemoteName) { - return { - status: "skipped_up_to_date" as const, - branch, - }; - } - - const hasRemoteBranch = yield* remoteBranchExists(cwd, publishRemoteName, branch).pipe( - Effect.catch(() => Effect.succeed(false)), - ); - if (hasRemoteBranch) { - return { - status: "skipped_up_to_date" as const, - branch, - }; - } - } - } - - if (!details.hasUpstream) { - const publishRemoteName = yield* resolvePushRemoteName(cwd, branch); - if (!publishRemoteName) { - return yield* createGitCommandError( - "GitCore.pushCurrentBranch", - cwd, - ["push"], - "Cannot push because no git remote is configured for this repository.", - ); - } - yield* runGit("GitCore.pushCurrentBranch.pushWithUpstream", cwd, [ - "push", - "-u", - publishRemoteName, - `HEAD:refs/heads/${branch}`, - ]); - return { - status: "pushed" as const, - branch, - upstreamBranch: `${publishRemoteName}/${branch}`, - setUpstream: true, - }; - } - - const currentUpstream = yield* resolveCurrentUpstream(cwd).pipe( - Effect.catch(() => Effect.succeed(null)), - ); - if (currentUpstream) { - yield* runGit("GitCore.pushCurrentBranch.pushUpstream", cwd, [ - "push", - currentUpstream.remoteName, - `HEAD:${currentUpstream.upstreamBranch}`, - ]); - return { - status: "pushed" 
as const, - branch, - upstreamBranch: currentUpstream.upstreamRef, - setUpstream: false, - }; - } - - yield* runGit("GitCore.pushCurrentBranch.push", cwd, ["push"]); - return { - status: "pushed" as const, - branch, - ...(details.upstreamRef ? { upstreamBranch: details.upstreamRef } : {}), - setUpstream: false, - }; - }, - ); - - const pullCurrentBranch: GitCoreShape["pullCurrentBranch"] = Effect.fn("pullCurrentBranch")( - function* (cwd) { - const details = yield* statusDetails(cwd); - const branch = details.branch; - if (!branch) { - return yield* createGitCommandError( - "GitCore.pullCurrentBranch", - cwd, - ["pull", "--ff-only"], - "Cannot pull from detached HEAD.", - ); - } - if (!details.hasUpstream) { - return yield* createGitCommandError( - "GitCore.pullCurrentBranch", - cwd, - ["pull", "--ff-only"], - "Current branch has no upstream configured. Push with upstream first.", - ); - } - const beforeSha = yield* runGitStdout( - "GitCore.pullCurrentBranch.beforeSha", - cwd, - ["rev-parse", "HEAD"], - true, - ).pipe(Effect.map((stdout) => stdout.trim())); - yield* executeGit("GitCore.pullCurrentBranch.pull", cwd, ["pull", "--ff-only"], { - timeoutMs: 30_000, - fallbackErrorMessage: "git pull failed", - }); - const afterSha = yield* runGitStdout( - "GitCore.pullCurrentBranch.afterSha", - cwd, - ["rev-parse", "HEAD"], - true, - ).pipe(Effect.map((stdout) => stdout.trim())); - - const refreshed = yield* statusDetails(cwd); - return { - status: beforeSha.length > 0 && beforeSha === afterSha ? 
"skipped_up_to_date" : "pulled", - branch, - upstreamBranch: refreshed.upstreamRef, - }; - }, - ); - - const readRangeContext: GitCoreShape["readRangeContext"] = Effect.fn("readRangeContext")( - function* (cwd, baseBranch) { - const range = `${baseBranch}..HEAD`; - const [commitSummary, diffSummary, diffPatch] = yield* Effect.all( - [ - runGitStdoutWithOptions( - "GitCore.readRangeContext.log", - cwd, - ["log", "--oneline", range], - { - maxOutputBytes: RANGE_COMMIT_SUMMARY_MAX_OUTPUT_BYTES, - truncateOutputAtMaxBytes: true, - }, - ), - runGitStdoutWithOptions( - "GitCore.readRangeContext.diffStat", - cwd, - ["diff", "--stat", range], - { - maxOutputBytes: RANGE_DIFF_SUMMARY_MAX_OUTPUT_BYTES, - truncateOutputAtMaxBytes: true, - }, - ), - runGitStdoutWithOptions( - "GitCore.readRangeContext.diffPatch", - cwd, - ["diff", "--patch", "--minimal", range], - { - maxOutputBytes: RANGE_DIFF_PATCH_MAX_OUTPUT_BYTES, - truncateOutputAtMaxBytes: true, - }, - ), - ], - { concurrency: "unbounded" }, - ); - - return { - commitSummary, - diffSummary, - diffPatch, - }; - }, - ); - - const readConfigValue: GitCoreShape["readConfigValue"] = (cwd, key) => - runGitStdout("GitCore.readConfigValue", cwd, ["config", "--get", key], true).pipe( - Effect.map((stdout) => stdout.trim()), - Effect.map((trimmed) => (trimmed.length > 0 ? 
trimmed : null)), - ); - - const isInsideWorkTree: GitCoreShape["isInsideWorkTree"] = (cwd) => - executeGit("GitCore.isInsideWorkTree", cwd, ["rev-parse", "--is-inside-work-tree"], { - allowNonZeroExit: true, - timeoutMs: 5_000, - maxOutputBytes: 4_096, - }).pipe(Effect.map((result) => result.code === 0 && result.stdout.trim() === "true")); - - const listWorkspaceFiles: GitCoreShape["listWorkspaceFiles"] = (cwd) => - executeGit( - "GitCore.listWorkspaceFiles", - cwd, - ["ls-files", "--cached", "--others", "--exclude-standard", "-z"], - { - allowNonZeroExit: true, - timeoutMs: 20_000, - maxOutputBytes: WORKSPACE_FILES_MAX_OUTPUT_BYTES, - truncateOutputAtMaxBytes: true, - }, - ).pipe( - Effect.flatMap((result) => - result.code === 0 - ? Effect.succeed({ - paths: splitNullSeparatedPaths(result.stdout, result.stdoutTruncated), - truncated: result.stdoutTruncated, - }) - : Effect.fail( - createGitCommandError( - "GitCore.listWorkspaceFiles", - cwd, - ["ls-files", "--cached", "--others", "--exclude-standard", "-z"], - result.stderr.trim().length > 0 ? result.stderr.trim() : "git ls-files failed", - ), - ), - ), - ); - - const filterIgnoredPaths: GitCoreShape["filterIgnoredPaths"] = (cwd, relativePaths) => - Effect.gen(function* () { - if (relativePaths.length === 0) { - return relativePaths; - } - - const ignoredPaths = new Set(); - const chunks = chunkPathsForGitCheckIgnore(relativePaths); - - for (const chunk of chunks) { - const result = yield* executeGit( - "GitCore.filterIgnoredPaths", - cwd, - ["check-ignore", "--no-index", "-z", "--stdin"], - { - stdin: `${chunk.join("\0")}\0`, - allowNonZeroExit: true, - timeoutMs: 20_000, - maxOutputBytes: WORKSPACE_FILES_MAX_OUTPUT_BYTES, - truncateOutputAtMaxBytes: true, - }, - ); - - if (result.code !== 0 && result.code !== 1) { - return yield* createGitCommandError( - "GitCore.filterIgnoredPaths", - cwd, - ["check-ignore", "--no-index", "-z", "--stdin"], - result.stderr.trim().length > 0 ? 
result.stderr.trim() : "git check-ignore failed", - ); - } - - for (const ignoredPath of splitNullSeparatedPaths(result.stdout, result.stdoutTruncated)) { - ignoredPaths.add(ignoredPath); - } - } - - if (ignoredPaths.size === 0) { - return relativePaths; - } - - return relativePaths.filter((relativePath) => !ignoredPaths.has(relativePath)); - }); - - const listBranches: GitCoreShape["listBranches"] = Effect.fn("listBranches")(function* (input) { - const branchRecencyPromise = readBranchRecency(input.cwd).pipe( - Effect.catch(() => Effect.succeed(new Map())), - ); - const localBranchResult = yield* executeGit( - "GitCore.listBranches.branchNoColor", - input.cwd, - ["branch", "--no-color", "--no-column"], - { - timeoutMs: 10_000, - allowNonZeroExit: true, - }, - ); - - if (localBranchResult.code !== 0) { - const stderr = localBranchResult.stderr.trim(); - if (stderr.toLowerCase().includes("not a git repository")) { - return { - branches: [], - isRepo: false, - hasOriginRemote: false, - nextCursor: null, - totalCount: 0, - }; - } - return yield* createGitCommandError( - "GitCore.listBranches", - input.cwd, - ["branch", "--no-color", "--no-column"], - stderr || "git branch failed", - ); - } - - const remoteBranchResultEffect = executeGit( - "GitCore.listBranches.remoteBranches", - input.cwd, - ["branch", "--no-color", "--no-column", "--remotes"], - { - timeoutMs: 10_000, - allowNonZeroExit: true, - }, - ).pipe( - Effect.catch((error) => - Effect.logWarning( - `GitCore.listBranches: remote branch lookup failed for ${input.cwd}: ${error.message}. Falling back to an empty remote branch list.`, - ).pipe(Effect.as({ code: 1, stdout: "", stderr: "" })), - ), - ); - - const remoteNamesResultEffect = executeGit( - "GitCore.listBranches.remoteNames", - input.cwd, - ["remote"], - { - timeoutMs: 5_000, - allowNonZeroExit: true, - }, - ).pipe( - Effect.catch((error) => - Effect.logWarning( - `GitCore.listBranches: remote name lookup failed for ${input.cwd}: ${error.message}. 
Falling back to an empty remote name list.`, - ).pipe(Effect.as({ code: 1, stdout: "", stderr: "" })), - ), - ); - - const [defaultRef, worktreeList, remoteBranchResult, remoteNamesResult, branchLastCommit] = - yield* Effect.all( - [ - executeGit( - "GitCore.listBranches.defaultRef", - input.cwd, - ["symbolic-ref", "refs/remotes/origin/HEAD"], - { - timeoutMs: 5_000, - allowNonZeroExit: true, - }, - ), - executeGit( - "GitCore.listBranches.worktreeList", - input.cwd, - ["worktree", "list", "--porcelain"], - { - timeoutMs: 5_000, - allowNonZeroExit: true, - }, - ), - remoteBranchResultEffect, - remoteNamesResultEffect, - branchRecencyPromise, - ], - { concurrency: "unbounded" }, - ); - - const remoteNames = - remoteNamesResult.code === 0 ? parseRemoteNames(remoteNamesResult.stdout) : []; - if (remoteBranchResult.code !== 0 && remoteBranchResult.stderr.trim().length > 0) { - yield* Effect.logWarning( - `GitCore.listBranches: remote branch lookup returned code ${remoteBranchResult.code} for ${input.cwd}: ${remoteBranchResult.stderr.trim()}. Falling back to an empty remote branch list.`, - ); - } - if (remoteNamesResult.code !== 0 && remoteNamesResult.stderr.trim().length > 0) { - yield* Effect.logWarning( - `GitCore.listBranches: remote name lookup returned code ${remoteNamesResult.code} for ${input.cwd}: ${remoteNamesResult.stderr.trim()}. Falling back to an empty remote name list.`, - ); - } - - const defaultBranch = - defaultRef.code === 0 - ? defaultRef.stdout.trim().replace(/^refs\/remotes\/origin\//, "") - : null; - - const worktreeMap = new Map(); - if (worktreeList.code === 0) { - let currentPath: string | null = null; - for (const line of worktreeList.stdout.split("\n")) { - if (line.startsWith("worktree ")) { - const candidatePath = line.slice("worktree ".length); - const exists = yield* fileSystem.stat(candidatePath).pipe( - Effect.map(() => true), - Effect.catch(() => Effect.succeed(false)), - ); - currentPath = exists ? 
candidatePath : null; - } else if (line.startsWith("branch refs/heads/") && currentPath) { - worktreeMap.set(line.slice("branch refs/heads/".length), currentPath); - } else if (line === "") { - currentPath = null; - } - } - } - - const localBranches = localBranchResult.stdout - .split("\n") - .map(parseBranchLine) - .filter((branch): branch is { name: string; current: boolean } => branch !== null) - .map((branch) => ({ - name: branch.name, - current: branch.current, - isRemote: false, - isDefault: branch.name === defaultBranch, - worktreePath: worktreeMap.get(branch.name) ?? null, - })) - .toSorted((a, b) => { - const aPriority = a.current ? 0 : a.isDefault ? 1 : 2; - const bPriority = b.current ? 0 : b.isDefault ? 1 : 2; - if (aPriority !== bPriority) return aPriority - bPriority; - - const aLastCommit = branchLastCommit.get(a.name) ?? 0; - const bLastCommit = branchLastCommit.get(b.name) ?? 0; - if (aLastCommit !== bLastCommit) return bLastCommit - aLastCommit; - return a.name.localeCompare(b.name); - }); - - const remoteBranches = - remoteBranchResult.code === 0 - ? remoteBranchResult.stdout - .split("\n") - .map(parseBranchLine) - .filter((branch): branch is { name: string; current: boolean } => branch !== null) - .map((branch) => { - const parsedRemoteRef = parseRemoteRefWithRemoteNames(branch.name, remoteNames); - const remoteBranch: { - name: string; - current: boolean; - isRemote: boolean; - remoteName?: string; - isDefault: boolean; - worktreePath: string | null; - } = { - name: branch.name, - current: false, - isRemote: true, - isDefault: false, - worktreePath: null, - }; - if (parsedRemoteRef) { - remoteBranch.remoteName = parsedRemoteRef.remoteName; - } - return remoteBranch; - }) - .toSorted((a, b) => { - const aLastCommit = branchLastCommit.get(a.name) ?? 0; - const bLastCommit = branchLastCommit.get(b.name) ?? 
0; - if (aLastCommit !== bLastCommit) return bLastCommit - aLastCommit; - return a.name.localeCompare(b.name); - }) - : []; - - const branches = paginateBranches({ - branches: filterBranchesForListQuery( - dedupeRemoteBranchesWithLocalMatches([...localBranches, ...remoteBranches]), - input.query, - ), - cursor: input.cursor, - limit: input.limit, - }); - - return { - branches: [...branches.branches], - isRepo: true, - hasOriginRemote: remoteNames.includes("origin"), - nextCursor: branches.nextCursor, - totalCount: branches.totalCount, - }; - }); - - const createWorktree: GitCoreShape["createWorktree"] = Effect.fn("createWorktree")( - function* (input) { - const targetBranch = input.newBranch ?? input.branch; - const sanitizedBranch = targetBranch.replace(/\//g, "-"); - const repoName = path.basename(input.cwd); - const worktreePath = input.path ?? path.join(worktreesDir, repoName, sanitizedBranch); - const args = input.newBranch - ? ["worktree", "add", "-b", input.newBranch, worktreePath, input.branch] - : ["worktree", "add", worktreePath, input.branch]; - - yield* executeGit("GitCore.createWorktree", input.cwd, args, { - fallbackErrorMessage: "git worktree add failed", - }); - - return { - worktree: { - path: worktreePath, - branch: targetBranch, - }, - }; - }, - ); - - const fetchPullRequestBranch: GitCoreShape["fetchPullRequestBranch"] = Effect.fn( - "fetchPullRequestBranch", - )(function* (input) { - const remoteName = yield* resolvePrimaryRemoteName(input.cwd); - yield* executeGit( - "GitCore.fetchPullRequestBranch", - input.cwd, - [ - "fetch", - "--quiet", - "--no-tags", - remoteName, - `+refs/pull/${input.prNumber}/head:refs/heads/${input.branch}`, - ], - { - fallbackErrorMessage: "git fetch pull request branch failed", - }, - ); - }); - - const fetchRemoteBranch: GitCoreShape["fetchRemoteBranch"] = Effect.fn("fetchRemoteBranch")( - function* (input) { - yield* runGit("GitCore.fetchRemoteBranch.fetch", input.cwd, [ - "fetch", - "--quiet", - "--no-tags", - 
input.remoteName, - `+refs/heads/${input.remoteBranch}:refs/remotes/${input.remoteName}/${input.remoteBranch}`, - ]); - - const localBranchAlreadyExists = yield* branchExists(input.cwd, input.localBranch); - const targetRef = `${input.remoteName}/${input.remoteBranch}`; - yield* runGit( - "GitCore.fetchRemoteBranch.materialize", - input.cwd, - localBranchAlreadyExists - ? ["branch", "--force", input.localBranch, targetRef] - : ["branch", input.localBranch, targetRef], - ); - }, - ); - - const setBranchUpstream: GitCoreShape["setBranchUpstream"] = (input) => - runGit("GitCore.setBranchUpstream", input.cwd, [ - "branch", - "--set-upstream-to", - `${input.remoteName}/${input.remoteBranch}`, - input.branch, - ]); - - const removeWorktree: GitCoreShape["removeWorktree"] = Effect.fn("removeWorktree")( - function* (input) { - const args = ["worktree", "remove"]; - if (input.force) { - args.push("--force"); - } - args.push(input.path); - yield* executeGit("GitCore.removeWorktree", input.cwd, args, { - timeoutMs: 15_000, - fallbackErrorMessage: "git worktree remove failed", - }).pipe( - Effect.mapError((error) => - createGitCommandError( - "GitCore.removeWorktree", - input.cwd, - args, - `${commandLabel(args)} failed (cwd: ${input.cwd}): ${error instanceof Error ? 
error.message : String(error)}`, - error, - ), - ), - ); - }, - ); - - const renameBranch: GitCoreShape["renameBranch"] = Effect.fn("renameBranch")(function* (input) { - if (input.oldBranch === input.newBranch) { - return { branch: input.newBranch }; - } - const targetBranch = yield* resolveAvailableBranchName(input.cwd, input.newBranch); - - yield* executeGit( - "GitCore.renameBranch", - input.cwd, - ["branch", "-m", "--", input.oldBranch, targetBranch], - { - timeoutMs: 10_000, - fallbackErrorMessage: "git branch rename failed", - }, - ); - - return { branch: targetBranch }; - }); - - const createBranch: GitCoreShape["createBranch"] = (input) => - executeGit("GitCore.createBranch", input.cwd, ["branch", input.branch], { - timeoutMs: 10_000, - fallbackErrorMessage: "git branch create failed", - }).pipe(Effect.asVoid); - - const checkoutBranch: GitCoreShape["checkoutBranch"] = Effect.fn("checkoutBranch")( - function* (input) { - const [localInputExists, remoteExists] = yield* Effect.all( - [ - executeGit( - "GitCore.checkoutBranch.localInputExists", - input.cwd, - ["show-ref", "--verify", "--quiet", `refs/heads/${input.branch}`], - { - timeoutMs: 5_000, - allowNonZeroExit: true, - }, - ).pipe(Effect.map((result) => result.code === 0)), - executeGit( - "GitCore.checkoutBranch.remoteExists", - input.cwd, - ["show-ref", "--verify", "--quiet", `refs/remotes/${input.branch}`], - { - timeoutMs: 5_000, - allowNonZeroExit: true, - }, - ).pipe(Effect.map((result) => result.code === 0)), - ], - { concurrency: "unbounded" }, - ); - - const localTrackingBranch = remoteExists - ? yield* executeGit( - "GitCore.checkoutBranch.localTrackingBranch", - input.cwd, - ["for-each-ref", "--format=%(refname:short)\t%(upstream:short)", "refs/heads"], - { - timeoutMs: 5_000, - allowNonZeroExit: true, - }, - ).pipe( - Effect.map((result) => - result.code === 0 - ? 
parseTrackingBranchByUpstreamRef(result.stdout, input.branch) - : null, - ), - ) - : null; - - const localTrackedBranchCandidate = deriveLocalBranchNameFromRemoteRef(input.branch); - const localTrackedBranchTargetExists = - remoteExists && localTrackedBranchCandidate - ? yield* executeGit( - "GitCore.checkoutBranch.localTrackedBranchTargetExists", - input.cwd, - ["show-ref", "--verify", "--quiet", `refs/heads/${localTrackedBranchCandidate}`], - { - timeoutMs: 5_000, - allowNonZeroExit: true, - }, - ).pipe(Effect.map((result) => result.code === 0)) - : false; - - const checkoutArgs = localInputExists - ? ["checkout", input.branch] - : remoteExists && !localTrackingBranch && localTrackedBranchTargetExists - ? ["checkout", input.branch] - : remoteExists && !localTrackingBranch - ? ["checkout", "--track", input.branch] - : remoteExists && localTrackingBranch - ? ["checkout", localTrackingBranch] - : ["checkout", input.branch]; - - yield* executeGit("GitCore.checkoutBranch.checkout", input.cwd, checkoutArgs, { - timeoutMs: 10_000, - fallbackErrorMessage: "git checkout failed", - }); - }, - ); - - const initRepo: GitCoreShape["initRepo"] = (input) => - executeGit("GitCore.initRepo", input.cwd, ["init"], { - timeoutMs: 10_000, - fallbackErrorMessage: "git init failed", - }).pipe(Effect.asVoid); - - const listLocalBranchNames: GitCoreShape["listLocalBranchNames"] = (cwd) => - runGitStdout("GitCore.listLocalBranchNames", cwd, [ - "branch", - "--list", - "--no-column", - "--format=%(refname:short)", - ]).pipe( - Effect.map((stdout) => - stdout - .split("\n") - .map((line) => line.trim()) - .filter((line) => line.length > 0), - ), - ); - - return { - execute, - status, - statusDetails, - prepareCommitContext, - commit, - pushCurrentBranch, - pullCurrentBranch, - readRangeContext, - readConfigValue, - isInsideWorkTree, - listWorkspaceFiles, - filterIgnoredPaths, - listBranches, - createWorktree, - fetchPullRequestBranch, - ensureRemote, - fetchRemoteBranch, - 
setBranchUpstream, - removeWorktree, - renameBranch, - createBranch, - checkoutBranch, - initRepo, - listLocalBranchNames, - } satisfies GitCoreShape; -}); +import { GitCore } from "../Services/GitCore.ts"; +import { makeGitCore } from "./GitCore.shared.ts"; export const GitCoreLive = Layer.effect(GitCore, makeGitCore()); diff --git a/apps/server/src/git/Layers/GitManager.test.ts b/apps/server/src/git/Layers/GitManager.test.ts index 8fcac582ff..44b7bc7821 100644 --- a/apps/server/src/git/Layers/GitManager.test.ts +++ b/apps/server/src/git/Layers/GitManager.test.ts @@ -18,7 +18,8 @@ import { import { type TextGenerationShape, TextGeneration } from "../Services/TextGeneration.ts"; import { GitCoreLive } from "./GitCore.ts"; import { GitCore } from "../Services/GitCore.ts"; -import { makeGitManager } from "./GitManager.ts"; +import { GitManagerLive } from "./GitManager.ts"; +import { GitManager } from "../Services/GitManager.ts"; import { ServerConfig } from "../../config.ts"; import { ServerSettingsService } from "../../serverSettings.ts"; @@ -622,10 +623,10 @@ function makeManager(input?: { serverSettingsLayer, ).pipe(Layer.provideMerge(NodeServices.layer)); - return makeGitManager().pipe( - Effect.provide(managerLayer), - Effect.map((manager) => ({ manager, ghCalls })), - ); + return Effect.gen(function* () { + const manager = yield* GitManager; + return { manager, ghCalls }; + }).pipe(Effect.provide(Layer.provideMerge(GitManagerLive, managerLayer))); } const GitManagerTestLayer = GitCoreLive.pipe( diff --git a/apps/server/src/git/Layers/GitManager.ts b/apps/server/src/git/Layers/GitManager.ts index abc4985abd..8275bcab66 100644 --- a/apps/server/src/git/Layers/GitManager.ts +++ b/apps/server/src/git/Layers/GitManager.ts @@ -548,7 +548,7 @@ function toPullRequestHeadRemoteInfo(pr: { }; } -export const makeGitManager = Effect.fn("makeGitManager")(function* () { +const makeGitManager = Effect.fn("makeGitManager")(function* () { const gitCore = yield* GitCore; const 
gitHubCli = yield* GitHubCli; const textGeneration = yield* TextGeneration; diff --git a/apps/server/src/git/Prompts.ts b/apps/server/src/git/Prompts.ts index 4092358825..e5557b9587 100644 --- a/apps/server/src/git/Prompts.ts +++ b/apps/server/src/git/Prompts.ts @@ -15,7 +15,7 @@ import { limitSection } from "./Utils.ts"; // Commit message // --------------------------------------------------------------------------- -export interface CommitMessagePromptInput { +interface CommitMessagePromptInput { branch: string | null; stagedSummary: string; stagedPatch: string; @@ -71,7 +71,7 @@ export function buildCommitMessagePrompt(input: CommitMessagePromptInput) { // PR content // --------------------------------------------------------------------------- -export interface PrContentPromptInput { +interface PrContentPromptInput { baseBranch: string; headBranch: string; commitSummary: string; @@ -114,7 +114,7 @@ export function buildPrContentPrompt(input: PrContentPromptInput) { // Branch name // --------------------------------------------------------------------------- -export interface BranchNamePromptInput { +interface BranchNamePromptInput { message: string; attachments?: ReadonlyArray | undefined; } @@ -176,7 +176,7 @@ export function buildBranchNamePrompt(input: BranchNamePromptInput) { // Thread title // --------------------------------------------------------------------------- -export interface ThreadTitlePromptInput { +interface ThreadTitlePromptInput { message: string; attachments?: ReadonlyArray | undefined; } diff --git a/apps/server/src/git/Services/TextGeneration.ts b/apps/server/src/git/Services/TextGeneration.ts index f4354c7a99..6a356a4c70 100644 --- a/apps/server/src/git/Services/TextGeneration.ts +++ b/apps/server/src/git/Services/TextGeneration.ts @@ -72,16 +72,6 @@ export interface ThreadTitleGenerationInput { export interface ThreadTitleGenerationResult { title: string; } - -export interface TextGenerationService { - generateCommitMessage( - input: 
CommitMessageGenerationInput, - ): Promise; - generatePrContent(input: PrContentGenerationInput): Promise; - generateBranchName(input: BranchNameGenerationInput): Promise; - generateThreadTitle(input: ThreadTitleGenerationInput): Promise; -} - /** * TextGenerationShape - Service API for commit/PR text generation. */ diff --git a/apps/server/src/imageMime.ts b/apps/server/src/imageMime.ts index 814abbb32c..3c8b0376dd 100644 --- a/apps/server/src/imageMime.ts +++ b/apps/server/src/imageMime.ts @@ -1,6 +1,6 @@ import Mime from "@effect/platform-node/Mime"; -export const IMAGE_EXTENSION_BY_MIME_TYPE: Record = { +const IMAGE_EXTENSION_BY_MIME_TYPE: Record = { "image/avif": ".avif", "image/bmp": ".bmp", "image/gif": ".gif", diff --git a/apps/server/src/keybindings.logic.ts b/apps/server/src/keybindings.logic.ts new file mode 100644 index 0000000000..95272c1564 --- /dev/null +++ b/apps/server/src/keybindings.logic.ts @@ -0,0 +1,342 @@ +import { + KeybindingRule, + KeybindingShortcut, + KeybindingsConfig, + KeybindingWhenNode, + MAX_WHEN_EXPRESSION_DEPTH, + ResolvedKeybindingRule, + ResolvedKeybindingsConfig, + THREAD_JUMP_KEYBINDING_COMMANDS, +} from "@t3tools/contracts"; +import { Effect, Option, Predicate, Schema, SchemaIssue, SchemaTransformation } from "effect"; +import { Mutable } from "effect/Types"; + +type WhenToken = + | { type: "identifier"; value: string } + | { type: "not" } + | { type: "and" } + | { type: "or" } + | { type: "lparen" } + | { type: "rparen" }; + +export const DEFAULT_KEYBINDINGS: ReadonlyArray = [ + { key: "mod+j", command: "terminal.toggle" }, + { key: "mod+d", command: "terminal.split", when: "terminalFocus" }, + { key: "mod+n", command: "terminal.new", when: "terminalFocus" }, + { key: "mod+w", command: "terminal.close", when: "terminalFocus" }, + { key: "mod+d", command: "diff.toggle", when: "!terminalFocus" }, + { key: "mod+n", command: "chat.new", when: "!terminalFocus" }, + { key: "mod+shift+o", command: "chat.new", when: 
"!terminalFocus" }, + { key: "mod+shift+n", command: "chat.newLocal", when: "!terminalFocus" }, + { key: "mod+o", command: "editor.openFavorite" }, + { key: "mod+shift+[", command: "thread.previous" }, + { key: "mod+shift+]", command: "thread.next" }, + ...THREAD_JUMP_KEYBINDING_COMMANDS.map((command, index) => ({ + key: `mod+${index + 1}`, + command, + })), +]; + +function normalizeKeyToken(token: string): string { + if (token === "space") return " "; + if (token === "esc") return "escape"; + return token; +} + +export function parseKeybindingShortcut(value: string): KeybindingShortcut | null { + const trimmedValue = value.trim(); + if (trimmedValue.length === 0) return null; + + const rawTokens = trimmedValue + .toLowerCase() + .split("+") + .map((token) => token.trim()); + const tokens = [...rawTokens]; + let trailingEmptyCount = 0; + while (tokens[tokens.length - 1] === "") { + trailingEmptyCount += 1; + tokens.pop(); + } + if (trailingEmptyCount > 0) { + tokens.push("+"); + } + if (tokens.some((token) => token.length === 0)) { + return null; + } + if (tokens.length === 0) return null; + + let key: string | null = null; + let metaKey = false; + let ctrlKey = false; + let shiftKey = false; + let altKey = false; + let modKey = false; + + for (const token of tokens) { + switch (token) { + case "cmd": + case "meta": + metaKey = true; + break; + case "ctrl": + case "control": + ctrlKey = true; + break; + case "shift": + shiftKey = true; + break; + case "alt": + case "option": + altKey = true; + break; + case "mod": + modKey = true; + break; + default: { + if (key !== null) return null; + key = normalizeKeyToken(token); + } + } + } + + if (key === null) return null; + return { + key, + metaKey, + ctrlKey, + shiftKey, + altKey, + modKey, + }; +} + +function tokenizeWhenExpression(expression: string): WhenToken[] | null { + const tokens: WhenToken[] = []; + let index = 0; + + while (index < expression.length) { + const current = expression[index]; + if (!current) 
break; + + if (/\s/.test(current)) { + index += 1; + continue; + } + if (expression.startsWith("&&", index)) { + tokens.push({ type: "and" }); + index += 2; + continue; + } + if (expression.startsWith("||", index)) { + tokens.push({ type: "or" }); + index += 2; + continue; + } + if (current === "!") { + tokens.push({ type: "not" }); + index += 1; + continue; + } + if (current === "(") { + tokens.push({ type: "lparen" }); + index += 1; + continue; + } + if (current === ")") { + tokens.push({ type: "rparen" }); + index += 1; + continue; + } + + const identifier = /^[A-Za-z_][A-Za-z0-9_.-]*/.exec(expression.slice(index)); + if (!identifier) { + return null; + } + tokens.push({ type: "identifier", value: identifier[0] }); + index += identifier[0].length; + } + + return tokens; +} + +function parseKeybindingWhenExpression(expression: string): KeybindingWhenNode | null { + const tokens = tokenizeWhenExpression(expression); + if (!tokens || tokens.length === 0) return null; + let index = 0; + + const parsePrimary = (depth: number): KeybindingWhenNode | null => { + if (depth > MAX_WHEN_EXPRESSION_DEPTH) { + return null; + } + const token = tokens[index]; + if (!token) return null; + + if (token.type === "identifier") { + index += 1; + return { type: "identifier", name: token.value }; + } + + if (token.type === "lparen") { + index += 1; + const expressionNode = parseOr(depth + 1); + const closeToken = tokens[index]; + if (!expressionNode || !closeToken || closeToken.type !== "rparen") { + return null; + } + index += 1; + return expressionNode; + } + + return null; + }; + + const parseUnary = (depth: number): KeybindingWhenNode | null => { + let notCount = 0; + while (tokens[index]?.type === "not") { + index += 1; + notCount += 1; + if (notCount > MAX_WHEN_EXPRESSION_DEPTH) { + return null; + } + } + + let node = parsePrimary(depth); + if (!node) return null; + + while (notCount > 0) { + node = { type: "not", node }; + notCount -= 1; + } + + return node; + }; + + const 
parseAnd = (depth: number): KeybindingWhenNode | null => { + let left = parseUnary(depth); + if (!left) return null; + + while (tokens[index]?.type === "and") { + index += 1; + const right = parseUnary(depth); + if (!right) return null; + left = { type: "and", left, right }; + } + + return left; + }; + + const parseOr = (depth: number): KeybindingWhenNode | null => { + let left = parseAnd(depth); + if (!left) return null; + + while (tokens[index]?.type === "or") { + index += 1; + const right = parseAnd(depth); + if (!right) return null; + left = { type: "or", left, right }; + } + + return left; + }; + + const ast = parseOr(0); + if (!ast || index !== tokens.length) return null; + return ast; +} + +function compileResolvedKeybindingRule(rule: KeybindingRule): ResolvedKeybindingRule | null { + const shortcut = parseKeybindingShortcut(rule.key); + if (!shortcut) return null; + + if (rule.when !== undefined) { + const whenAst = parseKeybindingWhenExpression(rule.when); + if (!whenAst) return null; + return { + command: rule.command, + shortcut, + whenAst, + }; + } + + return { + command: rule.command, + shortcut, + }; +} + +export function compileResolvedKeybindingsConfig( + config: KeybindingsConfig, +): ResolvedKeybindingsConfig { + const compiled: Mutable = []; + for (const rule of config) { + const result = Schema.decodeExit(ResolvedKeybindingFromConfig)(rule); + if (result._tag === "Success") { + compiled.push(result.value); + } + } + return compiled; +} + +export const encodeShortcut = (shortcut: KeybindingShortcut): string | null => { + const modifiers: string[] = []; + if (shortcut.modKey) modifiers.push("mod"); + if (shortcut.metaKey) modifiers.push("meta"); + if (shortcut.ctrlKey) modifiers.push("ctrl"); + if (shortcut.altKey) modifiers.push("alt"); + if (shortcut.shiftKey) modifiers.push("shift"); + if (!shortcut.key) return null; + if (shortcut.key !== "+" && shortcut.key.includes("+")) return null; + const key = shortcut.key === " " ? 
"space" : shortcut.key; + return [...modifiers, key].join("+"); +}; + +function encodeWhenAst(node: KeybindingWhenNode): string { + switch (node.type) { + case "identifier": + return node.name; + case "not": + return `!(${encodeWhenAst(node.node)})`; + case "and": + return `(${encodeWhenAst(node.left)} && ${encodeWhenAst(node.right)})`; + case "or": + return `(${encodeWhenAst(node.left)} || ${encodeWhenAst(node.right)})`; + } +} + +export const ResolvedKeybindingFromConfig = KeybindingRule.pipe( + Schema.decodeTo( + Schema.toType(ResolvedKeybindingRule), + SchemaTransformation.transformOrFail({ + decode: (rule) => + Effect.succeed(compileResolvedKeybindingRule(rule)).pipe( + Effect.filterOrFail( + Predicate.isNotNull, + () => + new SchemaIssue.InvalidValue(Option.some(rule), { + title: "Invalid keybinding rule", + }), + ), + Effect.map((resolved) => resolved), + ), + + encode: (resolved) => + Effect.gen(function* () { + const key = encodeShortcut(resolved.shortcut); + if (!key) { + return yield* Effect.fail( + new SchemaIssue.InvalidValue(Option.some(resolved), { + title: "Resolved shortcut cannot be encoded to key string", + }), + ); + } + + const when = resolved.whenAst ? 
encodeWhenAst(resolved.whenAst) : undefined; + return { + key, + command: resolved.command, + when, + }; + }), + }), + ), +); diff --git a/apps/server/src/keybindings.test.ts b/apps/server/src/keybindings.test.ts index 8eda0ca85d..73f334ebca 100644 --- a/apps/server/src/keybindings.test.ts +++ b/apps/server/src/keybindings.test.ts @@ -1,20 +1,22 @@ -import { KeybindingCommand, KeybindingRule, KeybindingsConfig } from "@t3tools/contracts"; +import { + KeybindingCommand, + KeybindingRule, + KeybindingsConfig, + KeybindingsConfigError, +} from "@t3tools/contracts"; import * as NodeServices from "@effect/platform-node/NodeServices"; import { assert, it } from "@effect/vitest"; import { assertFailure } from "@effect/vitest/utils"; -import { Effect, FileSystem, Layer, Logger, Path, Schema } from "effect"; +import { Effect, FileSystem, Layer, Logger, Option, Path, Schema } from "effect"; import { ServerConfig } from "./config"; import { DEFAULT_KEYBINDINGS, - Keybindings, - KeybindingsLive, ResolvedKeybindingFromConfig, - compileResolvedKeybindingRule, compileResolvedKeybindingsConfig, parseKeybindingShortcut, -} from "./keybindings"; -import { KeybindingsConfigError } from "@t3tools/contracts"; +} from "./keybindings.logic"; +import { Keybindings, KeybindingsLive } from "./keybindings"; const KeybindingsConfigJson = Schema.fromJsonString(KeybindingsConfig); const makeKeybindingsLayer = () => { @@ -35,6 +37,9 @@ const toDetailResult = (effect: Effect.Effect + Option.getOrNull(Schema.decodeUnknownOption(ResolvedKeybindingFromConfig)(rule)); + const writeKeybindingsConfig = (configPath: string, rules: readonly KeybindingRule[]) => Effect.gen(function* () { const fileSystem = yield* FileSystem.FileSystem; @@ -70,6 +75,7 @@ it.layer(NodeServices.layer)("keybindings", (it) => { altKey: false, modKey: true, }); + assert.deepEqual(parseKeybindingShortcut(" "), null); }), ); diff --git a/apps/server/src/keybindings.ts b/apps/server/src/keybindings.ts index 086d795c0c..113a6654f5 
100644 --- a/apps/server/src/keybindings.ts +++ b/apps/server/src/keybindings.ts @@ -10,16 +10,10 @@ import { KeybindingRule, KeybindingsConfig, KeybindingsConfigError, - KeybindingShortcut, - KeybindingWhenNode, MAX_KEYBINDINGS_COUNT, - MAX_WHEN_EXPRESSION_DEPTH, - ResolvedKeybindingRule, ResolvedKeybindingsConfig, - THREAD_JUMP_KEYBINDING_COMMANDS, type ServerConfigIssue, } from "@t3tools/contracts"; -import { Mutable } from "effect/Types"; import { Array, Cache, @@ -31,13 +25,10 @@ import { FileSystem, Path, Layer, - Option, Predicate, PubSub, Schema, SchemaGetter, - SchemaIssue, - SchemaTransformation, Ref, ServiceMap, Scope, @@ -46,313 +37,13 @@ import { import * as Semaphore from "effect/Semaphore"; import { ServerConfig } from "./config"; import { fromLenientJson } from "@t3tools/shared/schemaJson"; - -type WhenToken = - | { type: "identifier"; value: string } - | { type: "not" } - | { type: "and" } - | { type: "or" } - | { type: "lparen" } - | { type: "rparen" }; - -export const DEFAULT_KEYBINDINGS: ReadonlyArray = [ - { key: "mod+j", command: "terminal.toggle" }, - { key: "mod+d", command: "terminal.split", when: "terminalFocus" }, - { key: "mod+n", command: "terminal.new", when: "terminalFocus" }, - { key: "mod+w", command: "terminal.close", when: "terminalFocus" }, - { key: "mod+d", command: "diff.toggle", when: "!terminalFocus" }, - { key: "mod+n", command: "chat.new", when: "!terminalFocus" }, - { key: "mod+shift+o", command: "chat.new", when: "!terminalFocus" }, - { key: "mod+shift+n", command: "chat.newLocal", when: "!terminalFocus" }, - { key: "mod+o", command: "editor.openFavorite" }, - { key: "mod+shift+[", command: "thread.previous" }, - { key: "mod+shift+]", command: "thread.next" }, - ...THREAD_JUMP_KEYBINDING_COMMANDS.map((command, index) => ({ - key: `mod+${index + 1}`, - command, - })), -]; - -function normalizeKeyToken(token: string): string { - if (token === "space") return " "; - if (token === "esc") return "escape"; - return token; -} - 
-/** @internal - Exported for testing */ -export function parseKeybindingShortcut(value: string): KeybindingShortcut | null { - const rawTokens = value - .toLowerCase() - .split("+") - .map((token) => token.trim()); - const tokens = [...rawTokens]; - let trailingEmptyCount = 0; - while (tokens[tokens.length - 1] === "") { - trailingEmptyCount += 1; - tokens.pop(); - } - if (trailingEmptyCount > 0) { - tokens.push("+"); - } - if (tokens.some((token) => token.length === 0)) { - return null; - } - if (tokens.length === 0) return null; - - let key: string | null = null; - let metaKey = false; - let ctrlKey = false; - let shiftKey = false; - let altKey = false; - let modKey = false; - - for (const token of tokens) { - switch (token) { - case "cmd": - case "meta": - metaKey = true; - break; - case "ctrl": - case "control": - ctrlKey = true; - break; - case "shift": - shiftKey = true; - break; - case "alt": - case "option": - altKey = true; - break; - case "mod": - modKey = true; - break; - default: { - if (key !== null) return null; - key = normalizeKeyToken(token); - } - } - } - - if (key === null) return null; - return { - key, - metaKey, - ctrlKey, - shiftKey, - altKey, - modKey, - }; -} - -function tokenizeWhenExpression(expression: string): WhenToken[] | null { - const tokens: WhenToken[] = []; - let index = 0; - - while (index < expression.length) { - const current = expression[index]; - if (!current) break; - - if (/\s/.test(current)) { - index += 1; - continue; - } - if (expression.startsWith("&&", index)) { - tokens.push({ type: "and" }); - index += 2; - continue; - } - if (expression.startsWith("||", index)) { - tokens.push({ type: "or" }); - index += 2; - continue; - } - if (current === "!") { - tokens.push({ type: "not" }); - index += 1; - continue; - } - if (current === "(") { - tokens.push({ type: "lparen" }); - index += 1; - continue; - } - if (current === ")") { - tokens.push({ type: "rparen" }); - index += 1; - continue; - } - - const identifier = 
/^[A-Za-z_][A-Za-z0-9_.-]*/.exec(expression.slice(index)); - if (!identifier) { - return null; - } - tokens.push({ type: "identifier", value: identifier[0] }); - index += identifier[0].length; - } - - return tokens; -} - -function parseKeybindingWhenExpression(expression: string): KeybindingWhenNode | null { - const tokens = tokenizeWhenExpression(expression); - if (!tokens || tokens.length === 0) return null; - let index = 0; - - const parsePrimary = (depth: number): KeybindingWhenNode | null => { - if (depth > MAX_WHEN_EXPRESSION_DEPTH) { - return null; - } - const token = tokens[index]; - if (!token) return null; - - if (token.type === "identifier") { - index += 1; - return { type: "identifier", name: token.value }; - } - - if (token.type === "lparen") { - index += 1; - const expressionNode = parseOr(depth + 1); - const closeToken = tokens[index]; - if (!expressionNode || !closeToken || closeToken.type !== "rparen") { - return null; - } - index += 1; - return expressionNode; - } - - return null; - }; - - const parseUnary = (depth: number): KeybindingWhenNode | null => { - let notCount = 0; - while (tokens[index]?.type === "not") { - index += 1; - notCount += 1; - if (notCount > MAX_WHEN_EXPRESSION_DEPTH) { - return null; - } - } - - let node = parsePrimary(depth); - if (!node) return null; - - while (notCount > 0) { - node = { type: "not", node }; - notCount -= 1; - } - - return node; - }; - - const parseAnd = (depth: number): KeybindingWhenNode | null => { - let left = parseUnary(depth); - if (!left) return null; - - while (tokens[index]?.type === "and") { - index += 1; - const right = parseUnary(depth); - if (!right) return null; - left = { type: "and", left, right }; - } - - return left; - }; - - const parseOr = (depth: number): KeybindingWhenNode | null => { - let left = parseAnd(depth); - if (!left) return null; - - while (tokens[index]?.type === "or") { - index += 1; - const right = parseAnd(depth); - if (!right) return null; - left = { type: "or", left, 
right }; - } - - return left; - }; - - const ast = parseOr(0); - if (!ast || index !== tokens.length) return null; - return ast; -} - -/** @internal - Exported for testing */ -export function compileResolvedKeybindingRule(rule: KeybindingRule): ResolvedKeybindingRule | null { - const shortcut = parseKeybindingShortcut(rule.key); - if (!shortcut) return null; - - if (rule.when !== undefined) { - const whenAst = parseKeybindingWhenExpression(rule.when); - if (!whenAst) return null; - return { - command: rule.command, - shortcut, - whenAst, - }; - } - - return { - command: rule.command, - shortcut, - }; -} - -export function compileResolvedKeybindingsConfig( - config: KeybindingsConfig, -): ResolvedKeybindingsConfig { - const compiled: Mutable = []; - for (const rule of config) { - const result = Schema.decodeExit(ResolvedKeybindingFromConfig)(rule); - if (result._tag === "Success") { - compiled.push(result.value); - } - } - return compiled; -} - -export const ResolvedKeybindingFromConfig = KeybindingRule.pipe( - Schema.decodeTo( - Schema.toType(ResolvedKeybindingRule), - SchemaTransformation.transformOrFail({ - decode: (rule) => - Effect.succeed(compileResolvedKeybindingRule(rule)).pipe( - Effect.filterOrFail( - Predicate.isNotNull, - () => - new SchemaIssue.InvalidValue(Option.some(rule), { - title: "Invalid keybinding rule", - }), - ), - Effect.map((resolved) => resolved), - ), - - encode: (resolved) => - Effect.gen(function* () { - const key = encodeShortcut(resolved.shortcut); - if (!key) { - return yield* Effect.fail( - new SchemaIssue.InvalidValue(Option.some(resolved), { - title: "Resolved shortcut cannot be encoded to key string", - }), - ); - } - - const when = resolved.whenAst ? 
encodeWhenAst(resolved.whenAst) : undefined; - return { - key, - command: resolved.command, - when, - }; - }), - }), - ), -); - -export const ResolvedKeybindingsFromConfig = Schema.Array(ResolvedKeybindingFromConfig).check( - Schema.isMaxLength(MAX_KEYBINDINGS_COUNT), -); +import { + compileResolvedKeybindingsConfig, + DEFAULT_KEYBINDINGS, + encodeShortcut, + parseKeybindingShortcut, + ResolvedKeybindingFromConfig, +} from "./keybindings.logic"; function isSameKeybindingRule(left: KeybindingRule, right: KeybindingRule): boolean { return ( @@ -377,32 +68,6 @@ function hasSameShortcutContext(left: KeybindingRule, right: KeybindingRule): bo return leftContext === rightContext; } -function encodeShortcut(shortcut: KeybindingShortcut): string | null { - const modifiers: string[] = []; - if (shortcut.modKey) modifiers.push("mod"); - if (shortcut.metaKey) modifiers.push("meta"); - if (shortcut.ctrlKey) modifiers.push("ctrl"); - if (shortcut.altKey) modifiers.push("alt"); - if (shortcut.shiftKey) modifiers.push("shift"); - if (!shortcut.key) return null; - if (shortcut.key !== "+" && shortcut.key.includes("+")) return null; - const key = shortcut.key === " " ? 
"space" : shortcut.key; - return [...modifiers, key].join("+"); -} - -function encodeWhenAst(node: KeybindingWhenNode): string { - switch (node.type) { - case "identifier": - return node.name; - case "not": - return `!(${encodeWhenAst(node.node)})`; - case "and": - return `(${encodeWhenAst(node.left)} && ${encodeWhenAst(node.right)})`; - case "or": - return `(${encodeWhenAst(node.left)} || ${encodeWhenAst(node.right)})`; - } -} - const DEFAULT_RESOLVED_KEYBINDINGS = compileResolvedKeybindingsConfig(DEFAULT_KEYBINDINGS); const RawKeybindingsEntries = fromLenientJson(Schema.Array(Schema.Unknown)); @@ -417,12 +82,12 @@ const KeybindingsConfigPrettyJson = KeybindingsConfigJson.pipe( }), ); -export interface KeybindingsConfigState { +interface KeybindingsConfigState { readonly keybindings: ResolvedKeybindingsConfig; readonly issues: readonly ServerConfigIssue[]; } -export interface KeybindingsChangeEvent { +interface KeybindingsChangeEvent { readonly keybindings: ResolvedKeybindingsConfig; readonly issues: readonly ServerConfigIssue[]; } @@ -469,7 +134,7 @@ function mergeWithDefaultKeybindings(custom: ResolvedKeybindingsConfig): Resolve /** * KeybindingsShape - Service API for keybinding configuration operations. */ -export interface KeybindingsShape { +interface KeybindingsShape { /** * Start the keybindings runtime and attach file watching. 
* diff --git a/apps/server/src/observability/Attributes.test.ts b/apps/server/src/observability/Attributes.test.ts deleted file mode 100644 index 4b495598ea..0000000000 --- a/apps/server/src/observability/Attributes.test.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { assert, describe, it } from "@effect/vitest"; - -import { compactTraceAttributes, normalizeModelMetricLabel } from "./Attributes.ts"; - -describe("Attributes", () => { - it("normalizes circular arrays, maps, and sets without recursing forever", () => { - const array: Array = ["alpha"]; - array.push(array); - - const map = new Map(); - map.set("self", map); - - const set = new Set(); - set.add(set); - - assert.deepStrictEqual( - compactTraceAttributes({ - array, - map, - set, - }), - { - array: ["alpha", "[Circular]"], - map: { self: "[Circular]" }, - set: ["[Circular]"], - }, - ); - }); - - it("normalizes invalid dates without throwing", () => { - assert.deepStrictEqual( - compactTraceAttributes({ - invalidDate: new Date("not-a-real-date"), - }), - { - invalidDate: "Invalid Date", - }, - ); - }); - - it("groups GPT-family models under a shared metric label", () => { - assert.strictEqual(normalizeModelMetricLabel("gpt-4o"), "gpt"); - assert.strictEqual(normalizeModelMetricLabel("gpt-5.4"), "gpt"); - assert.strictEqual(normalizeModelMetricLabel("claude-sonnet-4"), "claude"); - }); -}); diff --git a/apps/server/src/observability/Attributes.ts b/apps/server/src/observability/Attributes.ts index 2251fcfea6..c597b44279 100644 --- a/apps/server/src/observability/Attributes.ts +++ b/apps/server/src/observability/Attributes.ts @@ -1,88 +1,7 @@ import { Cause, Exit } from "effect"; -export type MetricAttributeValue = string; -export type MetricAttributes = Readonly>; -export type TraceAttributes = Readonly>; -export type ObservabilityOutcome = "success" | "failure" | "interrupt"; - -function isPlainObject(value: unknown): value is Record { - return typeof value === "object" && value !== null && 
!Array.isArray(value); -} - -function markSeen(value: object, seen: WeakSet): boolean { - if (seen.has(value)) { - return true; - } - seen.add(value); - return false; -} - -function normalizeJsonValue(value: unknown, seen: WeakSet = new WeakSet()): unknown { - if ( - value === null || - value === undefined || - typeof value === "string" || - typeof value === "number" || - typeof value === "boolean" - ) { - return value ?? null; - } - if (typeof value === "bigint") { - return value.toString(); - } - if (value instanceof Date) { - return Number.isNaN(value.getTime()) ? "Invalid Date" : value.toISOString(); - } - if (value instanceof Error) { - return { - name: value.name, - message: value.message, - ...(value.stack ? { stack: value.stack } : {}), - }; - } - if (Array.isArray(value)) { - if (markSeen(value, seen)) { - return "[Circular]"; - } - return value.map((entry) => normalizeJsonValue(entry, seen)); - } - if (value instanceof Map) { - if (markSeen(value, seen)) { - return "[Circular]"; - } - return Object.fromEntries( - Array.from(value.entries(), ([key, entryValue]) => [ - String(key), - normalizeJsonValue(entryValue, seen), - ]), - ); - } - if (value instanceof Set) { - if (markSeen(value, seen)) { - return "[Circular]"; - } - return Array.from(value.values(), (entry) => normalizeJsonValue(entry, seen)); - } - if (!isPlainObject(value)) { - return String(value); - } - if (markSeen(value, seen)) { - return "[Circular]"; - } - return Object.fromEntries( - Object.entries(value).map(([key, entryValue]) => [key, normalizeJsonValue(entryValue, seen)]), - ); -} - -export function compactTraceAttributes( - attributes: Readonly>, -): TraceAttributes { - return Object.fromEntries( - Object.entries(attributes) - .filter(([, value]) => value !== undefined) - .map(([key, value]) => [key, normalizeJsonValue(value)]), - ); -} +type MetricAttributes = Readonly>; +type ObservabilityOutcome = "success" | "failure" | "interrupt"; export function compactMetricAttributes( 
attributes: Readonly>, diff --git a/apps/server/src/observability/Layers/Observability.ts b/apps/server/src/observability/Layers/Observability.ts deleted file mode 100644 index 29b9d7eac5..0000000000 --- a/apps/server/src/observability/Layers/Observability.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { Effect, Layer, References, Tracer } from "effect"; -import { OtlpMetrics, OtlpSerialization, OtlpTracer } from "effect/unstable/observability"; - -import { ServerConfig } from "../../config.ts"; -import { ServerLoggerLive } from "../../serverLogger.ts"; -import { makeLocalFileTracer } from "../LocalFileTracer.ts"; - -const otlpSerializationLayer = OtlpSerialization.layerJson; - -export const ObservabilityLive = Layer.unwrap( - Effect.gen(function* () { - const config = yield* ServerConfig; - - const traceReferencesLayer = Layer.mergeAll( - Layer.succeed(Tracer.MinimumTraceLevel, config.traceMinLevel), - Layer.succeed(References.TracerTimingEnabled, config.traceTimingEnabled), - ); - - const tracerLayer = Layer.effect( - Tracer.Tracer, - Effect.gen(function* () { - const delegate = - config.otlpTracesUrl === undefined - ? undefined - : yield* OtlpTracer.make({ - url: config.otlpTracesUrl, - exportInterval: `${config.otlpExportIntervalMs} millis`, - resource: { - serviceName: config.otlpServiceName, - attributes: { - "service.runtime": "t3-server", - "service.mode": config.mode, - }, - }, - }); - - return yield* makeLocalFileTracer({ - filePath: config.serverTracePath, - maxBytes: config.traceMaxBytes, - maxFiles: config.traceMaxFiles, - batchWindowMs: config.traceBatchWindowMs, - ...(delegate ? { delegate } : {}), - }); - }), - ).pipe(Layer.provideMerge(otlpSerializationLayer)); - - const metricsLayer = - config.otlpMetricsUrl === undefined - ? 
Layer.empty - : OtlpMetrics.layer({ - url: config.otlpMetricsUrl, - exportInterval: `${config.otlpExportIntervalMs} millis`, - resource: { - serviceName: config.otlpServiceName, - attributes: { - "service.runtime": "t3-server", - "service.mode": config.mode, - }, - }, - }).pipe(Layer.provideMerge(otlpSerializationLayer)); - - return Layer.mergeAll(ServerLoggerLive, traceReferencesLayer, tracerLayer, metricsLayer); - }), -); diff --git a/apps/server/src/observability/LocalFileTracer.test.ts b/apps/server/src/observability/LocalFileTracer.test.ts deleted file mode 100644 index 1bc5a34305..0000000000 --- a/apps/server/src/observability/LocalFileTracer.test.ts +++ /dev/null @@ -1,117 +0,0 @@ -import fs from "node:fs"; -import os from "node:os"; -import path from "node:path"; - -import { assert, describe, it } from "@effect/vitest"; -import { Effect, Layer, Logger, References, Tracer } from "effect"; - -import type { TraceRecord } from "./TraceRecord.ts"; -import { makeLocalFileTracer } from "./LocalFileTracer.ts"; - -const makeTestLayer = (tracePath: string) => - Layer.mergeAll( - Layer.effect( - Tracer.Tracer, - makeLocalFileTracer({ - filePath: tracePath, - maxBytes: 1024 * 1024, - maxFiles: 2, - batchWindowMs: 10_000, - }), - ), - Logger.layer([Logger.tracerLogger], { mergeWithExisting: false }), - Layer.succeed(References.MinimumLogLevel, "Info"), - ); - -const readTraceRecords = (tracePath: string): Array => - fs - .readFileSync(tracePath, "utf8") - .trim() - .split("\n") - .filter((line) => line.length > 0) - .map((line) => JSON.parse(line) as TraceRecord); - -describe("LocalFileTracer", () => { - it.effect("writes nested spans to disk and captures log messages as span events", () => - Effect.gen(function* () { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "t3-local-tracer-")); - const tracePath = path.join(tempDir, "server.trace.ndjson"); - - try { - yield* Effect.scoped( - Effect.gen(function* () { - const program = Effect.gen(function* () { - yield* 
Effect.annotateCurrentSpan({ - "demo.parent": true, - }); - yield* Effect.logInfo("parent event"); - yield* Effect.gen(function* () { - yield* Effect.annotateCurrentSpan({ - "demo.child": true, - }); - yield* Effect.logInfo("child event"); - }).pipe(Effect.withSpan("child-span")); - }).pipe(Effect.withSpan("parent-span")); - - yield* program.pipe(Effect.provide(makeTestLayer(tracePath))); - }), - ); - - const records = readTraceRecords(tracePath); - assert.equal(records.length, 2); - - const parent = records.find((record) => record.name === "parent-span"); - const child = records.find((record) => record.name === "child-span"); - - assert.notEqual(parent, undefined); - assert.notEqual(child, undefined); - if (!parent || !child) { - return; - } - - assert.equal(child.parentSpanId, parent.spanId); - assert.equal(parent.attributes["demo.parent"], true); - assert.equal(child.attributes["demo.child"], true); - assert.equal( - parent.events.some((event) => event.name === "parent event"), - true, - ); - assert.equal( - child.events.some((event) => event.name === "child event"), - true, - ); - assert.equal( - child.events.some((event) => event.attributes["effect.logLevel"] === "INFO"), - true, - ); - } finally { - fs.rmSync(tempDir, { recursive: true, force: true }); - } - }), - ); - - it.effect("serializes interrupted spans with an interrupted exit status", () => - Effect.gen(function* () { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "t3-local-tracer-")); - const tracePath = path.join(tempDir, "server.trace.ndjson"); - - try { - yield* Effect.scoped( - Effect.exit( - Effect.interrupt.pipe( - Effect.withSpan("interrupt-span"), - Effect.provide(makeTestLayer(tracePath)), - ), - ), - ); - - const records = readTraceRecords(tracePath); - assert.equal(records.length, 1); - assert.equal(records[0]?.name, "interrupt-span"); - assert.equal(records[0]?.exit._tag, "Interrupted"); - } finally { - fs.rmSync(tempDir, { recursive: true, force: true }); - } - }), - ); -}); 
diff --git a/apps/server/src/observability/LocalFileTracer.ts b/apps/server/src/observability/LocalFileTracer.ts deleted file mode 100644 index 7f75eb0bf9..0000000000 --- a/apps/server/src/observability/LocalFileTracer.ts +++ /dev/null @@ -1,104 +0,0 @@ -import type * as Exit from "effect/Exit"; -import { Effect, Option, Tracer } from "effect"; - -import { spanToTraceRecord } from "./TraceRecord.ts"; -import { makeTraceSink } from "./TraceSink.ts"; - -export interface LocalFileTracerOptions { - readonly filePath: string; - readonly maxBytes: number; - readonly maxFiles: number; - readonly batchWindowMs: number; - readonly delegate?: Tracer.Tracer; -} - -class LocalFileSpan implements Tracer.Span { - readonly _tag = "Span"; - readonly name: string; - readonly spanId: string; - readonly traceId: string; - readonly parent: Option.Option; - readonly annotations: Tracer.Span["annotations"]; - readonly links: Array; - readonly sampled: boolean; - readonly kind: Tracer.SpanKind; - - status: Tracer.SpanStatus; - attributes: Map; - events: Array<[name: string, startTime: bigint, attributes: Record]>; - - constructor( - options: Parameters[0], - private readonly delegate: Tracer.Span, - private readonly push: (record: ReturnType) => void, - ) { - this.name = delegate.name; - this.spanId = delegate.spanId; - this.traceId = delegate.traceId; - this.parent = options.parent; - this.annotations = options.annotations; - this.links = [...options.links]; - this.sampled = delegate.sampled; - this.kind = delegate.kind; - this.status = { - _tag: "Started", - startTime: options.startTime, - }; - this.attributes = new Map(); - this.events = []; - } - - end(endTime: bigint, exit: Exit.Exit): void { - this.status = { - _tag: "Ended", - startTime: this.status.startTime, - endTime, - exit, - }; - this.delegate.end(endTime, exit); - - if (this.sampled) { - this.push(spanToTraceRecord(this)); - } - } - - attribute(key: string, value: unknown): void { - this.attributes.set(key, value); - 
this.delegate.attribute(key, value); - } - - event(name: string, startTime: bigint, attributes?: Record): void { - const nextAttributes = attributes ?? {}; - this.events.push([name, startTime, nextAttributes]); - this.delegate.event(name, startTime, nextAttributes); - } - - addLinks(links: ReadonlyArray): void { - this.links.push(...links); - this.delegate.addLinks(links); - } -} - -export const makeLocalFileTracer = Effect.fn("makeLocalFileTracer")(function* ( - options: LocalFileTracerOptions, -) { - const sink = yield* makeTraceSink({ - filePath: options.filePath, - maxBytes: options.maxBytes, - maxFiles: options.maxFiles, - batchWindowMs: options.batchWindowMs, - }); - - const delegate = - options.delegate ?? - Tracer.make({ - span: (spanOptions) => new Tracer.NativeSpan(spanOptions), - }); - - return Tracer.make({ - span(spanOptions) { - return new LocalFileSpan(spanOptions, delegate.span(spanOptions), sink.push); - }, - ...(delegate.context ? { context: delegate.context } : {}), - }); -}); diff --git a/apps/server/src/observability/Metrics.ts b/apps/server/src/observability/Metrics.ts index 3e527c7cb4..cfae757012 100644 --- a/apps/server/src/observability/Metrics.ts +++ b/apps/server/src/observability/Metrics.ts @@ -54,22 +54,6 @@ export const providerRuntimeEventsTotal = Metric.counter("t3_provider_runtime_ev description: "Total canonical provider runtime events processed.", }); -export const gitCommandsTotal = Metric.counter("t3_git_commands_total", { - description: "Total git commands executed by the server runtime.", -}); - -export const gitCommandDuration = Metric.timer("t3_git_command_duration", { - description: "Git command execution duration.", -}); - -export const terminalSessionsTotal = Metric.counter("t3_terminal_sessions_total", { - description: "Total terminal sessions started.", -}); - -export const terminalRestartsTotal = Metric.counter("t3_terminal_restarts_total", { - description: "Total terminal restart requests handled.", -}); - export 
const metricAttributes = ( attributes: Readonly>, ): ReadonlyArray<[string, string]> => Object.entries(compactMetricAttributes(attributes)); @@ -80,7 +64,7 @@ export const increment = ( amount = 1, ) => Metric.update(Metric.withAttributes(metric, metricAttributes(attributes)), amount); -export interface WithMetricsOptions { +interface WithMetricsOptions { readonly counter?: Metric.Metric; readonly timer?: Metric.Metric; readonly attributes?: diff --git a/apps/server/src/observability/TraceRecord.ts b/apps/server/src/observability/TraceRecord.ts deleted file mode 100644 index 3aee1b267c..0000000000 --- a/apps/server/src/observability/TraceRecord.ts +++ /dev/null @@ -1,100 +0,0 @@ -import { Cause, Exit, Option, Tracer } from "effect"; - -import { compactTraceAttributes } from "./Attributes.ts"; - -export interface TraceRecord { - readonly type: "effect-span"; - readonly name: string; - readonly traceId: string; - readonly spanId: string; - readonly parentSpanId?: string; - readonly sampled: boolean; - readonly kind: Tracer.SpanKind; - readonly startTimeUnixNano: string; - readonly endTimeUnixNano: string; - readonly durationMs: number; - readonly attributes: Readonly>; - readonly events: ReadonlyArray<{ - readonly name: string; - readonly timeUnixNano: string; - readonly attributes: Readonly>; - }>; - readonly links: ReadonlyArray<{ - readonly traceId: string; - readonly spanId: string; - readonly attributes: Readonly>; - }>; - readonly exit: - | { - readonly _tag: "Success"; - } - | { - readonly _tag: "Interrupted"; - readonly cause: string; - } - | { - readonly _tag: "Failure"; - readonly cause: string; - }; -} - -interface SerializableSpan { - readonly name: string; - readonly traceId: string; - readonly spanId: string; - readonly parent: Option.Option; - readonly status: Tracer.SpanStatus; - readonly sampled: boolean; - readonly kind: Tracer.SpanKind; - readonly attributes: ReadonlyMap; - readonly links: ReadonlyArray; - readonly events: ReadonlyArray< - readonly 
[name: string, startTime: bigint, attributes: Record] - >; -} - -function formatTraceExit(exit: Exit.Exit): TraceRecord["exit"] { - if (Exit.isSuccess(exit)) { - return { _tag: "Success" }; - } - if (Cause.hasInterruptsOnly(exit.cause)) { - return { - _tag: "Interrupted", - cause: Cause.pretty(exit.cause), - }; - } - return { - _tag: "Failure", - cause: Cause.pretty(exit.cause), - }; -} - -export function spanToTraceRecord(span: SerializableSpan): TraceRecord { - const status = span.status as Extract; - const parentSpanId = Option.getOrUndefined(span.parent)?.spanId; - - return { - type: "effect-span", - name: span.name, - traceId: span.traceId, - spanId: span.spanId, - ...(parentSpanId ? { parentSpanId } : {}), - sampled: span.sampled, - kind: span.kind, - startTimeUnixNano: String(status.startTime), - endTimeUnixNano: String(status.endTime), - durationMs: Number(status.endTime - status.startTime) / 1_000_000, - attributes: compactTraceAttributes(Object.fromEntries(span.attributes)), - events: span.events.map(([name, startTime, attributes]) => ({ - name, - timeUnixNano: String(startTime), - attributes: compactTraceAttributes(attributes), - })), - links: span.links.map((link) => ({ - traceId: link.span.traceId, - spanId: link.span.spanId, - attributes: compactTraceAttributes(link.attributes), - })), - exit: formatTraceExit(status.exit), - }; -} diff --git a/apps/server/src/observability/TraceSink.test.ts b/apps/server/src/observability/TraceSink.test.ts deleted file mode 100644 index f4db90516b..0000000000 --- a/apps/server/src/observability/TraceSink.test.ts +++ /dev/null @@ -1,152 +0,0 @@ -import fs from "node:fs"; -import os from "node:os"; -import path from "node:path"; - -import { assert, describe, it } from "@effect/vitest"; -import { Effect } from "effect"; - -import type { TraceRecord } from "./TraceRecord.ts"; -import { makeTraceSink } from "./TraceSink.ts"; - -const makeRecord = (name: string, suffix = ""): TraceRecord => ({ - type: "effect-span", - name, 
- traceId: `trace-${name}-${suffix}`, - spanId: `span-${name}-${suffix}`, - sampled: true, - kind: "internal", - startTimeUnixNano: "1", - endTimeUnixNano: "2", - durationMs: 1, - attributes: { - payload: suffix, - }, - events: [], - links: [], - exit: { - _tag: "Success", - }, -}); - -describe("TraceSink", () => { - it.effect("flushes buffered records on close", () => - Effect.scoped( - Effect.gen(function* () { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "t3-trace-sink-")); - const tracePath = path.join(tempDir, "server.trace.ndjson"); - - try { - const sink = yield* makeTraceSink({ - filePath: tracePath, - maxBytes: 1024, - maxFiles: 2, - batchWindowMs: 10_000, - }); - - sink.push(makeRecord("alpha")); - sink.push(makeRecord("beta")); - yield* sink.close(); - - const lines = fs - .readFileSync(tracePath, "utf8") - .trim() - .split("\n") - .map((line) => JSON.parse(line) as TraceRecord); - - assert.equal(lines.length, 2); - assert.equal(lines[0]?.name, "alpha"); - assert.equal(lines[1]?.name, "beta"); - } finally { - fs.rmSync(tempDir, { recursive: true, force: true }); - } - }), - ), - ); - - it.effect("rotates the trace file when the configured max size is exceeded", () => - Effect.scoped( - Effect.gen(function* () { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "t3-trace-sink-")); - const tracePath = path.join(tempDir, "server.trace.ndjson"); - - try { - const sink = yield* makeTraceSink({ - filePath: tracePath, - maxBytes: 180, - maxFiles: 2, - batchWindowMs: 10_000, - }); - - for (let index = 0; index < 8; index += 1) { - sink.push(makeRecord("rotate", `${index}-${"x".repeat(48)}`)); - yield* sink.flush; - } - yield* sink.close(); - - const matchingFiles = fs - .readdirSync(tempDir) - .filter( - (entry) => - entry === "server.trace.ndjson" || entry.startsWith("server.trace.ndjson."), - ) - .toSorted(); - - assert.equal( - matchingFiles.some((entry) => entry === "server.trace.ndjson.1"), - true, - ); - assert.equal( - 
matchingFiles.some((entry) => entry === "server.trace.ndjson.3"), - false, - ); - } finally { - fs.rmSync(tempDir, { recursive: true, force: true }); - } - }), - ), - ); - - it.effect("drops only the invalid record when serialization fails", () => - Effect.scoped( - Effect.gen(function* () { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "t3-trace-sink-")); - const tracePath = path.join(tempDir, "server.trace.ndjson"); - - try { - const sink = yield* makeTraceSink({ - filePath: tracePath, - maxBytes: 1024, - maxFiles: 2, - batchWindowMs: 10_000, - }); - - const circular: Array = []; - circular.push(circular); - - sink.push(makeRecord("alpha")); - sink.push({ - ...makeRecord("invalid"), - attributes: { - circular, - }, - } as TraceRecord); - sink.push(makeRecord("beta")); - yield* sink.close(); - - const lines = fs - .readFileSync(tracePath, "utf8") - .trim() - .split("\n") - .map((line) => JSON.parse(line) as TraceRecord); - - assert.deepStrictEqual( - lines.map((line) => line.name), - ["alpha", "beta"], - ); - } finally { - fs.rmSync(tempDir, { recursive: true, force: true }); - } - }), - ), - ); -}); diff --git a/apps/server/src/observability/TraceSink.ts b/apps/server/src/observability/TraceSink.ts deleted file mode 100644 index 1bd00b4734..0000000000 --- a/apps/server/src/observability/TraceSink.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { RotatingFileSink } from "@t3tools/shared/logging"; -import { Effect } from "effect"; - -import type { TraceRecord } from "./TraceRecord.ts"; - -const FLUSH_BUFFER_THRESHOLD = 32; - -export interface TraceSinkOptions { - readonly filePath: string; - readonly maxBytes: number; - readonly maxFiles: number; - readonly batchWindowMs: number; -} - -export interface TraceSink { - readonly filePath: string; - push: (record: TraceRecord) => void; - flush: Effect.Effect; - close: () => Effect.Effect; -} - -export const makeTraceSink = Effect.fn("makeTraceSink")(function* (options: TraceSinkOptions) { - const sink = new 
RotatingFileSink({ - filePath: options.filePath, - maxBytes: options.maxBytes, - maxFiles: options.maxFiles, - }); - - let buffer: Array = []; - - const flushUnsafe = () => { - if (buffer.length === 0) { - return; - } - - const chunk = buffer.join(""); - buffer = []; - - try { - sink.write(chunk); - } catch { - buffer.unshift(chunk); - } - }; - - const flush = Effect.sync(flushUnsafe).pipe(Effect.withTracerEnabled(false)); - - yield* Effect.addFinalizer(() => flush.pipe(Effect.ignore)); - yield* Effect.forkScoped( - Effect.sleep(`${options.batchWindowMs} millis`).pipe(Effect.andThen(flush), Effect.forever), - ); - - return { - filePath: options.filePath, - push(record) { - try { - buffer.push(`${JSON.stringify(record)}\n`); - if (buffer.length >= FLUSH_BUFFER_THRESHOLD) { - flushUnsafe(); - } - } catch { - return; - } - }, - flush, - close: () => flush, - } satisfies TraceSink; -}); diff --git a/apps/server/src/open.logic.ts b/apps/server/src/open.logic.ts new file mode 100644 index 0000000000..7423f8cb7a --- /dev/null +++ b/apps/server/src/open.logic.ts @@ -0,0 +1,138 @@ +import { spawn } from "node:child_process"; + +import { EDITORS, OpenError, type EditorId } from "@t3tools/contracts"; +import { Effect } from "effect"; +import { isCommandAvailable } from "./open.shared"; + +export interface OpenInEditorInput { + readonly cwd: string; + readonly editor: EditorId; +} + +interface EditorLaunch { + readonly command: string; + readonly args: ReadonlyArray; +} + +const TARGET_WITH_POSITION_PATTERN = /^(.*?):(\d+)(?::(\d+))?$/; + +function parseTargetPathAndPosition(target: string): { + path: string; + line: string | undefined; + column: string | undefined; +} | null { + const match = TARGET_WITH_POSITION_PATTERN.exec(target); + if (!match?.[1] || !match[2]) { + return null; + } + + return { + path: match[1], + line: match[2], + column: match[3], + }; +} + +function resolveCommandEditorArgs( + editor: (typeof EDITORS)[number], + target: string, +): ReadonlyArray { + 
const parsedTarget = parseTargetPathAndPosition(target); + + switch (editor.launchStyle) { + case "direct-path": + return [target]; + case "goto": + return parsedTarget ? ["--goto", target] : [target]; + case "line-column": { + if (!parsedTarget) { + return [target]; + } + + const { path, line, column } = parsedTarget; + return [...(line ? ["--line", line] : []), ...(column ? ["--column", column] : []), path]; + } + } +} + +function fileManagerCommandForPlatform(platform: NodeJS.Platform): string { + switch (platform) { + case "darwin": + return "open"; + case "win32": + return "explorer"; + default: + return "xdg-open"; + } +} + +export function resolveAvailableEditors( + platform: NodeJS.Platform = process.platform, + env: NodeJS.ProcessEnv = process.env, +): ReadonlyArray { + const available: EditorId[] = []; + + for (const editor of EDITORS) { + const command = editor.command ?? fileManagerCommandForPlatform(platform); + if (isCommandAvailable(command, { platform, env })) { + available.push(editor.id); + } + } + + return available; +} + +export const resolveEditorLaunch = Effect.fnUntraced(function* ( + input: OpenInEditorInput, + platform: NodeJS.Platform = process.platform, +): Effect.fn.Return { + const editorDef = EDITORS.find((editor) => editor.id === input.editor); + if (!editorDef) { + return yield* new OpenError({ message: `Unknown editor: ${input.editor}` }); + } + + if (editorDef.command) { + return { + command: editorDef.command, + args: resolveCommandEditorArgs(editorDef, input.cwd), + }; + } + + if (editorDef.id !== "file-manager") { + return yield* new OpenError({ message: `Unsupported editor: ${input.editor}` }); + } + + return { command: fileManagerCommandForPlatform(platform), args: [input.cwd] }; +}); + +export const launchDetached = (launch: EditorLaunch) => + Effect.gen(function* () { + if (!isCommandAvailable(launch.command)) { + return yield* new OpenError({ message: `Editor command not found: ${launch.command}` }); + } + + yield* 
Effect.callback((resume) => { + let child; + try { + child = spawn(launch.command, [...launch.args], { + detached: true, + stdio: "ignore", + shell: process.platform === "win32", + }); + } catch (error) { + return resume( + Effect.fail(new OpenError({ message: "failed to spawn detached process", cause: error })), + ); + } + + const handleSpawn = () => { + child.unref(); + resume(Effect.void); + }; + + child.once("spawn", handleSpawn); + child.once("error", (cause) => + resume(Effect.fail(new OpenError({ message: "failed to spawn detached process", cause }))), + ); + }); + }); diff --git a/apps/server/src/open.shared.ts b/apps/server/src/open.shared.ts new file mode 100644 index 0000000000..a0c68073a6 --- /dev/null +++ b/apps/server/src/open.shared.ts @@ -0,0 +1,112 @@ +import { accessSync, constants, statSync } from "node:fs"; +import { extname, join } from "node:path"; + +interface CommandAvailabilityOptions { + readonly platform?: NodeJS.Platform; + readonly env?: NodeJS.ProcessEnv; +} + +function stripWrappingQuotes(value: string): string { + return value.replace(/^"+|"+$/g, ""); +} + +function resolvePathEnvironmentVariable(env: NodeJS.ProcessEnv): string { + return env.PATH ?? env.Path ?? env.path ?? ""; +} + +function resolveWindowsPathExtensions(env: NodeJS.ProcessEnv): ReadonlyArray { + const rawValue = env.PATHEXT; + const fallback = [".COM", ".EXE", ".BAT", ".CMD"]; + if (!rawValue) return fallback; + + const parsed = rawValue + .split(";") + .map((entry) => entry.trim()) + .filter((entry) => entry.length > 0) + .map((entry) => (entry.startsWith(".") ? entry.toUpperCase() : `.${entry.toUpperCase()}`)); + return parsed.length > 0 ? 
Array.from(new Set(parsed)) : fallback; +} + +function resolveCommandCandidates( + command: string, + platform: NodeJS.Platform, + windowsPathExtensions: ReadonlyArray, +): ReadonlyArray { + if (platform !== "win32") return [command]; + const extension = extname(command); + const normalizedExtension = extension.toUpperCase(); + + if (extension.length > 0 && windowsPathExtensions.includes(normalizedExtension)) { + const commandWithoutExtension = command.slice(0, -extension.length); + return Array.from( + new Set([ + command, + `${commandWithoutExtension}${normalizedExtension}`, + `${commandWithoutExtension}${normalizedExtension.toLowerCase()}`, + ]), + ); + } + + const candidates: string[] = []; + for (const extension of windowsPathExtensions) { + candidates.push(`${command}${extension}`); + candidates.push(`${command}${extension.toLowerCase()}`); + } + return Array.from(new Set(candidates)); +} + +function isExecutableFile( + filePath: string, + platform: NodeJS.Platform, + windowsPathExtensions: ReadonlyArray, +): boolean { + try { + const stat = statSync(filePath); + if (!stat.isFile()) return false; + if (platform === "win32") { + const extension = extname(filePath); + if (extension.length === 0) return false; + return windowsPathExtensions.includes(extension.toUpperCase()); + } + accessSync(filePath, constants.X_OK); + return true; + } catch { + return false; + } +} + +function resolvePathDelimiter(platform: NodeJS.Platform): string { + return platform === "win32" ? ";" : ":"; +} + +export function isCommandAvailable( + command: string, + options: CommandAvailabilityOptions = {}, +): boolean { + const platform = options.platform ?? process.platform; + const env = options.env ?? process.env; + const windowsPathExtensions = platform === "win32" ? 
resolveWindowsPathExtensions(env) : []; + const commandCandidates = resolveCommandCandidates(command, platform, windowsPathExtensions); + + if (command.includes("/") || command.includes("\\")) { + return commandCandidates.some((candidate) => + isExecutableFile(candidate, platform, windowsPathExtensions), + ); + } + + const pathValue = resolvePathEnvironmentVariable(env); + if (pathValue.length === 0) return false; + const pathEntries = pathValue + .split(resolvePathDelimiter(platform)) + .map((entry) => stripWrappingQuotes(entry.trim())) + .filter((entry) => entry.length > 0); + + for (const pathEntry of pathEntries) { + for (const candidate of commandCandidates) { + if (isExecutableFile(join(pathEntry, candidate), platform, windowsPathExtensions)) { + return true; + } + } + } + return false; +} diff --git a/apps/server/src/open.test.ts b/apps/server/src/open.test.ts index 76b14c8597..0ac0549f65 100644 --- a/apps/server/src/open.test.ts +++ b/apps/server/src/open.test.ts @@ -3,12 +3,8 @@ import { assert, it } from "@effect/vitest"; import { assertSuccess } from "@effect/vitest/utils"; import { FileSystem, Path, Effect } from "effect"; -import { - isCommandAvailable, - launchDetached, - resolveAvailableEditors, - resolveEditorLaunch, -} from "./open"; +import { launchDetached, resolveAvailableEditors, resolveEditorLaunch } from "./open.logic"; +import { isCommandAvailable } from "./open.shared"; it.layer(NodeServices.layer)("resolveEditorLaunch", (it) => { it.effect("returns commands for command-based editors", () => diff --git a/apps/server/src/open.ts b/apps/server/src/open.ts index 58074ceef2..71dffa3aa7 100644 --- a/apps/server/src/open.ts +++ b/apps/server/src/open.ts @@ -1,216 +1,17 @@ -/** - * Open - Browser/editor launch service interface. - * - * Owns process launch helpers for opening URLs in a browser and workspace - * paths in a configured editor. 
- * - * @module Open - */ -import { spawn } from "node:child_process"; -import { accessSync, constants, statSync } from "node:fs"; -import { extname, join } from "node:path"; - -import { EDITORS, OpenError, type EditorId } from "@t3tools/contracts"; +import { OpenError } from "@t3tools/contracts"; import { ServiceMap, Effect, Layer } from "effect"; - -// ============================== -// Definitions -// ============================== - -export { OpenError }; - -export interface OpenInEditorInput { - readonly cwd: string; - readonly editor: EditorId; -} - -interface EditorLaunch { - readonly command: string; - readonly args: ReadonlyArray; -} - -interface CommandAvailabilityOptions { - readonly platform?: NodeJS.Platform; - readonly env?: NodeJS.ProcessEnv; -} - -const TARGET_WITH_POSITION_PATTERN = /^(.*?):(\d+)(?::(\d+))?$/; - -function parseTargetPathAndPosition(target: string): { - path: string; - line: string | undefined; - column: string | undefined; -} | null { - const match = TARGET_WITH_POSITION_PATTERN.exec(target); - if (!match?.[1] || !match[2]) { - return null; - } - - return { - path: match[1], - line: match[2], - column: match[3], - }; -} - -function resolveCommandEditorArgs( - editor: (typeof EDITORS)[number], - target: string, -): ReadonlyArray { - const parsedTarget = parseTargetPathAndPosition(target); - - switch (editor.launchStyle) { - case "direct-path": - return [target]; - case "goto": - return parsedTarget ? ["--goto", target] : [target]; - case "line-column": { - if (!parsedTarget) { - return [target]; - } - - const { path, line, column } = parsedTarget; - return [...(line ? ["--line", line] : []), ...(column ? 
["--column", column] : []), path]; - } - } -} - -function fileManagerCommandForPlatform(platform: NodeJS.Platform): string { - switch (platform) { - case "darwin": - return "open"; - case "win32": - return "explorer"; - default: - return "xdg-open"; - } -} - -function stripWrappingQuotes(value: string): string { - return value.replace(/^"+|"+$/g, ""); -} - -function resolvePathEnvironmentVariable(env: NodeJS.ProcessEnv): string { - return env.PATH ?? env.Path ?? env.path ?? ""; -} - -function resolveWindowsPathExtensions(env: NodeJS.ProcessEnv): ReadonlyArray { - const rawValue = env.PATHEXT; - const fallback = [".COM", ".EXE", ".BAT", ".CMD"]; - if (!rawValue) return fallback; - - const parsed = rawValue - .split(";") - .map((entry) => entry.trim()) - .filter((entry) => entry.length > 0) - .map((entry) => (entry.startsWith(".") ? entry.toUpperCase() : `.${entry.toUpperCase()}`)); - return parsed.length > 0 ? Array.from(new Set(parsed)) : fallback; -} - -function resolveCommandCandidates( - command: string, - platform: NodeJS.Platform, - windowsPathExtensions: ReadonlyArray, -): ReadonlyArray { - if (platform !== "win32") return [command]; - const extension = extname(command); - const normalizedExtension = extension.toUpperCase(); - - if (extension.length > 0 && windowsPathExtensions.includes(normalizedExtension)) { - const commandWithoutExtension = command.slice(0, -extension.length); - return Array.from( - new Set([ - command, - `${commandWithoutExtension}${normalizedExtension}`, - `${commandWithoutExtension}${normalizedExtension.toLowerCase()}`, - ]), - ); - } - - const candidates: string[] = []; - for (const extension of windowsPathExtensions) { - candidates.push(`${command}${extension}`); - candidates.push(`${command}${extension.toLowerCase()}`); - } - return Array.from(new Set(candidates)); -} - -function isExecutableFile( - filePath: string, - platform: NodeJS.Platform, - windowsPathExtensions: ReadonlyArray, -): boolean { - try { - const stat = 
statSync(filePath); - if (!stat.isFile()) return false; - if (platform === "win32") { - const extension = extname(filePath); - if (extension.length === 0) return false; - return windowsPathExtensions.includes(extension.toUpperCase()); - } - accessSync(filePath, constants.X_OK); - return true; - } catch { - return false; - } -} - -function resolvePathDelimiter(platform: NodeJS.Platform): string { - return platform === "win32" ? ";" : ":"; -} - -export function isCommandAvailable( - command: string, - options: CommandAvailabilityOptions = {}, -): boolean { - const platform = options.platform ?? process.platform; - const env = options.env ?? process.env; - const windowsPathExtensions = platform === "win32" ? resolveWindowsPathExtensions(env) : []; - const commandCandidates = resolveCommandCandidates(command, platform, windowsPathExtensions); - - if (command.includes("/") || command.includes("\\")) { - return commandCandidates.some((candidate) => - isExecutableFile(candidate, platform, windowsPathExtensions), - ); - } - - const pathValue = resolvePathEnvironmentVariable(env); - if (pathValue.length === 0) return false; - const pathEntries = pathValue - .split(resolvePathDelimiter(platform)) - .map((entry) => stripWrappingQuotes(entry.trim())) - .filter((entry) => entry.length > 0); - - for (const pathEntry of pathEntries) { - for (const candidate of commandCandidates) { - if (isExecutableFile(join(pathEntry, candidate), platform, windowsPathExtensions)) { - return true; - } - } - } - return false; -} - -export function resolveAvailableEditors( - platform: NodeJS.Platform = process.platform, - env: NodeJS.ProcessEnv = process.env, -): ReadonlyArray { - const available: EditorId[] = []; - - for (const editor of EDITORS) { - const command = editor.command ?? 
fileManagerCommandForPlatform(platform); - if (isCommandAvailable(command, { platform, env })) { - available.push(editor.id); - } - } - - return available; -} +import { + launchDetached, + resolveEditorLaunch, + resolveAvailableEditors, + type OpenInEditorInput, +} from "./open.logic"; +export { resolveAvailableEditors }; /** * OpenShape - Service API for browser and editor launch actions. */ -export interface OpenShape { +interface OpenShape { /** * Open a URL target in the default browser. */ @@ -233,66 +34,6 @@ export class Open extends ServiceMap.Service()("t3/open") {} // Implementations // ============================== -export const resolveEditorLaunch = Effect.fn("resolveEditorLaunch")(function* ( - input: OpenInEditorInput, - platform: NodeJS.Platform = process.platform, -): Effect.fn.Return { - yield* Effect.annotateCurrentSpan({ - "open.editor": input.editor, - "open.cwd": input.cwd, - "open.platform": platform, - }); - const editorDef = EDITORS.find((editor) => editor.id === input.editor); - if (!editorDef) { - return yield* new OpenError({ message: `Unknown editor: ${input.editor}` }); - } - - if (editorDef.command) { - return { - command: editorDef.command, - args: resolveCommandEditorArgs(editorDef, input.cwd), - }; - } - - if (editorDef.id !== "file-manager") { - return yield* new OpenError({ message: `Unsupported editor: ${input.editor}` }); - } - - return { command: fileManagerCommandForPlatform(platform), args: [input.cwd] }; -}); - -export const launchDetached = (launch: EditorLaunch) => - Effect.gen(function* () { - if (!isCommandAvailable(launch.command)) { - return yield* new OpenError({ message: `Editor command not found: ${launch.command}` }); - } - - yield* Effect.callback((resume) => { - let child; - try { - child = spawn(launch.command, [...launch.args], { - detached: true, - stdio: "ignore", - shell: process.platform === "win32", - }); - } catch (error) { - return resume( - Effect.fail(new OpenError({ message: "failed to spawn detached 
process", cause: error })), - ); - } - - const handleSpawn = () => { - child.unref(); - resume(Effect.void); - }; - - child.once("spawn", handleSpawn); - child.once("error", (cause) => - resume(Effect.fail(new OpenError({ message: "failed to spawn detached process", cause }))), - ); - }); - }); - const make = Effect.gen(function* () { const open = yield* Effect.tryPromise({ try: () => import("open"), diff --git a/apps/server/src/orchestration/Errors.ts b/apps/server/src/orchestration/Errors.ts index 1ea038e1d1..3f5217e588 100644 --- a/apps/server/src/orchestration/Errors.ts +++ b/apps/server/src/orchestration/Errors.ts @@ -2,30 +2,6 @@ import { SchemaIssue, Schema } from "effect"; import type { ProjectionRepositoryError } from "../persistence/Errors.ts"; -export class OrchestrationCommandJsonParseError extends Schema.TaggedErrorClass()( - "OrchestrationCommandJsonParseError", - { - detail: Schema.String, - cause: Schema.optional(Schema.Defect), - }, -) { - override get message(): string { - return `Invalid orchestration command JSON: ${this.detail}`; - } -} - -export class OrchestrationCommandDecodeError extends Schema.TaggedErrorClass()( - "OrchestrationCommandDecodeError", - { - issue: Schema.String, - cause: Schema.optional(Schema.Defect), - }, -) { - override get message(): string { - return `Invalid orchestration command payload: ${this.issue}`; - } -} - export class OrchestrationCommandInvariantError extends Schema.TaggedErrorClass()( "OrchestrationCommandInvariantError", { @@ -84,19 +60,6 @@ export type OrchestrationDispatchError = | OrchestrationCommandPreviouslyRejectedError | OrchestrationProjectorDecodeError | OrchestrationListenerCallbackError; - -export type OrchestrationEngineError = - | OrchestrationDispatchError - | OrchestrationCommandJsonParseError - | OrchestrationCommandDecodeError; - -export function toOrchestrationCommandDecodeError(error: Schema.SchemaError) { - return new OrchestrationCommandDecodeError({ - issue: 
SchemaIssue.makeFormatterDefault()(error.issue), - cause: error, - }); -} - export function toProjectorDecodeError(eventType: string) { return (error: Schema.SchemaError): OrchestrationProjectorDecodeError => new OrchestrationProjectorDecodeError({ @@ -105,19 +68,3 @@ export function toProjectorDecodeError(eventType: string) { cause: error, }); } - -export function toOrchestrationJsonParseError(cause: unknown) { - return new OrchestrationCommandJsonParseError({ - detail: `Failed to parse orchestration command JSON`, - cause, - }); -} - -export function toListenerCallbackError(listener: "read-model" | "domain-event") { - return (cause: unknown): OrchestrationListenerCallbackError => - new OrchestrationListenerCallbackError({ - listener, - detail: `Failed to invoke orchestration ${listener} listener`, - cause, - }); -} diff --git a/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts b/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts index ab9f633e02..ae367ffb57 100644 --- a/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts +++ b/apps/server/src/orchestration/Layers/CheckpointReactor.test.ts @@ -27,7 +27,7 @@ import { OrchestrationProjectionSnapshotQueryLive } from "./ProjectionSnapshotQu import { RuntimeReceiptBusLive } from "./RuntimeReceiptBus.ts"; import { OrchestrationEventStoreLive } from "../../persistence/Layers/OrchestrationEventStore.ts"; import { OrchestrationCommandReceiptRepositoryLive } from "../../persistence/Layers/OrchestrationCommandReceipts.ts"; -import { SqlitePersistenceMemory } from "../../persistence/Layers/Sqlite.ts"; +import { SqlitePersistenceMemory } from "../../persistence/Layers/Sqlite.testing.ts"; import { OrchestrationEngineService, type OrchestrationEngineShape, diff --git a/apps/server/src/orchestration/Layers/OrchestrationEngine.test.ts b/apps/server/src/orchestration/Layers/OrchestrationEngine.test.ts index 5a0a6113f0..5d353cc571 100644 --- 
a/apps/server/src/orchestration/Layers/OrchestrationEngine.test.ts +++ b/apps/server/src/orchestration/Layers/OrchestrationEngine.test.ts @@ -14,7 +14,7 @@ import { describe, expect, it } from "vitest"; import { PersistenceSqlError } from "../../persistence/Errors.ts"; import { OrchestrationCommandReceiptRepositoryLive } from "../../persistence/Layers/OrchestrationCommandReceipts.ts"; import { OrchestrationEventStoreLive } from "../../persistence/Layers/OrchestrationEventStore.ts"; -import { SqlitePersistenceMemory } from "../../persistence/Layers/Sqlite.ts"; +import { SqlitePersistenceMemory } from "../../persistence/Layers/Sqlite.testing.ts"; import { OrchestrationEventStore, type OrchestrationEventStoreShape, diff --git a/apps/server/src/orchestration/Layers/OrchestrationReactor.test.ts b/apps/server/src/orchestration/Layers/OrchestrationReactor.test.ts index d60f0cf722..6b9ddda68b 100644 --- a/apps/server/src/orchestration/Layers/OrchestrationReactor.test.ts +++ b/apps/server/src/orchestration/Layers/OrchestrationReactor.test.ts @@ -5,7 +5,7 @@ import { CheckpointReactor } from "../Services/CheckpointReactor.ts"; import { ProviderCommandReactor } from "../Services/ProviderCommandReactor.ts"; import { ProviderRuntimeIngestionService } from "../Services/ProviderRuntimeIngestion.ts"; import { OrchestrationReactor } from "../Services/OrchestrationReactor.ts"; -import { makeOrchestrationReactor } from "./OrchestrationReactor.ts"; +import { OrchestrationReactorLive } from "./OrchestrationReactor.ts"; describe("OrchestrationReactor", () => { let runtime: ManagedRuntime.ManagedRuntime | null = null; @@ -21,7 +21,7 @@ describe("OrchestrationReactor", () => { const started: string[] = []; runtime = ManagedRuntime.make( - Layer.effect(OrchestrationReactor, makeOrchestrationReactor).pipe( + OrchestrationReactorLive.pipe( Layer.provideMerge( Layer.succeed(ProviderRuntimeIngestionService, { start: () => { diff --git 
a/apps/server/src/orchestration/Layers/OrchestrationReactor.ts b/apps/server/src/orchestration/Layers/OrchestrationReactor.ts index 99d30c57a2..20f2b1e492 100644 --- a/apps/server/src/orchestration/Layers/OrchestrationReactor.ts +++ b/apps/server/src/orchestration/Layers/OrchestrationReactor.ts @@ -8,7 +8,7 @@ import { CheckpointReactor } from "../Services/CheckpointReactor.ts"; import { ProviderCommandReactor } from "../Services/ProviderCommandReactor.ts"; import { ProviderRuntimeIngestionService } from "../Services/ProviderRuntimeIngestion.ts"; -export const makeOrchestrationReactor = Effect.gen(function* () { +const makeOrchestrationReactor = Effect.gen(function* () { const providerRuntimeIngestion = yield* ProviderRuntimeIngestionService; const providerCommandReactor = yield* ProviderCommandReactor; const checkpointReactor = yield* CheckpointReactor; diff --git a/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts b/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts index 1850745469..614746823b 100644 --- a/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts +++ b/apps/server/src/orchestration/Layers/ProjectionPipeline.test.ts @@ -18,7 +18,7 @@ import { OrchestrationEventStoreLive } from "../../persistence/Layers/Orchestrat import { makeSqlitePersistenceLive, SqlitePersistenceMemory, -} from "../../persistence/Layers/Sqlite.ts"; +} from "../../persistence/Layers/Sqlite.testing.ts"; import { OrchestrationEventStore } from "../../persistence/Services/OrchestrationEventStore.ts"; import { OrchestrationEngineLive } from "./OrchestrationEngine.ts"; import { diff --git a/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts b/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts index c038bc9d2c..0195b42e54 100644 --- a/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts +++ b/apps/server/src/orchestration/Layers/ProjectionSnapshotQuery.test.ts @@ -3,7 +3,7 @@ import { assert, it } 
from "@effect/vitest"; import { Effect, Layer } from "effect"; import * as SqlClient from "effect/unstable/sql/SqlClient"; -import { SqlitePersistenceMemory } from "../../persistence/Layers/Sqlite.ts"; +import { SqlitePersistenceMemory } from "../../persistence/Layers/Sqlite.testing.ts"; import { ORCHESTRATION_PROJECTOR_NAMES } from "./ProjectionPipeline.ts"; import { OrchestrationProjectionSnapshotQueryLive } from "./ProjectionSnapshotQuery.ts"; import { ProjectionSnapshotQuery } from "../Services/ProjectionSnapshotQuery.ts"; diff --git a/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts b/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts index ca3dc04517..fd0e003b96 100644 --- a/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts +++ b/apps/server/src/orchestration/Layers/ProviderCommandReactor.test.ts @@ -21,7 +21,7 @@ import { TextGenerationError } from "@t3tools/contracts"; import { ProviderAdapterRequestError } from "../../provider/Errors.ts"; import { OrchestrationEventStoreLive } from "../../persistence/Layers/OrchestrationEventStore.ts"; import { OrchestrationCommandReceiptRepositoryLive } from "../../persistence/Layers/OrchestrationCommandReceipts.ts"; -import { SqlitePersistenceMemory } from "../../persistence/Layers/Sqlite.ts"; +import { SqlitePersistenceMemory } from "../../persistence/Layers/Sqlite.testing.ts"; import { ProviderService, type ProviderServiceShape, diff --git a/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts b/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts index 6c27e1010c..55a1566e66 100644 --- a/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts +++ b/apps/server/src/orchestration/Layers/ProviderRuntimeIngestion.test.ts @@ -24,7 +24,7 @@ import { afterEach, describe, expect, it } from "vitest"; import { OrchestrationEventStoreLive } from "../../persistence/Layers/OrchestrationEventStore.ts"; import { 
OrchestrationCommandReceiptRepositoryLive } from "../../persistence/Layers/OrchestrationCommandReceipts.ts"; -import { SqlitePersistenceMemory } from "../../persistence/Layers/Sqlite.ts"; +import { SqlitePersistenceMemory } from "../../persistence/Layers/Sqlite.testing.ts"; import { ProviderService, type ProviderServiceShape, diff --git a/apps/server/src/orchestration/Schemas.ts b/apps/server/src/orchestration/Schemas.ts index f7ebf69344..24d9625e39 100644 --- a/apps/server/src/orchestration/Schemas.ts +++ b/apps/server/src/orchestration/Schemas.ts @@ -15,11 +15,6 @@ import { ThreadTurnDiffCompletedPayload as ContractsThreadTurnDiffCompletedPayloadSchema, ThreadRevertedPayload as ContractsThreadRevertedPayloadSchema, ThreadActivityAppendedPayload as ContractsThreadActivityAppendedPayloadSchema, - ThreadTurnStartRequestedPayload as ContractsThreadTurnStartRequestedPayloadSchema, - ThreadTurnInterruptRequestedPayload as ContractsThreadTurnInterruptRequestedPayloadSchema, - ThreadApprovalResponseRequestedPayload as ContractsThreadApprovalResponseRequestedPayloadSchema, - ThreadCheckpointRevertRequestedPayload as ContractsThreadCheckpointRevertRequestedPayloadSchema, - ThreadSessionStopRequestedPayload as ContractsThreadSessionStopRequestedPayloadSchema, } from "@t3tools/contracts"; // Server-internal alias surface, backed by contract schemas as the source of truth. 
@@ -41,12 +36,3 @@ export const ThreadSessionSetPayload = ContractsThreadSessionSetPayloadSchema; export const ThreadTurnDiffCompletedPayload = ContractsThreadTurnDiffCompletedPayloadSchema; export const ThreadRevertedPayload = ContractsThreadRevertedPayloadSchema; export const ThreadActivityAppendedPayload = ContractsThreadActivityAppendedPayloadSchema; - -export const ThreadTurnStartRequestedPayload = ContractsThreadTurnStartRequestedPayloadSchema; -export const ThreadTurnInterruptRequestedPayload = - ContractsThreadTurnInterruptRequestedPayloadSchema; -export const ThreadApprovalResponseRequestedPayload = - ContractsThreadApprovalResponseRequestedPayloadSchema; -export const ThreadCheckpointRevertRequestedPayload = - ContractsThreadCheckpointRevertRequestedPayloadSchema; -export const ThreadSessionStopRequestedPayload = ContractsThreadSessionStopRequestedPayloadSchema; diff --git a/apps/server/src/orchestration/Services/RuntimeReceiptBus.shared.ts b/apps/server/src/orchestration/Services/RuntimeReceiptBus.shared.ts new file mode 100644 index 0000000000..5e93c53b69 --- /dev/null +++ b/apps/server/src/orchestration/Services/RuntimeReceiptBus.shared.ts @@ -0,0 +1,22 @@ +import { CheckpointRef, IsoDateTime, NonNegativeInt, ThreadId, TurnId } from "@t3tools/contracts"; +import { Schema } from "effect"; + +export const CheckpointDiffFinalizedReceipt = Schema.Struct({ + type: Schema.Literal("checkpoint.diff.finalized"), + threadId: ThreadId, + turnId: TurnId, + checkpointTurnCount: NonNegativeInt, + checkpointRef: CheckpointRef, + status: Schema.Literals(["ready", "missing", "error"]), + createdAt: IsoDateTime, +}); +export type CheckpointDiffFinalizedReceipt = typeof CheckpointDiffFinalizedReceipt.Type; + +export const TurnProcessingQuiescedReceipt = Schema.Struct({ + type: Schema.Literal("turn.processing.quiesced"), + threadId: ThreadId, + turnId: TurnId, + checkpointTurnCount: NonNegativeInt, + createdAt: IsoDateTime, +}); +export type TurnProcessingQuiescedReceipt 
= typeof TurnProcessingQuiescedReceipt.Type; diff --git a/apps/server/src/orchestration/Services/RuntimeReceiptBus.ts b/apps/server/src/orchestration/Services/RuntimeReceiptBus.ts index a08ff91685..8b9d44213d 100644 --- a/apps/server/src/orchestration/Services/RuntimeReceiptBus.ts +++ b/apps/server/src/orchestration/Services/RuntimeReceiptBus.ts @@ -1,35 +1,18 @@ -import { CheckpointRef, IsoDateTime, NonNegativeInt, ThreadId, TurnId } from "@t3tools/contracts"; +import { CheckpointRef, IsoDateTime, NonNegativeInt, ThreadId } from "@t3tools/contracts"; import { Schema, ServiceMap } from "effect"; import type { Effect, Stream } from "effect"; +import { + CheckpointDiffFinalizedReceipt, + TurnProcessingQuiescedReceipt, +} from "./RuntimeReceiptBus.shared.ts"; -export const CheckpointBaselineCapturedReceipt = Schema.Struct({ +const CheckpointBaselineCapturedReceipt = Schema.Struct({ type: Schema.Literal("checkpoint.baseline.captured"), threadId: ThreadId, checkpointTurnCount: NonNegativeInt, checkpointRef: CheckpointRef, createdAt: IsoDateTime, }); -export type CheckpointBaselineCapturedReceipt = typeof CheckpointBaselineCapturedReceipt.Type; - -export const CheckpointDiffFinalizedReceipt = Schema.Struct({ - type: Schema.Literal("checkpoint.diff.finalized"), - threadId: ThreadId, - turnId: TurnId, - checkpointTurnCount: NonNegativeInt, - checkpointRef: CheckpointRef, - status: Schema.Literals(["ready", "missing", "error"]), - createdAt: IsoDateTime, -}); -export type CheckpointDiffFinalizedReceipt = typeof CheckpointDiffFinalizedReceipt.Type; - -export const TurnProcessingQuiescedReceipt = Schema.Struct({ - type: Schema.Literal("turn.processing.quiesced"), - threadId: ThreadId, - turnId: TurnId, - checkpointTurnCount: NonNegativeInt, - createdAt: IsoDateTime, -}); -export type TurnProcessingQuiescedReceipt = typeof TurnProcessingQuiescedReceipt.Type; export const OrchestrationRuntimeReceipt = Schema.Union([ CheckpointBaselineCapturedReceipt, diff --git 
a/apps/server/src/orchestration/commandInvariants.shared.ts b/apps/server/src/orchestration/commandInvariants.shared.ts new file mode 100644 index 0000000000..bd51162705 --- /dev/null +++ b/apps/server/src/orchestration/commandInvariants.shared.ts @@ -0,0 +1,8 @@ +import type { OrchestrationReadModel, OrchestrationThread, ThreadId } from "@t3tools/contracts"; + +export function findThreadById( + readModel: OrchestrationReadModel, + threadId: ThreadId, +): OrchestrationThread | undefined { + return readModel.threads.find((thread) => thread.id === threadId); +} diff --git a/apps/server/src/orchestration/commandInvariants.test.ts b/apps/server/src/orchestration/commandInvariants.test.ts index 43d665a2c9..2dd63fa921 100644 --- a/apps/server/src/orchestration/commandInvariants.test.ts +++ b/apps/server/src/orchestration/commandInvariants.test.ts @@ -10,13 +10,12 @@ import { } from "@t3tools/contracts"; import { Effect } from "effect"; +import { requireThread, requireThreadAbsent } from "./commandInvariants.ts"; import { findThreadById, listThreadsByProjectId, requireNonNegativeInteger, - requireThread, - requireThreadAbsent, -} from "./commandInvariants.ts"; +} from "./commandInvariants.testing.ts"; const now = new Date().toISOString(); diff --git a/apps/server/src/orchestration/commandInvariants.testing.ts b/apps/server/src/orchestration/commandInvariants.testing.ts new file mode 100644 index 0000000000..6f6a78df17 --- /dev/null +++ b/apps/server/src/orchestration/commandInvariants.testing.ts @@ -0,0 +1,41 @@ +export { findThreadById } from "./commandInvariants.shared.ts"; + +import type { + OrchestrationCommand, + OrchestrationReadModel, + OrchestrationThread, + ProjectId, +} from "@t3tools/contracts"; +import { Effect } from "effect"; + +import { OrchestrationCommandInvariantError } from "./Errors.ts"; + +function invariantError(commandType: string, detail: string): OrchestrationCommandInvariantError { + return new OrchestrationCommandInvariantError({ + commandType, 
+ detail, + }); +} + +export function listThreadsByProjectId( + readModel: OrchestrationReadModel, + projectId: ProjectId, +): ReadonlyArray { + return readModel.threads.filter((thread) => thread.projectId === projectId); +} + +export function requireNonNegativeInteger(input: { + readonly commandType: OrchestrationCommand["type"]; + readonly field: string; + readonly value: number; +}): Effect.Effect { + if (Number.isInteger(input.value) && input.value >= 0) { + return Effect.void; + } + return Effect.fail( + invariantError( + input.commandType, + `${input.field} must be an integer greater than or equal to 0.`, + ), + ); +} diff --git a/apps/server/src/orchestration/commandInvariants.ts b/apps/server/src/orchestration/commandInvariants.ts index 009fdb190e..969fc689b3 100644 --- a/apps/server/src/orchestration/commandInvariants.ts +++ b/apps/server/src/orchestration/commandInvariants.ts @@ -9,6 +9,7 @@ import type { import { Effect } from "effect"; import { OrchestrationCommandInvariantError } from "./Errors.ts"; +import { findThreadById } from "./commandInvariants.shared.ts"; function invariantError(commandType: string, detail: string): OrchestrationCommandInvariantError { return new OrchestrationCommandInvariantError({ @@ -17,27 +18,13 @@ function invariantError(commandType: string, detail: string): OrchestrationComma }); } -export function findThreadById( - readModel: OrchestrationReadModel, - threadId: ThreadId, -): OrchestrationThread | undefined { - return readModel.threads.find((thread) => thread.id === threadId); -} - -export function findProjectById( +function findProjectById( readModel: OrchestrationReadModel, projectId: ProjectId, ): OrchestrationProject | undefined { return readModel.projects.find((project) => project.id === projectId); } -export function listThreadsByProjectId( - readModel: OrchestrationReadModel, - projectId: ProjectId, -): ReadonlyArray { - return readModel.threads.filter((thread) => thread.projectId === projectId); -} - export 
function requireProject(input: { readonly readModel: OrchestrationReadModel; readonly command: OrchestrationCommand; @@ -141,19 +128,3 @@ export function requireThreadAbsent(input: { ), ); } - -export function requireNonNegativeInteger(input: { - readonly commandType: OrchestrationCommand["type"]; - readonly field: string; - readonly value: number; -}): Effect.Effect { - if (Number.isInteger(input.value) && input.value >= 0) { - return Effect.void; - } - return Effect.fail( - invariantError( - input.commandType, - `${input.field} must be an integer greater than or equal to 0.`, - ), - ); -} diff --git a/apps/server/src/os-jank.ts b/apps/server/src/os-jank.ts index c3629e8fde..99c59d5090 100644 --- a/apps/server/src/os-jank.ts +++ b/apps/server/src/os-jank.ts @@ -26,7 +26,7 @@ export function fixPath( } } -export const expandHomePath = Effect.fn(function* (input: string) { +const expandHomePath = Effect.fn(function* (input: string) { const { join } = yield* Path.Path; if (input === "~") { return OS.homedir(); diff --git a/apps/server/src/persistence/Errors.ts b/apps/server/src/persistence/Errors.ts index cb1cb2f3f8..884d79ffd8 100644 --- a/apps/server/src/persistence/Errors.ts +++ b/apps/server/src/persistence/Errors.ts @@ -47,55 +47,10 @@ export function toPersistenceDecodeError(operation: string) { cause: error, }); } - -export function toPersistenceDecodeCauseError(operation: string) { - return (cause: unknown): PersistenceDecodeError => - new PersistenceDecodeError({ - operation, - issue: `Failed to execute ${operation}`, - cause, - }); -} - export const isPersistenceError = (u: unknown) => Schema.is(PersistenceSqlError)(u) || Schema.is(PersistenceDecodeError)(u); -// =============================== -// Provider Session Repository Errors -// =============================== - -export class ProviderSessionRepositoryValidationError extends Schema.TaggedErrorClass()( - "ProviderSessionRepositoryValidationError", - { - operation: Schema.String, - issue: 
Schema.String, - cause: Schema.optional(Schema.Defect), - }, -) { - override get message(): string { - return `Provider session repository validation failed in ${this.operation}: ${this.issue}`; - } -} - -export class ProviderSessionRepositoryPersistenceError extends Schema.TaggedErrorClass()( - "ProviderSessionRepositoryPersistenceError", - { - operation: Schema.String, - detail: Schema.String, - cause: Schema.optional(Schema.Defect), - }, -) { - override get message(): string { - return `Provider session repository persistence error in ${this.operation}: ${this.detail}`; - } -} - export type OrchestrationEventStoreError = PersistenceSqlError | PersistenceDecodeError; - -export type ProviderSessionRepositoryError = - | ProviderSessionRepositoryValidationError - | ProviderSessionRepositoryPersistenceError; - export type OrchestrationCommandReceiptRepositoryError = | PersistenceSqlError | PersistenceDecodeError; diff --git a/apps/server/src/persistence/Layers/OrchestrationEventStore.test.ts b/apps/server/src/persistence/Layers/OrchestrationEventStore.test.ts index 249e9d1e36..0c6ee147f4 100644 --- a/apps/server/src/persistence/Layers/OrchestrationEventStore.test.ts +++ b/apps/server/src/persistence/Layers/OrchestrationEventStore.test.ts @@ -6,7 +6,7 @@ import * as SqlClient from "effect/unstable/sql/SqlClient"; import { PersistenceDecodeError } from "../Errors.ts"; import { OrchestrationEventStore } from "../Services/OrchestrationEventStore.ts"; import { OrchestrationEventStoreLive } from "./OrchestrationEventStore.ts"; -import { SqlitePersistenceMemory } from "./Sqlite.ts"; +import { SqlitePersistenceMemory } from "./Sqlite.testing.ts"; const layer = it.layer( OrchestrationEventStoreLive.pipe(Layer.provideMerge(SqlitePersistenceMemory)), diff --git a/apps/server/src/persistence/Layers/ProjectionRepositories.test.ts b/apps/server/src/persistence/Layers/ProjectionRepositories.test.ts index b0e1774837..70b0c75bca 100644 --- 
a/apps/server/src/persistence/Layers/ProjectionRepositories.test.ts +++ b/apps/server/src/persistence/Layers/ProjectionRepositories.test.ts @@ -3,7 +3,7 @@ import { assert, it } from "@effect/vitest"; import { Effect, Layer, Option } from "effect"; import * as SqlClient from "effect/unstable/sql/SqlClient"; -import { SqlitePersistenceMemory } from "./Sqlite.ts"; +import { SqlitePersistenceMemory } from "./Sqlite.testing.ts"; import { ProjectionProjectRepositoryLive } from "./ProjectionProjects.ts"; import { ProjectionThreadRepositoryLive } from "./ProjectionThreads.ts"; import { ProjectionProjectRepository } from "../Services/ProjectionProjects.ts"; diff --git a/apps/server/src/persistence/Layers/ProjectionThreadMessages.test.ts b/apps/server/src/persistence/Layers/ProjectionThreadMessages.test.ts index 5993ad6c20..fde27e7f6f 100644 --- a/apps/server/src/persistence/Layers/ProjectionThreadMessages.test.ts +++ b/apps/server/src/persistence/Layers/ProjectionThreadMessages.test.ts @@ -4,7 +4,7 @@ import { Effect, Layer } from "effect"; import { ProjectionThreadMessageRepository } from "../Services/ProjectionThreadMessages.ts"; import { ProjectionThreadMessageRepositoryLive } from "./ProjectionThreadMessages.ts"; -import { SqlitePersistenceMemory } from "./Sqlite.ts"; +import { SqlitePersistenceMemory } from "./Sqlite.testing.ts"; const layer = it.layer( ProjectionThreadMessageRepositoryLive.pipe(Layer.provideMerge(SqlitePersistenceMemory)), diff --git a/apps/server/src/persistence/Layers/Sqlite.shared.ts b/apps/server/src/persistence/Layers/Sqlite.shared.ts new file mode 100644 index 0000000000..cc7979f0db --- /dev/null +++ b/apps/server/src/persistence/Layers/Sqlite.shared.ts @@ -0,0 +1,46 @@ +import { mkdir } from "node:fs/promises"; +import { dirname } from "node:path"; + +import { Effect, Layer } from "effect"; +import * as SqlClient from "effect/unstable/sql/SqlClient"; + +import { runMigrations } from "../Migrations.ts"; + +type RuntimeSqliteLayerConfig = { + 
readonly filename: string; +}; + +type Loader = { + layer: (config: RuntimeSqliteLayerConfig) => Layer.Layer; +}; + +const defaultSqliteClientLoaders = { + bun: () => import("@effect/sql-sqlite-bun/SqliteClient"), + node: () => import("../NodeSqliteClient.ts"), +} satisfies Record Promise>; + +const makeRuntimeSqliteLayer = ( + config: RuntimeSqliteLayerConfig, +): Layer.Layer => + Effect.gen(function* () { + const runtime = process.versions.bun !== undefined ? "bun" : "node"; + const loader = defaultSqliteClientLoaders[runtime]; + const clientModule = yield* Effect.promise(loader); + return clientModule.layer(config); + }).pipe(Layer.unwrap); + +const setup = Layer.effectDiscard( + Effect.gen(function* () { + const sql = yield* SqlClient.SqlClient; + yield* sql`PRAGMA journal_mode = WAL;`; + yield* sql`PRAGMA foreign_keys = ON;`; + yield* runMigrations(); + }), +); + +export const makeSqlitePersistenceLive = (dbPath: string) => + Effect.gen(function* () { + yield* Effect.tryPromise(() => mkdir(dirname(dbPath), { recursive: true })); + + return Layer.provideMerge(setup, makeRuntimeSqliteLayer({ filename: dbPath })); + }).pipe(Layer.unwrap); diff --git a/apps/server/src/persistence/Layers/Sqlite.testing.ts b/apps/server/src/persistence/Layers/Sqlite.testing.ts new file mode 100644 index 0000000000..ba0c33b8d2 --- /dev/null +++ b/apps/server/src/persistence/Layers/Sqlite.testing.ts @@ -0,0 +1,5 @@ +import { makeSqlitePersistenceLive } from "./Sqlite.shared"; + +export { makeSqlitePersistenceLive }; + +export const SqlitePersistenceMemory = makeSqlitePersistenceLive(":memory:"); diff --git a/apps/server/src/persistence/Layers/Sqlite.ts b/apps/server/src/persistence/Layers/Sqlite.ts index 58556099db..14ef63e3a2 100644 --- a/apps/server/src/persistence/Layers/Sqlite.ts +++ b/apps/server/src/persistence/Layers/Sqlite.ts @@ -1,63 +1,6 @@ -import { Effect, Layer, FileSystem, Path } from "effect"; -import * as SqlClient from "effect/unstable/sql/SqlClient"; - -import { 
runMigrations } from "../Migrations.ts"; +import { Effect, Layer } from "effect"; import { ServerConfig } from "../../config.ts"; - -type RuntimeSqliteLayerConfig = { - readonly filename: string; - readonly spanAttributes?: Record; -}; - -type Loader = { - layer: (config: RuntimeSqliteLayerConfig) => Layer.Layer; -}; -const defaultSqliteClientLoaders = { - bun: () => import("@effect/sql-sqlite-bun/SqliteClient"), - node: () => import("../NodeSqliteClient.ts"), -} satisfies Record Promise>; - -const makeRuntimeSqliteLayer = Effect.fn("makeRuntimeSqliteLayer")(function* ( - config: RuntimeSqliteLayerConfig, -) { - const runtime = process.versions.bun !== undefined ? "bun" : "node"; - const loader = defaultSqliteClientLoaders[runtime]; - const clientModule = yield* Effect.promise(loader); - return clientModule.layer(config); -}, Layer.unwrap); - -const setup = Layer.effectDiscard( - Effect.gen(function* () { - const sql = yield* SqlClient.SqlClient; - yield* sql`PRAGMA journal_mode = WAL;`; - yield* sql`PRAGMA foreign_keys = ON;`; - yield* runMigrations(); - }), -); - -export const makeSqlitePersistenceLive = Effect.fn("makeSqlitePersistenceLive")(function* ( - dbPath: string, -) { - const fs = yield* FileSystem.FileSystem; - const path = yield* Path.Path; - yield* fs.makeDirectory(path.dirname(dbPath), { recursive: true }); - - return Layer.provideMerge( - setup, - makeRuntimeSqliteLayer({ - filename: dbPath, - spanAttributes: { - "db.name": path.basename(dbPath), - "service.name": "t3-server", - }, - }), - ); -}, Layer.unwrap); - -export const SqlitePersistenceMemory = Layer.provideMerge( - setup, - makeRuntimeSqliteLayer({ filename: ":memory:" }), -); +import { makeSqlitePersistenceLive } from "./Sqlite.shared"; export const layerConfig = Layer.unwrap( Effect.map(Effect.service(ServerConfig), ({ dbPath }) => makeSqlitePersistenceLive(dbPath)), diff --git a/apps/server/src/persistence/Migrations.ts b/apps/server/src/persistence/Migrations.ts index 
a03c3c2d18..631c766b46 100644 --- a/apps/server/src/persistence/Migrations.ts +++ b/apps/server/src/persistence/Migrations.ts @@ -9,7 +9,6 @@ */ import * as Migrator from "effect/unstable/sql/Migrator"; -import * as Layer from "effect/Layer"; import * as Effect from "effect/Effect"; // Import all migrations statically @@ -43,7 +42,7 @@ import Migration0019 from "./Migrations/019_ProjectionSnapshotLookupIndexes.ts"; * Uses Migrator.fromRecord which parses the key format and * returns migrations sorted by ID. */ -export const migrationEntries = [ +const migrationEntries = [ [1, "OrchestrationEvents", Migration0001], [2, "OrchestrationCommandReceipts", Migration0002], [3, "CheckpointDiffBlobs", Migration0003], @@ -65,7 +64,7 @@ export const migrationEntries = [ [19, "ProjectionSnapshotLookupIndexes", Migration0019], ] as const; -export const makeMigrationLoader = (throughId?: number) => +const makeMigrationLoader = (throughId?: number) => Migrator.fromRecord( Object.fromEntries( migrationEntries @@ -80,7 +79,7 @@ export const makeMigrationLoader = (throughId?: number) => */ const run = Migrator.make({}); -export interface RunMigrationsOptions { +interface RunMigrationsOptions { readonly toMigrationInclusive?: number | undefined; } @@ -108,22 +107,3 @@ export const runMigrations = Effect.fn("runMigrations")(function* ({ ); return executedMigrations; }); - -/** - * Layer that runs migrations when the layer is built. - * - * Use this to ensure migrations run before your application starts. - * Migrations are run automatically - no separate script is needed. 
- * - * @example - * ```typescript - * import { MigrationsLive } from "@acme/db/Migrations" - * import * as SqliteClient from "@acme/db/SqliteClient" - * - * // Migrations run automatically when SqliteClient is provided - * const AppLayer = MigrationsLive.pipe( - * Layer.provideMerge(SqliteClient.layer({ filename: "database.sqlite" })) - * ) - * ``` - */ -export const MigrationsLive = Layer.effectDiscard(runMigrations()); diff --git a/apps/server/src/persistence/Services/ProjectionCheckpoints.ts b/apps/server/src/persistence/Services/ProjectionCheckpoints.ts index 191d0af968..94540da115 100644 --- a/apps/server/src/persistence/Services/ProjectionCheckpoints.ts +++ b/apps/server/src/persistence/Services/ProjectionCheckpoints.ts @@ -16,10 +16,7 @@ import { ThreadId, TurnId, } from "@t3tools/contracts"; -import { Option, ServiceMap, Schema } from "effect"; -import type { Effect } from "effect"; - -import type { ProjectionRepositoryError } from "../Errors.ts"; +import { Schema } from "effect"; export const ProjectionCheckpoint = Schema.Struct({ threadId: ThreadId, @@ -32,62 +29,3 @@ export const ProjectionCheckpoint = Schema.Struct({ completedAt: IsoDateTime, }); export type ProjectionCheckpoint = typeof ProjectionCheckpoint.Type; - -export const ListByThreadIdInput = Schema.Struct({ - threadId: ThreadId, -}); -export type ListByThreadIdInput = typeof ListByThreadIdInput.Type; - -export const GetByThreadAndTurnCountInput = Schema.Struct({ - threadId: ThreadId, - checkpointTurnCount: NonNegativeInt, -}); -export type GetByThreadAndTurnCountInput = typeof GetByThreadAndTurnCountInput.Type; - -export const DeleteByThreadIdInput = Schema.Struct({ - threadId: ThreadId, -}); -export type DeleteByThreadIdInput = typeof DeleteByThreadIdInput.Type; - -/** - * ProjectionCheckpointRepositoryShape - Service API for projected checkpoints. - */ -export interface ProjectionCheckpointRepositoryShape { - /** - * Insert or replace a projected checkpoint row. 
- * - * Upserts by composite key `(threadId, checkpointTurnCount)`. - */ - readonly upsert: (row: ProjectionCheckpoint) => Effect.Effect; - - /** - * List projected checkpoints for a thread. - * - * Returned in ascending checkpoint turn-count order. - */ - readonly listByThreadId: ( - input: ListByThreadIdInput, - ) => Effect.Effect, ProjectionRepositoryError>; - - /** - * Read a projected checkpoint by thread and turn-count key. - */ - readonly getByThreadAndTurnCount: ( - input: GetByThreadAndTurnCountInput, - ) => Effect.Effect, ProjectionRepositoryError>; - - /** - * Delete projected checkpoint rows by thread. - */ - readonly deleteByThreadId: ( - input: DeleteByThreadIdInput, - ) => Effect.Effect; -} - -/** - * ProjectionCheckpointRepository - Service tag for checkpoint projection persistence. - */ -export class ProjectionCheckpointRepository extends ServiceMap.Service< - ProjectionCheckpointRepository, - ProjectionCheckpointRepositoryShape ->()("t3/persistence/Services/ProjectionCheckpoints/ProjectionCheckpointRepository") {} diff --git a/apps/server/src/persistence/Services/ProjectionTurns.ts b/apps/server/src/persistence/Services/ProjectionTurns.ts index 95dab450bf..16bef6d373 100644 --- a/apps/server/src/persistence/Services/ProjectionTurns.ts +++ b/apps/server/src/persistence/Services/ProjectionTurns.ts @@ -21,15 +21,13 @@ import { Option, Schema, ServiceMap } from "effect"; import type { Effect } from "effect"; import type { ProjectionRepositoryError } from "../Errors.ts"; - -export const ProjectionTurnState = Schema.Literals([ +const ProjectionTurnState = Schema.Literals([ "pending", "running", "interrupted", "completed", "error", ]); -export type ProjectionTurnState = typeof ProjectionTurnState.Type; export const ProjectionTurn = Schema.Struct({ threadId: ThreadId, diff --git a/apps/server/src/processRunner.ts b/apps/server/src/processRunner.ts index 5402612887..cd9e2a498c 100644 --- a/apps/server/src/processRunner.ts +++ 
b/apps/server/src/processRunner.ts @@ -1,6 +1,6 @@ import { type ChildProcess as ChildProcessHandle, spawn, spawnSync } from "node:child_process"; -export interface ProcessRunOptions { +interface ProcessRunOptions { cwd?: string | undefined; timeoutMs?: number | undefined; env?: NodeJS.ProcessEnv | undefined; diff --git a/apps/server/src/project/Layers/ProjectFaviconResolver.ts b/apps/server/src/project/Layers/ProjectFaviconResolver.ts index 3004a7a45c..991f1ea6fb 100644 --- a/apps/server/src/project/Layers/ProjectFaviconResolver.ts +++ b/apps/server/src/project/Layers/ProjectFaviconResolver.ts @@ -54,7 +54,7 @@ function extractIconHref(source: string): string | null { return null; } -export const makeProjectFaviconResolver = Effect.gen(function* () { +const makeProjectFaviconResolver = Effect.gen(function* () { const fileSystem = yield* FileSystem.FileSystem; const path = yield* Path.Path; diff --git a/apps/server/src/provider/Layers/ClaudeAdapter.test.ts b/apps/server/src/provider/Layers/ClaudeAdapter.test.ts index 5a09d8b6ba..89e9fa53e0 100644 --- a/apps/server/src/provider/Layers/ClaudeAdapter.test.ts +++ b/apps/server/src/provider/Layers/ClaudeAdapter.test.ts @@ -24,7 +24,7 @@ import { ServerConfig } from "../../config.ts"; import { ServerSettingsService } from "../../serverSettings.ts"; import { ProviderAdapterValidationError } from "../Errors.ts"; import { ClaudeAdapter } from "../Services/ClaudeAdapter.ts"; -import { makeClaudeAdapterLive, type ClaudeAdapterLiveOptions } from "./ClaudeAdapter.ts"; +import { makeClaudeAdapterLive } from "./ClaudeAdapter.ts"; class FakeClaudeQuery implements AsyncIterable { private readonly queue: Array = []; @@ -132,7 +132,9 @@ class FakeClaudeQuery implements AsyncIterable { function makeHarness(config?: { readonly nativeEventLogPath?: string; - readonly nativeEventLogger?: ClaudeAdapterLiveOptions["nativeEventLogger"]; + readonly nativeEventLogger?: NonNullable< + Parameters[0] + >["nativeEventLogger"]; readonly cwd?: 
string; readonly baseDir?: string; }) { @@ -144,7 +146,7 @@ function makeHarness(config?: { } | undefined; - const adapterOptions: ClaudeAdapterLiveOptions = { + const adapterOptions: NonNullable[0]> = { createQuery: (input) => { createInput = input; return query; diff --git a/apps/server/src/provider/Layers/ClaudeAdapter.ts b/apps/server/src/provider/Layers/ClaudeAdapter.ts index d99e2ad203..8e67c56687 100644 --- a/apps/server/src/provider/Layers/ClaudeAdapter.ts +++ b/apps/server/src/provider/Layers/ClaudeAdapter.ts @@ -173,7 +173,7 @@ interface ClaudeQueryRuntime extends AsyncIterable { readonly close: () => void; } -export interface ClaudeAdapterLiveOptions { +interface ClaudeAdapterLiveOptions { readonly createQuery?: (input: { readonly prompt: AsyncIterable; readonly options: ClaudeQueryOptions; @@ -3057,9 +3057,6 @@ const makeClaudeAdapter = Effect.fn("makeClaudeAdapter")(function* ( streamEvents: Stream.fromQueue(runtimeEventQueue), } satisfies ClaudeAdapterShape; }); - -export const ClaudeAdapterLive = Layer.effect(ClaudeAdapter, makeClaudeAdapter()); - export function makeClaudeAdapterLive(options?: ClaudeAdapterLiveOptions) { return Layer.effect(ClaudeAdapter, makeClaudeAdapter(options)); } diff --git a/apps/server/src/provider/Layers/ClaudeProvider.logic.ts b/apps/server/src/provider/Layers/ClaudeProvider.logic.ts new file mode 100644 index 0000000000..a40c3158e4 --- /dev/null +++ b/apps/server/src/provider/Layers/ClaudeProvider.logic.ts @@ -0,0 +1,607 @@ +import type { + ModelCapabilities, + ServerProviderModel, + ServerProviderAuth, + ServerProviderState, +} from "@t3tools/contracts"; +import { Effect, Option, Result, Schema } from "effect"; +import { ChildProcess } from "effect/unstable/process"; +import { decodeJsonResult } from "@t3tools/shared/schemaJson"; +import { query as claudeQuery } from "@anthropic-ai/claude-agent-sdk"; + +import { + buildServerProvider, + DEFAULT_TIMEOUT_MS, + detailFromResult, + extractAuthBoolean, + 
isCommandMissingCause, + parseGenericCliVersion, + providerModelsFromSettings, + spawnAndCollect, + type CommandResult, +} from "../providerSnapshot"; +import { ServerSettingsService } from "../../serverSettings"; + +const PROVIDER = "claudeAgent" as const; +const BUILT_IN_MODELS: ReadonlyArray = [ + { + slug: "claude-opus-4-6", + name: "Claude Opus 4.6", + isCustom: false, + capabilities: { + reasoningEffortLevels: [ + { value: "low", label: "Low" }, + { value: "medium", label: "Medium" }, + { value: "high", label: "High", isDefault: true }, + { value: "max", label: "Max" }, + { value: "ultrathink", label: "Ultrathink" }, + ], + supportsFastMode: true, + supportsThinkingToggle: false, + contextWindowOptions: [ + { value: "200k", label: "200k", isDefault: true }, + { value: "1m", label: "1M" }, + ], + promptInjectedEffortLevels: ["ultrathink"], + } satisfies ModelCapabilities, + }, + { + slug: "claude-sonnet-4-6", + name: "Claude Sonnet 4.6", + isCustom: false, + capabilities: { + reasoningEffortLevels: [ + { value: "low", label: "Low" }, + { value: "medium", label: "Medium" }, + { value: "high", label: "High", isDefault: true }, + { value: "ultrathink", label: "Ultrathink" }, + ], + supportsFastMode: false, + supportsThinkingToggle: false, + contextWindowOptions: [ + { value: "200k", label: "200k", isDefault: true }, + { value: "1m", label: "1M" }, + ], + promptInjectedEffortLevels: ["ultrathink"], + } satisfies ModelCapabilities, + }, + { + slug: "claude-haiku-4-5", + name: "Claude Haiku 4.5", + isCustom: false, + capabilities: { + reasoningEffortLevels: [], + supportsFastMode: false, + supportsThinkingToggle: true, + contextWindowOptions: [], + promptInjectedEffortLevels: [], + } satisfies ModelCapabilities, + }, +]; + +export function getClaudeModelCapabilities(model: string | null | undefined): ModelCapabilities { + const slug = model?.trim(); + return ( + BUILT_IN_MODELS.find((candidate) => candidate.slug === slug)?.capabilities ?? 
{ + reasoningEffortLevels: [], + supportsFastMode: false, + supportsThinkingToggle: false, + contextWindowOptions: [], + promptInjectedEffortLevels: [], + } + ); +} + +function parseClaudeAuthStatusFromOutput(result: CommandResult): { + readonly status: Exclude; + readonly auth: Pick; + readonly message?: string; +} { + const lowerOutput = `${result.stdout}\n${result.stderr}`.toLowerCase(); + + if ( + lowerOutput.includes("unknown command") || + lowerOutput.includes("unrecognized command") || + lowerOutput.includes("unexpected argument") + ) { + return { + status: "warning", + auth: { status: "unknown" }, + message: + "Claude Agent authentication status command is unavailable in this version of Claude.", + }; + } + + if ( + lowerOutput.includes("not logged in") || + lowerOutput.includes("login required") || + lowerOutput.includes("authentication required") || + lowerOutput.includes("run `claude login`") || + lowerOutput.includes("run claude login") + ) { + return { + status: "error", + auth: { status: "unauthenticated" }, + message: "Claude is not authenticated. Run `claude auth login` and try again.", + }; + } + + const parsedAuth = (() => { + const trimmed = result.stdout.trim(); + if (!trimmed || (!trimmed.startsWith("{") && !trimmed.startsWith("["))) { + return { attemptedJsonParse: false as const, auth: undefined as boolean | undefined }; + } + try { + return { + attemptedJsonParse: true as const, + auth: extractAuthBoolean(JSON.parse(trimmed)), + }; + } catch { + return { attemptedJsonParse: true as const, auth: undefined as boolean | undefined }; + } + })(); + + if (parsedAuth.auth === true) { + return { status: "ready", auth: { status: "authenticated" } }; + } + if (parsedAuth.auth === false) { + return { + status: "error", + auth: { status: "unauthenticated" }, + message: "Claude is not authenticated. 
Run `claude auth login` and try again.", + }; + } + if (parsedAuth.attemptedJsonParse) { + return { + status: "warning", + auth: { status: "unknown" }, + message: + "Could not verify Claude authentication status from JSON output (missing auth marker).", + }; + } + if (result.code === 0) { + return { status: "ready", auth: { status: "authenticated" } }; + } + + const detail = detailFromResult(result); + return { + status: "warning", + auth: { status: "unknown" }, + message: detail + ? `Could not verify Claude authentication status. ${detail}` + : "Could not verify Claude authentication status.", + }; +} + +// ── Subscription type detection ───────────────────────────────────── +// +// The SDK probe returns typed `AccountInfo.subscriptionType` directly. +// This walker is a best-effort fallback for the `claude auth status` +// JSON output whose shape is not guaranteed. + +/** Keys that directly hold a subscription/plan identifier. */ +const SUBSCRIPTION_TYPE_KEYS = [ + "subscriptionType", + "subscription_type", + "plan", + "tier", + "planType", + "plan_type", +] as const; + +/** Keys whose value may be a nested object containing subscription info. */ +const SUBSCRIPTION_CONTAINER_KEYS = ["account", "subscription", "user", "billing"] as const; +const AUTH_METHOD_KEYS = ["authMethod", "auth_method"] as const; +const AUTH_METHOD_CONTAINER_KEYS = ["auth", "account", "session"] as const; + +/** Lift an unknown value into `Option` if it is a non-empty string. */ +const asNonEmptyString = (v: unknown): Option.Option => + typeof v === "string" && v.length > 0 ? Option.some(v) : Option.none(); + +/** Lift an unknown value into `Option` if it is a plain object. */ +const asRecord = (v: unknown): Option.Option> => + typeof v === "object" && v !== null && !globalThis.Array.isArray(v) + ? Option.some(v as Record) + : Option.none(); + +/** + * Walk an unknown parsed JSON value looking for a subscription/plan + * identifier, returning the first match as an `Option`. 
+ */ +function findSubscriptionType(value: unknown): Option.Option { + if (globalThis.Array.isArray(value)) { + return Option.firstSomeOf(value.map(findSubscriptionType)); + } + + return asRecord(value).pipe( + Option.flatMap((record) => { + const direct = Option.firstSomeOf( + SUBSCRIPTION_TYPE_KEYS.map((key) => asNonEmptyString(record[key])), + ); + if (Option.isSome(direct)) return direct; + + return Option.firstSomeOf( + SUBSCRIPTION_CONTAINER_KEYS.map((key) => + asRecord(record[key]).pipe(Option.flatMap(findSubscriptionType)), + ), + ); + }), + ); +} + +function findAuthMethod(value: unknown): Option.Option { + if (globalThis.Array.isArray(value)) { + return Option.firstSomeOf(value.map(findAuthMethod)); + } + + return asRecord(value).pipe( + Option.flatMap((record) => { + const direct = Option.firstSomeOf( + AUTH_METHOD_KEYS.map((key) => asNonEmptyString(record[key])), + ); + if (Option.isSome(direct)) return direct; + + return Option.firstSomeOf( + AUTH_METHOD_CONTAINER_KEYS.map((key) => + asRecord(record[key]).pipe(Option.flatMap(findAuthMethod)), + ), + ); + }), + ); +} + +/** + * Try to extract a subscription type from the `claude auth status` JSON + * output. This is a zero-cost operation on data we already have. 
+ */ +const decodeUnknownJson = decodeJsonResult(Schema.Unknown); + +function extractSubscriptionTypeFromOutput(result: CommandResult): string | undefined { + const parsed = decodeUnknownJson(result.stdout.trim()); + if (Result.isFailure(parsed)) return undefined; + return Option.getOrUndefined(findSubscriptionType(parsed.success)); +} + +function extractClaudeAuthMethodFromOutput(result: CommandResult): string | undefined { + const parsed = decodeUnknownJson(result.stdout.trim()); + if (Result.isFailure(parsed)) return undefined; + return Option.getOrUndefined(findAuthMethod(parsed.success)); +} + +// ── Dynamic model capability adjustment ───────────────────────────── + +/** Subscription types where the 1M context window is included in the plan. */ +const PREMIUM_SUBSCRIPTION_TYPES = new Set([ + "max", + "maxplan", + "max5", + "max20", + "enterprise", + "team", +]); + +function toTitleCaseWords(value: string): string { + return value + .split(/[\s_-]+/g) + .filter(Boolean) + .map((part) => part[0]!.toUpperCase() + part.slice(1).toLowerCase()) + .join(" "); +} + +function claudeSubscriptionLabel(subscriptionType: string | undefined): string | undefined { + const normalized = subscriptionType?.toLowerCase().replace(/[\s_-]+/g, ""); + if (!normalized) return undefined; + + switch (normalized) { + case "max": + case "maxplan": + case "max5": + case "max20": + return "Max"; + case "enterprise": + return "Enterprise"; + case "team": + return "Team"; + case "pro": + return "Pro"; + case "free": + return "Free"; + default: + return toTitleCaseWords(subscriptionType!); + } +} + +function normalizeClaudeAuthMethod(authMethod: string | undefined): string | undefined { + const normalized = authMethod?.toLowerCase().replace(/[\s_-]+/g, ""); + if (!normalized) return undefined; + if (normalized === "apikey") return "apiKey"; + return undefined; +} + +function claudeAuthMetadata(input: { + readonly subscriptionType: string | undefined; + readonly authMethod: string | undefined; 
+}): { readonly type: string; readonly label: string } | undefined { + if (normalizeClaudeAuthMethod(input.authMethod) === "apiKey") { + return { + type: "apiKey", + label: "Claude API Key", + }; + } + + if (input.subscriptionType) { + const subscriptionLabel = claudeSubscriptionLabel(input.subscriptionType); + return { + type: input.subscriptionType, + label: `Claude ${subscriptionLabel ?? toTitleCaseWords(input.subscriptionType)} Subscription`, + }; + } + + return undefined; +} + +/** + * Adjust the built-in model list based on the user's detected subscription. + * + * - Premium tiers (Max, Enterprise, Team): 1M context becomes the default. + * - Other tiers (Pro, free, unknown): 200k context stays the default; + * 1M remains available as a manual option so users can still enable it. + */ +function adjustModelsForSubscription( + baseModels: ReadonlyArray, + subscriptionType: string | undefined, +): ReadonlyArray { + const normalized = subscriptionType?.toLowerCase().replace(/[\s_-]+/g, ""); + if (!normalized || !PREMIUM_SUBSCRIPTION_TYPES.has(normalized)) { + return baseModels; + } + + // Flip 1M to be the default for premium users + return baseModels.map((model) => { + const caps = model.capabilities; + if (!caps || caps.contextWindowOptions.length === 0) return model; + + return { + ...model, + capabilities: { + ...caps, + contextWindowOptions: caps.contextWindowOptions.map((opt) => + opt.value === "1m" + ? { value: opt.value, label: opt.label, isDefault: true as const } + : { value: opt.value, label: opt.label }, + ), + }, + }; + }); +} + +// ── SDK capability probe ──────────────────────────────────────────── + +const CAPABILITIES_PROBE_TIMEOUT_MS = 8_000; + +/** + * Probe account information by spawning a lightweight Claude Agent SDK + * session and reading the initialization result. + * + * The prompt is never sent to the Anthropic API — we abort immediately + * after the local initialization phase completes. 
This gives us the + * user's subscription type without incurring any token cost. + * + * This is used as a fallback when `claude auth status` does not include + * subscription type information. + */ +export const probeClaudeCapabilities = (binaryPath: string) => { + const abort = new AbortController(); + return Effect.tryPromise(async () => { + const q = claudeQuery({ + prompt: ".", + options: { + persistSession: false, + pathToClaudeCodeExecutable: binaryPath, + abortController: abort, + maxTurns: 0, + settingSources: [], + allowedTools: [], + stderr: () => {}, + }, + }); + const init = await q.initializationResult(); + return { subscriptionType: init.account?.subscriptionType }; + }).pipe( + Effect.ensuring( + Effect.sync(() => { + if (!abort.signal.aborted) abort.abort(); + }), + ), + Effect.timeoutOption(CAPABILITIES_PROBE_TIMEOUT_MS), + Effect.result, + Effect.map((result) => { + if (Result.isFailure(result)) return undefined; + return Option.isSome(result.success) ? result.success.value : undefined; + }), + ); +}; + +const runClaudeCommand = Effect.fn("runClaudeCommand")(function* (args: ReadonlyArray) { + const claudeSettings = yield* Effect.service(ServerSettingsService).pipe( + Effect.flatMap((service) => service.getSettings), + Effect.map((settings) => settings.providers.claudeAgent), + ); + const command = ChildProcess.make(claudeSettings.binaryPath, [...args], { + shell: process.platform === "win32", + }); + return yield* spawnAndCollect(claudeSettings.binaryPath, command); +}); + +export const checkClaudeProviderStatus = Effect.fn("checkClaudeProviderStatus")(function* ( + resolveSubscriptionType?: (binaryPath: string) => Effect.Effect, +) { + const claudeSettings = yield* Effect.service(ServerSettingsService).pipe( + Effect.flatMap((service) => service.getSettings), + Effect.map((settings) => settings.providers.claudeAgent), + ); + const checkedAt = new Date().toISOString(); + const models = providerModelsFromSettings(BUILT_IN_MODELS, PROVIDER, 
claudeSettings.customModels); + + if (!claudeSettings.enabled) { + return buildServerProvider({ + provider: PROVIDER, + enabled: false, + checkedAt, + models, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Claude is disabled in T3 Code settings.", + }, + }); + } + + const versionProbe = yield* runClaudeCommand(["--version"]).pipe( + Effect.timeoutOption(DEFAULT_TIMEOUT_MS), + Effect.result, + ); + + if (Result.isFailure(versionProbe)) { + const error = versionProbe.failure; + return buildServerProvider({ + provider: PROVIDER, + enabled: claudeSettings.enabled, + checkedAt, + models, + probe: { + installed: !isCommandMissingCause(error), + version: null, + status: "error", + auth: { status: "unknown" }, + message: isCommandMissingCause(error) + ? "Claude Agent CLI (`claude`) is not installed or not on PATH." + : `Failed to execute Claude Agent CLI health check: ${error instanceof Error ? error.message : String(error)}.`, + }, + }); + } + + if (Option.isNone(versionProbe.success)) { + return buildServerProvider({ + provider: PROVIDER, + enabled: claudeSettings.enabled, + checkedAt, + models, + probe: { + installed: true, + version: null, + status: "error", + auth: { status: "unknown" }, + message: + "Claude Agent CLI is installed but failed to run. Timed out while running command.", + }, + }); + } + + const version = versionProbe.success.value; + const parsedVersion = parseGenericCliVersion(`${version.stdout}\n${version.stderr}`); + if (version.code !== 0) { + const detail = detailFromResult(version); + return buildServerProvider({ + provider: PROVIDER, + enabled: claudeSettings.enabled, + checkedAt, + models, + probe: { + installed: true, + version: parsedVersion, + status: "error", + auth: { status: "unknown" }, + message: detail + ? `Claude Agent CLI is installed but failed to run. 
${detail}` + : "Claude Agent CLI is installed but failed to run.", + }, + }); + } + + // ── Auth check + subscription detection ──────────────────────────── + + const authProbe = yield* runClaudeCommand(["auth", "status"]).pipe( + Effect.timeoutOption(DEFAULT_TIMEOUT_MS), + Effect.result, + ); + + // Determine subscription type from multiple sources (cheapest first): + // 1. `claude auth status` JSON output (may or may not contain it) + // 2. Cached SDK probe (spawns a Claude process on miss, reads + // `initializationResult()` for account metadata, then aborts + // immediately — no API tokens are consumed) + + let subscriptionType: string | undefined; + let authMethod: string | undefined; + + if (Result.isSuccess(authProbe) && Option.isSome(authProbe.success)) { + subscriptionType = extractSubscriptionTypeFromOutput(authProbe.success.value); + authMethod = extractClaudeAuthMethodFromOutput(authProbe.success.value); + } + + if (!subscriptionType && resolveSubscriptionType) { + subscriptionType = yield* resolveSubscriptionType(claudeSettings.binaryPath); + } + + const resolvedModels = adjustModelsForSubscription(models, subscriptionType); + + // ── Handle auth results (same logic as before, adjusted models) ── + + if (Result.isFailure(authProbe)) { + const error = authProbe.failure; + return buildServerProvider({ + provider: PROVIDER, + enabled: claudeSettings.enabled, + checkedAt, + models: resolvedModels, + probe: { + installed: true, + version: parsedVersion, + status: "warning", + auth: { status: "unknown" }, + message: + error instanceof Error + ? 
`Could not verify Claude authentication status: ${error.message}.` + : "Could not verify Claude authentication status.", + }, + }); + } + + if (Option.isNone(authProbe.success)) { + return buildServerProvider({ + provider: PROVIDER, + enabled: claudeSettings.enabled, + checkedAt, + models: resolvedModels, + probe: { + installed: true, + version: parsedVersion, + status: "warning", + auth: { status: "unknown" }, + message: "Could not verify Claude authentication status. Timed out while running command.", + }, + }); + } + + const parsed = parseClaudeAuthStatusFromOutput(authProbe.success.value); + const authMetadata = claudeAuthMetadata({ subscriptionType, authMethod }); + return buildServerProvider({ + provider: PROVIDER, + enabled: claudeSettings.enabled, + checkedAt, + models: resolvedModels, + probe: { + installed: true, + version: parsedVersion, + status: parsed.status, + auth: { + ...parsed.auth, + ...(authMetadata ? authMetadata : {}), + }, + ...(parsed.message ? { message: parsed.message } : {}), + }, + }); +}); diff --git a/apps/server/src/provider/Layers/ClaudeProvider.ts b/apps/server/src/provider/Layers/ClaudeProvider.ts index 761b795fe5..6759d9ca84 100644 --- a/apps/server/src/provider/Layers/ClaudeProvider.ts +++ b/apps/server/src/provider/Layers/ClaudeProvider.ts @@ -1,619 +1,17 @@ -import type { - ClaudeSettings, - ModelCapabilities, - ServerProvider, - ServerProviderModel, - ServerProviderAuth, - ServerProviderState, -} from "@t3tools/contracts"; -import { Cache, Duration, Effect, Equal, Layer, Option, Result, Schema, Stream } from "effect"; -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; -import { decodeJsonResult } from "@t3tools/shared/schemaJson"; -import { query as claudeQuery } from "@anthropic-ai/claude-agent-sdk"; +import type { ClaudeSettings } from "@t3tools/contracts"; +import { Cache, Duration, Effect, Equal, Layer, Stream } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; 
-import { - buildServerProvider, - DEFAULT_TIMEOUT_MS, - detailFromResult, - extractAuthBoolean, - isCommandMissingCause, - parseGenericCliVersion, - providerModelsFromSettings, - spawnAndCollect, - type CommandResult, -} from "../providerSnapshot"; import { makeManagedServerProvider } from "../makeManagedServerProvider"; import { ClaudeProvider } from "../Services/ClaudeProvider"; import { ServerSettingsService } from "../../serverSettings"; -import { ServerSettingsError } from "@t3tools/contracts"; - -const PROVIDER = "claudeAgent" as const; -const BUILT_IN_MODELS: ReadonlyArray = [ - { - slug: "claude-opus-4-6", - name: "Claude Opus 4.6", - isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "low", label: "Low" }, - { value: "medium", label: "Medium" }, - { value: "high", label: "High", isDefault: true }, - { value: "max", label: "Max" }, - { value: "ultrathink", label: "Ultrathink" }, - ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [ - { value: "200k", label: "200k", isDefault: true }, - { value: "1m", label: "1M" }, - ], - promptInjectedEffortLevels: ["ultrathink"], - } satisfies ModelCapabilities, - }, - { - slug: "claude-sonnet-4-6", - name: "Claude Sonnet 4.6", - isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "low", label: "Low" }, - { value: "medium", label: "Medium" }, - { value: "high", label: "High", isDefault: true }, - { value: "ultrathink", label: "Ultrathink" }, - ], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [ - { value: "200k", label: "200k", isDefault: true }, - { value: "1m", label: "1M" }, - ], - promptInjectedEffortLevels: ["ultrathink"], - } satisfies ModelCapabilities, - }, - { - slug: "claude-haiku-4-5", - name: "Claude Haiku 4.5", - isCustom: false, - capabilities: { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: true, - contextWindowOptions: [], - promptInjectedEffortLevels: [], 
- } satisfies ModelCapabilities, - }, -]; - -export function getClaudeModelCapabilities(model: string | null | undefined): ModelCapabilities { - const slug = model?.trim(); - return ( - BUILT_IN_MODELS.find((candidate) => candidate.slug === slug)?.capabilities ?? { - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - } - ); -} - -export function parseClaudeAuthStatusFromOutput(result: CommandResult): { - readonly status: Exclude; - readonly auth: Pick; - readonly message?: string; -} { - const lowerOutput = `${result.stdout}\n${result.stderr}`.toLowerCase(); - - if ( - lowerOutput.includes("unknown command") || - lowerOutput.includes("unrecognized command") || - lowerOutput.includes("unexpected argument") - ) { - return { - status: "warning", - auth: { status: "unknown" }, - message: - "Claude Agent authentication status command is unavailable in this version of Claude.", - }; - } - - if ( - lowerOutput.includes("not logged in") || - lowerOutput.includes("login required") || - lowerOutput.includes("authentication required") || - lowerOutput.includes("run `claude login`") || - lowerOutput.includes("run claude login") - ) { - return { - status: "error", - auth: { status: "unauthenticated" }, - message: "Claude is not authenticated. 
Run `claude auth login` and try again.", - }; - } - - const parsedAuth = (() => { - const trimmed = result.stdout.trim(); - if (!trimmed || (!trimmed.startsWith("{") && !trimmed.startsWith("["))) { - return { attemptedJsonParse: false as const, auth: undefined as boolean | undefined }; - } - try { - return { - attemptedJsonParse: true as const, - auth: extractAuthBoolean(JSON.parse(trimmed)), - }; - } catch { - return { attemptedJsonParse: false as const, auth: undefined as boolean | undefined }; - } - })(); - - if (parsedAuth.auth === true) { - return { status: "ready", auth: { status: "authenticated" } }; - } - if (parsedAuth.auth === false) { - return { - status: "error", - auth: { status: "unauthenticated" }, - message: "Claude is not authenticated. Run `claude auth login` and try again.", - }; - } - if (parsedAuth.attemptedJsonParse) { - return { - status: "warning", - auth: { status: "unknown" }, - message: - "Could not verify Claude authentication status from JSON output (missing auth marker).", - }; - } - if (result.code === 0) { - return { status: "ready", auth: { status: "authenticated" } }; - } - - const detail = detailFromResult(result); - return { - status: "warning", - auth: { status: "unknown" }, - message: detail - ? `Could not verify Claude authentication status. ${detail}` - : "Could not verify Claude authentication status.", - }; -} - -// ── Subscription type detection ───────────────────────────────────── -// -// The SDK probe returns typed `AccountInfo.subscriptionType` directly. -// This walker is a best-effort fallback for the `claude auth status` -// JSON output whose shape is not guaranteed. - -/** Keys that directly hold a subscription/plan identifier. */ -const SUBSCRIPTION_TYPE_KEYS = [ - "subscriptionType", - "subscription_type", - "plan", - "tier", - "planType", - "plan_type", -] as const; - -/** Keys whose value may be a nested object containing subscription info. 
*/ -const SUBSCRIPTION_CONTAINER_KEYS = ["account", "subscription", "user", "billing"] as const; -const AUTH_METHOD_KEYS = ["authMethod", "auth_method"] as const; -const AUTH_METHOD_CONTAINER_KEYS = ["auth", "account", "session"] as const; - -/** Lift an unknown value into `Option` if it is a non-empty string. */ -const asNonEmptyString = (v: unknown): Option.Option => - typeof v === "string" && v.length > 0 ? Option.some(v) : Option.none(); - -/** Lift an unknown value into `Option` if it is a plain object. */ -const asRecord = (v: unknown): Option.Option> => - typeof v === "object" && v !== null && !globalThis.Array.isArray(v) - ? Option.some(v as Record) - : Option.none(); - -/** - * Walk an unknown parsed JSON value looking for a subscription/plan - * identifier, returning the first match as an `Option`. - */ -function findSubscriptionType(value: unknown): Option.Option { - if (globalThis.Array.isArray(value)) { - return Option.firstSomeOf(value.map(findSubscriptionType)); - } - - return asRecord(value).pipe( - Option.flatMap((record) => { - const direct = Option.firstSomeOf( - SUBSCRIPTION_TYPE_KEYS.map((key) => asNonEmptyString(record[key])), - ); - if (Option.isSome(direct)) return direct; - - return Option.firstSomeOf( - SUBSCRIPTION_CONTAINER_KEYS.map((key) => - asRecord(record[key]).pipe(Option.flatMap(findSubscriptionType)), - ), - ); - }), - ); -} - -function findAuthMethod(value: unknown): Option.Option { - if (globalThis.Array.isArray(value)) { - return Option.firstSomeOf(value.map(findAuthMethod)); - } - - return asRecord(value).pipe( - Option.flatMap((record) => { - const direct = Option.firstSomeOf( - AUTH_METHOD_KEYS.map((key) => asNonEmptyString(record[key])), - ); - if (Option.isSome(direct)) return direct; - - return Option.firstSomeOf( - AUTH_METHOD_CONTAINER_KEYS.map((key) => - asRecord(record[key]).pipe(Option.flatMap(findAuthMethod)), - ), - ); - }), - ); -} - -/** - * Try to extract a subscription type from the `claude auth status` JSON - 
* output. This is a zero-cost operation on data we already have. - */ -const decodeUnknownJson = decodeJsonResult(Schema.Unknown); - -function extractSubscriptionTypeFromOutput(result: CommandResult): string | undefined { - const parsed = decodeUnknownJson(result.stdout.trim()); - if (Result.isFailure(parsed)) return undefined; - return Option.getOrUndefined(findSubscriptionType(parsed.success)); -} - -function extractClaudeAuthMethodFromOutput(result: CommandResult): string | undefined { - const parsed = decodeUnknownJson(result.stdout.trim()); - if (Result.isFailure(parsed)) return undefined; - return Option.getOrUndefined(findAuthMethod(parsed.success)); -} - -// ── Dynamic model capability adjustment ───────────────────────────── - -/** Subscription types where the 1M context window is included in the plan. */ -const PREMIUM_SUBSCRIPTION_TYPES = new Set([ - "max", - "maxplan", - "max5", - "max20", - "enterprise", - "team", -]); - -function toTitleCaseWords(value: string): string { - return value - .split(/[\s_-]+/g) - .filter(Boolean) - .map((part) => part[0]!.toUpperCase() + part.slice(1).toLowerCase()) - .join(" "); -} - -function claudeSubscriptionLabel(subscriptionType: string | undefined): string | undefined { - const normalized = subscriptionType?.toLowerCase().replace(/[\s_-]+/g, ""); - if (!normalized) return undefined; - - switch (normalized) { - case "max": - case "maxplan": - case "max5": - case "max20": - return "Max"; - case "enterprise": - return "Enterprise"; - case "team": - return "Team"; - case "pro": - return "Pro"; - case "free": - return "Free"; - default: - return toTitleCaseWords(subscriptionType!); - } -} - -function normalizeClaudeAuthMethod(authMethod: string | undefined): string | undefined { - const normalized = authMethod?.toLowerCase().replace(/[\s_-]+/g, ""); - if (!normalized) return undefined; - if (normalized === "apikey") return "apiKey"; - return undefined; -} - -function claudeAuthMetadata(input: { - readonly 
subscriptionType: string | undefined; - readonly authMethod: string | undefined; -}): { readonly type: string; readonly label: string } | undefined { - if (normalizeClaudeAuthMethod(input.authMethod) === "apiKey") { - return { - type: "apiKey", - label: "Claude API Key", - }; - } - - if (input.subscriptionType) { - const subscriptionLabel = claudeSubscriptionLabel(input.subscriptionType); - return { - type: input.subscriptionType, - label: `Claude ${subscriptionLabel ?? toTitleCaseWords(input.subscriptionType)} Subscription`, - }; - } - - return undefined; -} - -/** - * Adjust the built-in model list based on the user's detected subscription. - * - * - Premium tiers (Max, Enterprise, Team): 1M context becomes the default. - * - Other tiers (Pro, free, unknown): 200k context stays the default; - * 1M remains available as a manual option so users can still enable it. - */ -export function adjustModelsForSubscription( - baseModels: ReadonlyArray, - subscriptionType: string | undefined, -): ReadonlyArray { - const normalized = subscriptionType?.toLowerCase().replace(/[\s_-]+/g, ""); - if (!normalized || !PREMIUM_SUBSCRIPTION_TYPES.has(normalized)) { - return baseModels; - } - - // Flip 1M to be the default for premium users - return baseModels.map((model) => { - const caps = model.capabilities; - if (!caps || caps.contextWindowOptions.length === 0) return model; - - return { - ...model, - capabilities: { - ...caps, - contextWindowOptions: caps.contextWindowOptions.map((opt) => - opt.value === "1m" - ? { value: opt.value, label: opt.label, isDefault: true as const } - : { value: opt.value, label: opt.label }, - ), - }, - }; - }); -} - -// ── SDK capability probe ──────────────────────────────────────────── - -const CAPABILITIES_PROBE_TIMEOUT_MS = 8_000; - -/** - * Probe account information by spawning a lightweight Claude Agent SDK - * session and reading the initialization result. 
- * - * The prompt is never sent to the Anthropic API — we abort immediately - * after the local initialization phase completes. This gives us the - * user's subscription type without incurring any token cost. - * - * This is used as a fallback when `claude auth status` does not include - * subscription type information. - */ -const probeClaudeCapabilities = (binaryPath: string) => { - const abort = new AbortController(); - return Effect.tryPromise(async () => { - const q = claudeQuery({ - prompt: ".", - options: { - persistSession: false, - pathToClaudeCodeExecutable: binaryPath, - abortController: abort, - maxTurns: 0, - settingSources: [], - allowedTools: [], - stderr: () => {}, - }, - }); - const init = await q.initializationResult(); - return { subscriptionType: init.account?.subscriptionType }; - }).pipe( - Effect.ensuring( - Effect.sync(() => { - if (!abort.signal.aborted) abort.abort(); - }), - ), - Effect.timeoutOption(CAPABILITIES_PROBE_TIMEOUT_MS), - Effect.result, - Effect.map((result) => { - if (Result.isFailure(result)) return undefined; - return Option.isSome(result.success) ? 
result.success.value : undefined; - }), - ); -}; - -const runClaudeCommand = Effect.fn("runClaudeCommand")(function* (args: ReadonlyArray) { - const claudeSettings = yield* Effect.service(ServerSettingsService).pipe( - Effect.flatMap((service) => service.getSettings), - Effect.map((settings) => settings.providers.claudeAgent), - ); - const command = ChildProcess.make(claudeSettings.binaryPath, [...args], { - shell: process.platform === "win32", - }); - return yield* spawnAndCollect(claudeSettings.binaryPath, command); -}); - -export const checkClaudeProviderStatus = Effect.fn("checkClaudeProviderStatus")(function* ( - resolveSubscriptionType?: (binaryPath: string) => Effect.Effect, -): Effect.fn.Return< - ServerProvider, - ServerSettingsError, - ChildProcessSpawner.ChildProcessSpawner | ServerSettingsService -> { - const claudeSettings = yield* Effect.service(ServerSettingsService).pipe( - Effect.flatMap((service) => service.getSettings), - Effect.map((settings) => settings.providers.claudeAgent), - ); - const checkedAt = new Date().toISOString(); - const models = providerModelsFromSettings(BUILT_IN_MODELS, PROVIDER, claudeSettings.customModels); - - if (!claudeSettings.enabled) { - return buildServerProvider({ - provider: PROVIDER, - enabled: false, - checkedAt, - models, - probe: { - installed: false, - version: null, - status: "warning", - auth: { status: "unknown" }, - message: "Claude is disabled in T3 Code settings.", - }, - }); - } - - const versionProbe = yield* runClaudeCommand(["--version"]).pipe( - Effect.timeoutOption(DEFAULT_TIMEOUT_MS), - Effect.result, - ); - - if (Result.isFailure(versionProbe)) { - const error = versionProbe.failure; - return buildServerProvider({ - provider: PROVIDER, - enabled: claudeSettings.enabled, - checkedAt, - models, - probe: { - installed: !isCommandMissingCause(error), - version: null, - status: "error", - auth: { status: "unknown" }, - message: isCommandMissingCause(error) - ? 
"Claude Agent CLI (`claude`) is not installed or not on PATH." - : `Failed to execute Claude Agent CLI health check: ${error instanceof Error ? error.message : String(error)}.`, - }, - }); - } - - if (Option.isNone(versionProbe.success)) { - return buildServerProvider({ - provider: PROVIDER, - enabled: claudeSettings.enabled, - checkedAt, - models, - probe: { - installed: true, - version: null, - status: "error", - auth: { status: "unknown" }, - message: - "Claude Agent CLI is installed but failed to run. Timed out while running command.", - }, - }); - } - - const version = versionProbe.success.value; - const parsedVersion = parseGenericCliVersion(`${version.stdout}\n${version.stderr}`); - if (version.code !== 0) { - const detail = detailFromResult(version); - return buildServerProvider({ - provider: PROVIDER, - enabled: claudeSettings.enabled, - checkedAt, - models, - probe: { - installed: true, - version: parsedVersion, - status: "error", - auth: { status: "unknown" }, - message: detail - ? `Claude Agent CLI is installed but failed to run. ${detail}` - : "Claude Agent CLI is installed but failed to run.", - }, - }); - } - - // ── Auth check + subscription detection ──────────────────────────── - - const authProbe = yield* runClaudeCommand(["auth", "status"]).pipe( - Effect.timeoutOption(DEFAULT_TIMEOUT_MS), - Effect.result, - ); - - // Determine subscription type from multiple sources (cheapest first): - // 1. `claude auth status` JSON output (may or may not contain it) - // 2. 
Cached SDK probe (spawns a Claude process on miss, reads - // `initializationResult()` for account metadata, then aborts - // immediately — no API tokens are consumed) - - let subscriptionType: string | undefined; - let authMethod: string | undefined; - - if (Result.isSuccess(authProbe) && Option.isSome(authProbe.success)) { - subscriptionType = extractSubscriptionTypeFromOutput(authProbe.success.value); - authMethod = extractClaudeAuthMethodFromOutput(authProbe.success.value); - } - - if (!subscriptionType && resolveSubscriptionType) { - subscriptionType = yield* resolveSubscriptionType(claudeSettings.binaryPath); - } - - const resolvedModels = adjustModelsForSubscription(models, subscriptionType); - - // ── Handle auth results (same logic as before, adjusted models) ── - - if (Result.isFailure(authProbe)) { - const error = authProbe.failure; - return buildServerProvider({ - provider: PROVIDER, - enabled: claudeSettings.enabled, - checkedAt, - models: resolvedModels, - probe: { - installed: true, - version: parsedVersion, - status: "warning", - auth: { status: "unknown" }, - message: - error instanceof Error - ? `Could not verify Claude authentication status: ${error.message}.` - : "Could not verify Claude authentication status.", - }, - }); - } - - if (Option.isNone(authProbe.success)) { - return buildServerProvider({ - provider: PROVIDER, - enabled: claudeSettings.enabled, - checkedAt, - models: resolvedModels, - probe: { - installed: true, - version: parsedVersion, - status: "warning", - auth: { status: "unknown" }, - message: "Could not verify Claude authentication status. 
Timed out while running command.", - }, - }); - } +import { + checkClaudeProviderStatus, + getClaudeModelCapabilities, + probeClaudeCapabilities, +} from "./ClaudeProvider.logic"; - const parsed = parseClaudeAuthStatusFromOutput(authProbe.success.value); - const authMetadata = claudeAuthMetadata({ subscriptionType, authMethod }); - return buildServerProvider({ - provider: PROVIDER, - enabled: claudeSettings.enabled, - checkedAt, - models: resolvedModels, - probe: { - installed: true, - version: parsedVersion, - status: parsed.status, - auth: { - ...parsed.auth, - ...(authMetadata ? authMetadata : {}), - }, - ...(parsed.message ? { message: parsed.message } : {}), - }, - }); -}); +export { getClaudeModelCapabilities }; export const ClaudeProviderLive = Layer.effect( ClaudeProvider, diff --git a/apps/server/src/provider/Layers/CodexAdapter.test.ts b/apps/server/src/provider/Layers/CodexAdapter.test.ts index b5eb873e85..91b74c8aa9 100644 --- a/apps/server/src/provider/Layers/CodexAdapter.test.ts +++ b/apps/server/src/provider/Layers/CodexAdapter.test.ts @@ -19,8 +19,8 @@ import { Effect, Fiber, Layer, Option, Stream } from "effect"; import { CodexAppServerManager, type CodexAppServerStartSessionInput, - type CodexAppServerSendTurnInput, } from "../../codexAppServerManager.ts"; +import type { CodexAppServerSendTurnInput } from "../../codexAppServerManager.shared.ts"; import { ServerConfig } from "../../config.ts"; import { ServerSettingsService } from "../../serverSettings.ts"; import { ProviderAdapterValidationError } from "../Errors.ts"; diff --git a/apps/server/src/provider/Layers/CodexAdapter.ts b/apps/server/src/provider/Layers/CodexAdapter.ts index cee6bca6ed..d3ad35df88 100644 --- a/apps/server/src/provider/Layers/CodexAdapter.ts +++ b/apps/server/src/provider/Layers/CodexAdapter.ts @@ -44,7 +44,7 @@ import { type EventNdjsonLogger, makeEventNdjsonLogger } from "./EventNdjsonLogg const PROVIDER = "codex" as const; -export interface CodexAdapterLiveOptions { 
+interface CodexAdapterLiveOptions { readonly manager?: CodexAppServerManager; readonly makeManager?: (services?: ServiceMap.ServiceMap) => CodexAppServerManager; readonly nativeEventLogPath?: string; @@ -1634,9 +1634,6 @@ const makeCodexAdapter = Effect.fn("makeCodexAdapter")(function* ( streamEvents: Stream.fromQueue(runtimeEventQueue), } satisfies CodexAdapterShape; }); - -export const CodexAdapterLive = Layer.effect(CodexAdapter, makeCodexAdapter()); - export function makeCodexAdapterLive(options?: CodexAdapterLiveOptions) { return Layer.effect(CodexAdapter, makeCodexAdapter(options)); } diff --git a/apps/server/src/provider/Layers/CodexProvider.shared.ts b/apps/server/src/provider/Layers/CodexProvider.shared.ts new file mode 100644 index 0000000000..81c8b90609 --- /dev/null +++ b/apps/server/src/provider/Layers/CodexProvider.shared.ts @@ -0,0 +1,509 @@ +import * as OS from "node:os"; +import type { + ModelCapabilities, + ServerProvider, + ServerProviderAuth, + ServerProviderModel, + ServerProviderState, +} from "@t3tools/contracts"; +import { Effect, FileSystem, Option, Path, Result } from "effect"; +import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; + +import { + buildServerProvider, + DEFAULT_TIMEOUT_MS, + detailFromResult, + extractAuthBoolean, + isCommandMissingCause, + parseGenericCliVersion, + providerModelsFromSettings, + spawnAndCollect, + type CommandResult, +} from "../providerSnapshot"; +import { + formatCodexCliUpgradeMessage, + isCodexCliVersionSupported, + parseCodexCliVersion, +} from "../codexCliVersion"; +import { + adjustCodexModelsForAccount, + codexAuthSubLabel, + codexAuthSubType, + type CodexAccountSnapshot, +} from "../codexAccount"; +import { probeCodexAccount } from "../codexAppServer"; +import { ServerSettingsService } from "../../serverSettings"; +import { ServerSettingsError } from "@t3tools/contracts"; + +const PROVIDER = "codex" as const; +const OPENAI_AUTH_PROVIDERS = new Set(["openai"]); +const 
BUILT_IN_MODELS: ReadonlyArray = [ + { + slug: "gpt-5.4", + name: "GPT-5.4", + isCustom: false, + capabilities: { + reasoningEffortLevels: [ + { value: "xhigh", label: "Extra High" }, + { value: "high", label: "High", isDefault: true }, + { value: "medium", label: "Medium" }, + { value: "low", label: "Low" }, + ], + supportsFastMode: true, + supportsThinkingToggle: false, + contextWindowOptions: [], + promptInjectedEffortLevels: [], + }, + }, + { + slug: "gpt-5.4-mini", + name: "GPT-5.4 Mini", + isCustom: false, + capabilities: { + reasoningEffortLevels: [ + { value: "xhigh", label: "Extra High" }, + { value: "high", label: "High", isDefault: true }, + { value: "medium", label: "Medium" }, + { value: "low", label: "Low" }, + ], + supportsFastMode: true, + supportsThinkingToggle: false, + contextWindowOptions: [], + promptInjectedEffortLevels: [], + }, + }, + { + slug: "gpt-5.3-codex", + name: "GPT-5.3 Codex", + isCustom: false, + capabilities: { + reasoningEffortLevels: [ + { value: "xhigh", label: "Extra High" }, + { value: "high", label: "High", isDefault: true }, + { value: "medium", label: "Medium" }, + { value: "low", label: "Low" }, + ], + supportsFastMode: true, + supportsThinkingToggle: false, + contextWindowOptions: [], + promptInjectedEffortLevels: [], + }, + }, + { + slug: "gpt-5.3-codex-spark", + name: "GPT-5.3 Codex Spark", + isCustom: false, + capabilities: { + reasoningEffortLevels: [ + { value: "xhigh", label: "Extra High" }, + { value: "high", label: "High", isDefault: true }, + { value: "medium", label: "Medium" }, + { value: "low", label: "Low" }, + ], + supportsFastMode: true, + supportsThinkingToggle: false, + contextWindowOptions: [], + promptInjectedEffortLevels: [], + }, + }, + { + slug: "gpt-5.2-codex", + name: "GPT-5.2 Codex", + isCustom: false, + capabilities: { + reasoningEffortLevels: [ + { value: "xhigh", label: "Extra High" }, + { value: "high", label: "High", isDefault: true }, + { value: "medium", label: "Medium" }, + { value: 
"low", label: "Low" }, + ], + supportsFastMode: true, + supportsThinkingToggle: false, + contextWindowOptions: [], + promptInjectedEffortLevels: [], + }, + }, + { + slug: "gpt-5.2", + name: "GPT-5.2", + isCustom: false, + capabilities: { + reasoningEffortLevels: [ + { value: "xhigh", label: "Extra High" }, + { value: "high", label: "High", isDefault: true }, + { value: "medium", label: "Medium" }, + { value: "low", label: "Low" }, + ], + supportsFastMode: true, + supportsThinkingToggle: false, + contextWindowOptions: [], + promptInjectedEffortLevels: [], + }, + }, +]; + +export function getCodexModelCapabilities(model: string | null | undefined): ModelCapabilities { + const slug = model?.trim(); + return ( + BUILT_IN_MODELS.find((candidate) => candidate.slug === slug)?.capabilities ?? { + reasoningEffortLevels: [], + supportsFastMode: false, + supportsThinkingToggle: false, + contextWindowOptions: [], + promptInjectedEffortLevels: [], + } + ); +} + +function parseAuthStatusFromOutput(result: CommandResult): { + readonly status: Exclude; + readonly auth: Pick; + readonly message?: string; +} { + const lowerOutput = `${result.stdout}\n${result.stderr}`.toLowerCase(); + + if ( + lowerOutput.includes("unknown command") || + lowerOutput.includes("unrecognized command") || + lowerOutput.includes("unexpected argument") + ) { + return { + status: "warning", + auth: { status: "unknown" }, + message: "Codex CLI authentication status command is unavailable in this Codex version.", + }; + } + + if ( + lowerOutput.includes("not logged in") || + lowerOutput.includes("login required") || + lowerOutput.includes("authentication required") || + lowerOutput.includes("run `codex login`") || + lowerOutput.includes("run codex login") + ) { + return { + status: "error", + auth: { status: "unauthenticated" }, + message: "Codex CLI is not authenticated. 
Run `codex login` and try again.", + }; + } + + const parsedAuth = (() => { + const trimmed = result.stdout.trim(); + if (!trimmed || (!trimmed.startsWith("{") && !trimmed.startsWith("["))) { + return { attemptedJsonParse: false as const, auth: undefined as boolean | undefined }; + } + try { + return { + attemptedJsonParse: true as const, + auth: extractAuthBoolean(JSON.parse(trimmed)), + }; + } catch { + return { attemptedJsonParse: false as const, auth: undefined as boolean | undefined }; + } + })(); + + if (parsedAuth.auth === true) { + return { status: "ready", auth: { status: "authenticated" } }; + } + if (parsedAuth.auth === false) { + return { + status: "error", + auth: { status: "unauthenticated" }, + message: "Codex CLI is not authenticated. Run `codex login` and try again.", + }; + } + if (parsedAuth.attemptedJsonParse) { + return { + status: "warning", + auth: { status: "unknown" }, + message: + "Could not verify Codex authentication status from JSON output (missing auth marker).", + }; + } + if (result.code === 0) { + return { status: "ready", auth: { status: "authenticated" } }; + } + + const detail = detailFromResult(result); + return { + status: "warning", + auth: { status: "unknown" }, + message: detail + ? `Could not verify Codex authentication status. 
${detail}` + : "Could not verify Codex authentication status.", + }; +} + +const readCodexConfigModelProvider = Effect.fn("readCodexConfigModelProvider")(function* () { + const fileSystem = yield* FileSystem.FileSystem; + const path = yield* Path.Path; + const settingsService = yield* ServerSettingsService; + const codexHome = yield* settingsService.getSettings.pipe( + Effect.map( + (settings) => + settings.providers.codex.homePath || + process.env.CODEX_HOME || + path.join(OS.homedir(), ".codex"), + ), + ); + const configPath = path.join(codexHome, "config.toml"); + + const content = yield* fileSystem + .readFileString(configPath) + .pipe(Effect.orElseSucceed(() => undefined)); + if (content === undefined) { + return undefined; + } + + let inTopLevel = true; + for (const line of content.split("\n")) { + const trimmed = line.trim(); + if (!trimmed || trimmed.startsWith("#")) continue; + if (trimmed.startsWith("[")) { + inTopLevel = false; + continue; + } + if (!inTopLevel) continue; + + const match = trimmed.match(/^model_provider\s*=\s*["']([^"']+)["']/); + if (match) return match[1]; + } + return undefined; +}); + +const hasCustomModelProvider = readCodexConfigModelProvider().pipe( + Effect.map((provider) => provider !== undefined && !OPENAI_AUTH_PROVIDERS.has(provider)), + Effect.orElseSucceed(() => false), +); + +const CAPABILITIES_PROBE_TIMEOUT_MS = 8_000; + +export const probeCodexCapabilities = (input: { + readonly binaryPath: string; + readonly homePath?: string; +}) => + Effect.tryPromise((signal) => probeCodexAccount({ ...input, signal })).pipe( + Effect.timeoutOption(CAPABILITIES_PROBE_TIMEOUT_MS), + Effect.result, + Effect.map((result) => { + if (Result.isFailure(result)) return undefined; + return Option.isSome(result.success) ? 
result.success.value : undefined; + }), + ); + +const runCodexCommand = Effect.fn("runCodexCommand")(function* (args: ReadonlyArray) { + const settingsService = yield* ServerSettingsService; + const codexSettings = yield* settingsService.getSettings.pipe( + Effect.map((settings) => settings.providers.codex), + ); + const command = ChildProcess.make(codexSettings.binaryPath, [...args], { + shell: process.platform === "win32", + env: { + ...process.env, + ...(codexSettings.homePath ? { CODEX_HOME: codexSettings.homePath } : {}), + }, + }); + return yield* spawnAndCollect(codexSettings.binaryPath, command); +}); + +export const checkCodexProviderStatus = Effect.fn("checkCodexProviderStatus")(function* ( + resolveAccount?: (input: { + readonly binaryPath: string; + readonly homePath?: string; + }) => Effect.Effect, +): Effect.fn.Return< + ServerProvider, + ServerSettingsError, + | ChildProcessSpawner.ChildProcessSpawner + | FileSystem.FileSystem + | Path.Path + | ServerSettingsService +> { + const codexSettings = yield* Effect.service(ServerSettingsService).pipe( + Effect.flatMap((service) => service.getSettings), + Effect.map((settings) => settings.providers.codex), + ); + const checkedAt = new Date().toISOString(); + const models = providerModelsFromSettings(BUILT_IN_MODELS, PROVIDER, codexSettings.customModels); + + if (!codexSettings.enabled) { + return buildServerProvider({ + provider: PROVIDER, + enabled: false, + checkedAt, + models, + probe: { + installed: false, + version: null, + status: "warning", + auth: { status: "unknown" }, + message: "Codex is disabled in T3 Code settings.", + }, + }); + } + + const versionProbe = yield* runCodexCommand(["--version"]).pipe( + Effect.timeoutOption(DEFAULT_TIMEOUT_MS), + Effect.result, + ); + + if (Result.isFailure(versionProbe)) { + const error = versionProbe.failure; + return buildServerProvider({ + provider: PROVIDER, + enabled: codexSettings.enabled, + checkedAt, + models, + probe: { + installed: 
!isCommandMissingCause(error), + version: null, + status: "error", + auth: { status: "unknown" }, + message: isCommandMissingCause(error) + ? "Codex CLI (`codex`) is not installed or not on PATH." + : `Failed to execute Codex CLI health check: ${error instanceof Error ? error.message : String(error)}.`, + }, + }); + } + + if (Option.isNone(versionProbe.success)) { + return buildServerProvider({ + provider: PROVIDER, + enabled: codexSettings.enabled, + checkedAt, + models, + probe: { + installed: true, + version: null, + status: "error", + auth: { status: "unknown" }, + message: "Codex CLI is installed but failed to run. Timed out while running command.", + }, + }); + } + + const version = versionProbe.success.value; + const parsedVersion = + parseCodexCliVersion(`${version.stdout}\n${version.stderr}`) ?? + parseGenericCliVersion(`${version.stdout}\n${version.stderr}`); + if (version.code !== 0) { + const detail = detailFromResult(version); + return buildServerProvider({ + provider: PROVIDER, + enabled: codexSettings.enabled, + checkedAt, + models, + probe: { + installed: true, + version: parsedVersion, + status: "error", + auth: { status: "unknown" }, + message: detail + ? `Codex CLI is installed but failed to run. 
${detail}` + : "Codex CLI is installed but failed to run.", + }, + }); + } + + if (parsedVersion && !isCodexCliVersionSupported(parsedVersion)) { + return buildServerProvider({ + provider: PROVIDER, + enabled: codexSettings.enabled, + checkedAt, + models, + probe: { + installed: true, + version: parsedVersion, + status: "error", + auth: { status: "unknown" }, + message: formatCodexCliUpgradeMessage(parsedVersion), + }, + }); + } + + if (yield* hasCustomModelProvider) { + return buildServerProvider({ + provider: PROVIDER, + enabled: codexSettings.enabled, + checkedAt, + models, + probe: { + installed: true, + version: parsedVersion, + status: "ready", + auth: { status: "unknown" }, + message: "Using a custom Codex model provider; OpenAI login check skipped.", + }, + }); + } + + const authProbe = yield* runCodexCommand(["login", "status"]).pipe( + Effect.timeoutOption(DEFAULT_TIMEOUT_MS), + Effect.result, + ); + const account = resolveAccount + ? yield* resolveAccount({ + binaryPath: codexSettings.binaryPath, + homePath: codexSettings.homePath, + }) + : undefined; + const resolvedModels = adjustCodexModelsForAccount(models, account); + + if (Result.isFailure(authProbe)) { + const error = authProbe.failure; + return buildServerProvider({ + provider: PROVIDER, + enabled: codexSettings.enabled, + checkedAt, + models: resolvedModels, + probe: { + installed: true, + version: parsedVersion, + status: "warning", + auth: { status: "unknown" }, + message: + error instanceof Error + ? `Could not verify Codex authentication status: ${error.message}.` + : "Could not verify Codex authentication status.", + }, + }); + } + + if (Option.isNone(authProbe.success)) { + return buildServerProvider({ + provider: PROVIDER, + enabled: codexSettings.enabled, + checkedAt, + models: resolvedModels, + probe: { + installed: true, + version: parsedVersion, + status: "warning", + auth: { status: "unknown" }, + message: "Could not verify Codex authentication status. 
Timed out while running command.", + }, + }); + } + + const parsed = parseAuthStatusFromOutput(authProbe.success.value); + const authType = codexAuthSubType(account); + const authLabel = codexAuthSubLabel(account); + return buildServerProvider({ + provider: PROVIDER, + enabled: codexSettings.enabled, + checkedAt, + models: resolvedModels, + probe: { + installed: true, + version: parsedVersion, + status: parsed.status, + auth: { + ...parsed.auth, + ...(authType ? { type: authType } : {}), + ...(authLabel ? { label: authLabel } : {}), + }, + ...(parsed.message ? { message: parsed.message } : {}), + }, + }); +}); diff --git a/apps/server/src/provider/Layers/CodexProvider.ts b/apps/server/src/provider/Layers/CodexProvider.ts index 667bdf048b..31f6d7e726 100644 --- a/apps/server/src/provider/Layers/CodexProvider.ts +++ b/apps/server/src/provider/Layers/CodexProvider.ts @@ -1,526 +1,13 @@ -import * as OS from "node:os"; -import type { - ModelCapabilities, - CodexSettings, - ServerProvider, - ServerProviderModel, - ServerProviderAuth, - ServerProviderState, -} from "@t3tools/contracts"; -import { - Cache, - Duration, - Effect, - Equal, - FileSystem, - Layer, - Option, - Path, - Result, - Stream, -} from "effect"; -import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"; +import type { CodexSettings } from "@t3tools/contracts"; +import { Cache, Duration, Effect, Equal, FileSystem, Layer, Path, Stream } from "effect"; +import { ChildProcessSpawner } from "effect/unstable/process"; -import { - buildServerProvider, - DEFAULT_TIMEOUT_MS, - detailFromResult, - extractAuthBoolean, - isCommandMissingCause, - parseGenericCliVersion, - providerModelsFromSettings, - spawnAndCollect, - type CommandResult, -} from "../providerSnapshot"; import { makeManagedServerProvider } from "../makeManagedServerProvider"; -import { - formatCodexCliUpgradeMessage, - isCodexCliVersionSupported, - parseCodexCliVersion, -} from "../codexCliVersion"; -import { - 
adjustCodexModelsForAccount, - codexAuthSubLabel, - codexAuthSubType, - type CodexAccountSnapshot, -} from "../codexAccount"; -import { probeCodexAccount } from "../codexAppServer"; import { CodexProvider } from "../Services/CodexProvider"; import { ServerSettingsService } from "../../serverSettings"; -import { ServerSettingsError } from "@t3tools/contracts"; +import { checkCodexProviderStatus, probeCodexCapabilities } from "./CodexProvider.shared"; -const PROVIDER = "codex" as const; -const OPENAI_AUTH_PROVIDERS = new Set(["openai"]); -const BUILT_IN_MODELS: ReadonlyArray = [ - { - slug: "gpt-5.4", - name: "GPT-5.4", - isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "xhigh", label: "Extra High" }, - { value: "high", label: "High", isDefault: true }, - { value: "medium", label: "Medium" }, - { value: "low", label: "Low" }, - ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, - }, - { - slug: "gpt-5.4-mini", - name: "GPT-5.4 Mini", - isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "xhigh", label: "Extra High" }, - { value: "high", label: "High", isDefault: true }, - { value: "medium", label: "Medium" }, - { value: "low", label: "Low" }, - ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, - }, - { - slug: "gpt-5.3-codex", - name: "GPT-5.3 Codex", - isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "xhigh", label: "Extra High" }, - { value: "high", label: "High", isDefault: true }, - { value: "medium", label: "Medium" }, - { value: "low", label: "Low" }, - ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, - }, - { - slug: "gpt-5.3-codex-spark", - name: "GPT-5.3 Codex Spark", - isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "xhigh", label: 
"Extra High" }, - { value: "high", label: "High", isDefault: true }, - { value: "medium", label: "Medium" }, - { value: "low", label: "Low" }, - ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, - }, - { - slug: "gpt-5.2-codex", - name: "GPT-5.2 Codex", - isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "xhigh", label: "Extra High" }, - { value: "high", label: "High", isDefault: true }, - { value: "medium", label: "Medium" }, - { value: "low", label: "Low" }, - ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, - }, - { - slug: "gpt-5.2", - name: "GPT-5.2", - isCustom: false, - capabilities: { - reasoningEffortLevels: [ - { value: "xhigh", label: "Extra High" }, - { value: "high", label: "High", isDefault: true }, - { value: "medium", label: "Medium" }, - { value: "low", label: "Low" }, - ], - supportsFastMode: true, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - }, - }, -]; - -export function getCodexModelCapabilities(model: string | null | undefined): ModelCapabilities { - const slug = model?.trim(); - return ( - BUILT_IN_MODELS.find((candidate) => candidate.slug === slug)?.capabilities ?? 
{ - reasoningEffortLevels: [], - supportsFastMode: false, - supportsThinkingToggle: false, - contextWindowOptions: [], - promptInjectedEffortLevels: [], - } - ); -} - -export function parseAuthStatusFromOutput(result: CommandResult): { - readonly status: Exclude; - readonly auth: Pick; - readonly message?: string; -} { - const lowerOutput = `${result.stdout}\n${result.stderr}`.toLowerCase(); - - if ( - lowerOutput.includes("unknown command") || - lowerOutput.includes("unrecognized command") || - lowerOutput.includes("unexpected argument") - ) { - return { - status: "warning", - auth: { status: "unknown" }, - message: "Codex CLI authentication status command is unavailable in this Codex version.", - }; - } - - if ( - lowerOutput.includes("not logged in") || - lowerOutput.includes("login required") || - lowerOutput.includes("authentication required") || - lowerOutput.includes("run `codex login`") || - lowerOutput.includes("run codex login") - ) { - return { - status: "error", - auth: { status: "unauthenticated" }, - message: "Codex CLI is not authenticated. Run `codex login` and try again.", - }; - } - - const parsedAuth = (() => { - const trimmed = result.stdout.trim(); - if (!trimmed || (!trimmed.startsWith("{") && !trimmed.startsWith("["))) { - return { attemptedJsonParse: false as const, auth: undefined as boolean | undefined }; - } - try { - return { - attemptedJsonParse: true as const, - auth: extractAuthBoolean(JSON.parse(trimmed)), - }; - } catch { - return { attemptedJsonParse: false as const, auth: undefined as boolean | undefined }; - } - })(); - - if (parsedAuth.auth === true) { - return { status: "ready", auth: { status: "authenticated" } }; - } - if (parsedAuth.auth === false) { - return { - status: "error", - auth: { status: "unauthenticated" }, - message: "Codex CLI is not authenticated. 
Run `codex login` and try again.", - }; - } - if (parsedAuth.attemptedJsonParse) { - return { - status: "warning", - auth: { status: "unknown" }, - message: - "Could not verify Codex authentication status from JSON output (missing auth marker).", - }; - } - if (result.code === 0) { - return { status: "ready", auth: { status: "authenticated" } }; - } - - const detail = detailFromResult(result); - return { - status: "warning", - auth: { status: "unknown" }, - message: detail - ? `Could not verify Codex authentication status. ${detail}` - : "Could not verify Codex authentication status.", - }; -} - -export const readCodexConfigModelProvider = Effect.fn("readCodexConfigModelProvider")(function* () { - const fileSystem = yield* FileSystem.FileSystem; - const path = yield* Path.Path; - const settingsService = yield* ServerSettingsService; - const codexHome = yield* settingsService.getSettings.pipe( - Effect.map( - (settings) => - settings.providers.codex.homePath || - process.env.CODEX_HOME || - path.join(OS.homedir(), ".codex"), - ), - ); - const configPath = path.join(codexHome, "config.toml"); - - const content = yield* fileSystem - .readFileString(configPath) - .pipe(Effect.orElseSucceed(() => undefined)); - if (content === undefined) { - return undefined; - } - - let inTopLevel = true; - for (const line of content.split("\n")) { - const trimmed = line.trim(); - if (!trimmed || trimmed.startsWith("#")) continue; - if (trimmed.startsWith("[")) { - inTopLevel = false; - continue; - } - if (!inTopLevel) continue; - - const match = trimmed.match(/^model_provider\s*=\s*["']([^"']+)["']/); - if (match) return match[1]; - } - return undefined; -}); - -export const hasCustomModelProvider = readCodexConfigModelProvider().pipe( - Effect.map((provider) => provider !== undefined && !OPENAI_AUTH_PROVIDERS.has(provider)), - Effect.orElseSucceed(() => false), -); - -const CAPABILITIES_PROBE_TIMEOUT_MS = 8_000; - -const probeCodexCapabilities = (input: { - readonly binaryPath: 
string; - readonly homePath?: string; -}) => - Effect.tryPromise((signal) => probeCodexAccount({ ...input, signal })).pipe( - Effect.timeoutOption(CAPABILITIES_PROBE_TIMEOUT_MS), - Effect.result, - Effect.map((result) => { - if (Result.isFailure(result)) return undefined; - return Option.isSome(result.success) ? result.success.value : undefined; - }), - ); - -const runCodexCommand = Effect.fn("runCodexCommand")(function* (args: ReadonlyArray) { - const settingsService = yield* ServerSettingsService; - const codexSettings = yield* settingsService.getSettings.pipe( - Effect.map((settings) => settings.providers.codex), - ); - const command = ChildProcess.make(codexSettings.binaryPath, [...args], { - shell: process.platform === "win32", - env: { - ...process.env, - ...(codexSettings.homePath ? { CODEX_HOME: codexSettings.homePath } : {}), - }, - }); - return yield* spawnAndCollect(codexSettings.binaryPath, command); -}); - -export const checkCodexProviderStatus = Effect.fn("checkCodexProviderStatus")(function* ( - resolveAccount?: (input: { - readonly binaryPath: string; - readonly homePath?: string; - }) => Effect.Effect, -): Effect.fn.Return< - ServerProvider, - ServerSettingsError, - | ChildProcessSpawner.ChildProcessSpawner - | FileSystem.FileSystem - | Path.Path - | ServerSettingsService -> { - const codexSettings = yield* Effect.service(ServerSettingsService).pipe( - Effect.flatMap((service) => service.getSettings), - Effect.map((settings) => settings.providers.codex), - ); - const checkedAt = new Date().toISOString(); - const models = providerModelsFromSettings(BUILT_IN_MODELS, PROVIDER, codexSettings.customModels); - - if (!codexSettings.enabled) { - return buildServerProvider({ - provider: PROVIDER, - enabled: false, - checkedAt, - models, - probe: { - installed: false, - version: null, - status: "warning", - auth: { status: "unknown" }, - message: "Codex is disabled in T3 Code settings.", - }, - }); - } - - const versionProbe = yield* 
runCodexCommand(["--version"]).pipe( - Effect.timeoutOption(DEFAULT_TIMEOUT_MS), - Effect.result, - ); - - if (Result.isFailure(versionProbe)) { - const error = versionProbe.failure; - return buildServerProvider({ - provider: PROVIDER, - enabled: codexSettings.enabled, - checkedAt, - models, - probe: { - installed: !isCommandMissingCause(error), - version: null, - status: "error", - auth: { status: "unknown" }, - message: isCommandMissingCause(error) - ? "Codex CLI (`codex`) is not installed or not on PATH." - : `Failed to execute Codex CLI health check: ${error instanceof Error ? error.message : String(error)}.`, - }, - }); - } - - if (Option.isNone(versionProbe.success)) { - return buildServerProvider({ - provider: PROVIDER, - enabled: codexSettings.enabled, - checkedAt, - models, - probe: { - installed: true, - version: null, - status: "error", - auth: { status: "unknown" }, - message: "Codex CLI is installed but failed to run. Timed out while running command.", - }, - }); - } - - const version = versionProbe.success.value; - const parsedVersion = - parseCodexCliVersion(`${version.stdout}\n${version.stderr}`) ?? - parseGenericCliVersion(`${version.stdout}\n${version.stderr}`); - if (version.code !== 0) { - const detail = detailFromResult(version); - return buildServerProvider({ - provider: PROVIDER, - enabled: codexSettings.enabled, - checkedAt, - models, - probe: { - installed: true, - version: parsedVersion, - status: "error", - auth: { status: "unknown" }, - message: detail - ? `Codex CLI is installed but failed to run. 
${detail}` - : "Codex CLI is installed but failed to run.", - }, - }); - } - - if (parsedVersion && !isCodexCliVersionSupported(parsedVersion)) { - return buildServerProvider({ - provider: PROVIDER, - enabled: codexSettings.enabled, - checkedAt, - models, - probe: { - installed: true, - version: parsedVersion, - status: "error", - auth: { status: "unknown" }, - message: formatCodexCliUpgradeMessage(parsedVersion), - }, - }); - } - - if (yield* hasCustomModelProvider) { - return buildServerProvider({ - provider: PROVIDER, - enabled: codexSettings.enabled, - checkedAt, - models, - probe: { - installed: true, - version: parsedVersion, - status: "ready", - auth: { status: "unknown" }, - message: "Using a custom Codex model provider; OpenAI login check skipped.", - }, - }); - } - - const authProbe = yield* runCodexCommand(["login", "status"]).pipe( - Effect.timeoutOption(DEFAULT_TIMEOUT_MS), - Effect.result, - ); - const account = resolveAccount - ? yield* resolveAccount({ - binaryPath: codexSettings.binaryPath, - homePath: codexSettings.homePath, - }) - : undefined; - const resolvedModels = adjustCodexModelsForAccount(models, account); - - if (Result.isFailure(authProbe)) { - const error = authProbe.failure; - return buildServerProvider({ - provider: PROVIDER, - enabled: codexSettings.enabled, - checkedAt, - models: resolvedModels, - probe: { - installed: true, - version: parsedVersion, - status: "warning", - auth: { status: "unknown" }, - message: - error instanceof Error - ? `Could not verify Codex authentication status: ${error.message}.` - : "Could not verify Codex authentication status.", - }, - }); - } - - if (Option.isNone(authProbe.success)) { - return buildServerProvider({ - provider: PROVIDER, - enabled: codexSettings.enabled, - checkedAt, - models: resolvedModels, - probe: { - installed: true, - version: parsedVersion, - status: "warning", - auth: { status: "unknown" }, - message: "Could not verify Codex authentication status. 
Timed out while running command.", - }, - }); - } - - const parsed = parseAuthStatusFromOutput(authProbe.success.value); - const authType = codexAuthSubType(account); - const authLabel = codexAuthSubLabel(account); - return buildServerProvider({ - provider: PROVIDER, - enabled: codexSettings.enabled, - checkedAt, - models: resolvedModels, - probe: { - installed: true, - version: parsedVersion, - status: parsed.status, - auth: { - ...parsed.auth, - ...(authType ? { type: authType } : {}), - ...(authLabel ? { label: authLabel } : {}), - }, - ...(parsed.message ? { message: parsed.message } : {}), - }, - }); -}); +export { getCodexModelCapabilities } from "./CodexProvider.shared"; export const CodexProviderLive = Layer.effect( CodexProvider, diff --git a/apps/server/src/provider/Layers/EventNdjsonLogger.ts b/apps/server/src/provider/Layers/EventNdjsonLogger.ts index a4fd6f235d..831f9a63da 100644 --- a/apps/server/src/provider/Layers/EventNdjsonLogger.ts +++ b/apps/server/src/provider/Layers/EventNdjsonLogger.ts @@ -20,7 +20,7 @@ const DEFAULT_BATCH_WINDOW_MS = 200; const GLOBAL_THREAD_SEGMENT = "_global"; const LOG_SCOPE = "provider-observability"; -export type EventNdjsonStream = "native" | "canonical" | "orchestration"; +type EventNdjsonStream = "native" | "canonical" | "orchestration"; export interface EventNdjsonLogger { readonly filePath: string; @@ -28,7 +28,7 @@ export interface EventNdjsonLogger { close: () => Effect.Effect; } -export interface EventNdjsonLoggerOptions { +interface EventNdjsonLoggerOptions { readonly stream: EventNdjsonStream; readonly maxBytes?: number; readonly maxFiles?: number; diff --git a/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts b/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts index b6c987c64c..aff945fa9a 100644 --- a/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts +++ b/apps/server/src/provider/Layers/ProviderAdapterRegistry.ts @@ -18,7 +18,7 @@ import { import { ClaudeAdapter } from 
"../Services/ClaudeAdapter.ts"; import { CodexAdapter } from "../Services/CodexAdapter.ts"; -export interface ProviderAdapterRegistryLiveOptions { +interface ProviderAdapterRegistryLiveOptions { readonly adapters?: ReadonlyArray>; } diff --git a/apps/server/src/provider/Layers/ProviderRegistry.shared.ts b/apps/server/src/provider/Layers/ProviderRegistry.shared.ts new file mode 100644 index 0000000000..cc88ccc928 --- /dev/null +++ b/apps/server/src/provider/Layers/ProviderRegistry.shared.ts @@ -0,0 +1,7 @@ +import type { ServerProvider } from "@t3tools/contracts"; +import { Equal } from "effect"; + +export const haveProvidersChanged = ( + previousProviders: ReadonlyArray, + nextProviders: ReadonlyArray, +): boolean => !Equal.equals(previousProviders, nextProviders); diff --git a/apps/server/src/provider/Layers/ProviderRegistry.test.ts b/apps/server/src/provider/Layers/ProviderRegistry.test.ts index 116c008d67..296a8c96ec 100644 --- a/apps/server/src/provider/Layers/ProviderRegistry.test.ts +++ b/apps/server/src/provider/Layers/ProviderRegistry.test.ts @@ -17,21 +17,18 @@ import { DEFAULT_SERVER_SETTINGS, ServerSettings, type ServerProvider, + type ServerSettingsPatch, type ServerSettings as ContractServerSettings, } from "@t3tools/contracts"; import * as PlatformError from "effect/PlatformError"; import { ChildProcessSpawner } from "effect/unstable/process"; import { deepMerge } from "@t3tools/shared/Struct"; -import { - checkCodexProviderStatus, - hasCustomModelProvider, - parseAuthStatusFromOutput, - readCodexConfigModelProvider, -} from "./CodexProvider"; -import { checkClaudeProviderStatus, parseClaudeAuthStatusFromOutput } from "./ClaudeProvider"; -import { haveProvidersChanged, ProviderRegistryLive } from "./ProviderRegistry"; -import { ServerSettingsService, type ServerSettingsShape } from "../../serverSettings"; +import { checkCodexProviderStatus } from "./CodexProvider.shared"; +import { checkClaudeProviderStatus } from "./ClaudeProvider.logic"; +import { 
ProviderRegistryLive } from "./ProviderRegistry"; +import { haveProvidersChanged } from "./ProviderRegistry.shared"; +import { ServerSettingsService } from "../../serverSettings"; import { ProviderRegistry } from "../Services/ProviderRegistry"; // ── Test helpers ──────────────────────────────────────────────────── @@ -107,7 +104,7 @@ function makeMutableServerSettingsService( start: Effect.void, ready: Effect.void, getSettings: Ref.get(settingsRef), - updateSettings: (patch) => + updateSettings: (patch: ServerSettingsPatch) => Effect.gen(function* () { const current = yield* Ref.get(settingsRef); const next = Schema.decodeSync(ServerSettings)(deepMerge(current, patch)); @@ -116,7 +113,7 @@ function makeMutableServerSettingsService( return next; }), streamChanges: Stream.fromPubSub(changes), - } satisfies ServerSettingsShape; + }; }); } @@ -482,6 +479,62 @@ it.layer(Layer.mergeAll(NodeServices.layer, ServerSettingsService.layerTest()))( ), ), ); + + it.effect("treats login status JSON with authenticated=false as unauthenticated", () => + Effect.gen(function* () { + yield* withTempCodexHome(); + const status = yield* checkCodexProviderStatus(); + assert.strictEqual(status.status, "error"); + assert.strictEqual(status.auth.status, "unauthenticated"); + assert.strictEqual( + status.message, + "Codex CLI is not authenticated. 
Run `codex login` and try again.", + ); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "codex 1.0.0\n", stderr: "", code: 0 }; + if (joined === "login status") { + return { + stdout: '[{"authenticated":false}]\n', + stderr: "", + code: 0, + }; + } + throw new Error(`Unexpected args: ${joined}`); + }), + ), + ), + ); + + it.effect("treats login status JSON without auth markers as a warning", () => + Effect.gen(function* () { + yield* withTempCodexHome(); + const status = yield* checkCodexProviderStatus(); + assert.strictEqual(status.status, "warning"); + assert.strictEqual(status.auth.status, "unknown"); + assert.strictEqual( + status.message, + "Could not verify Codex authentication status from JSON output (missing auth marker).", + ); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "codex 1.0.0\n", stderr: "", code: 0 }; + if (joined === "login status") { + return { + stdout: '[{"ok":true}]\n', + stderr: "", + code: 0, + }; + } + throw new Error(`Unexpected args: ${joined}`); + }), + ), + ), + ); }); describe("ProviderRegistryLive", () => { @@ -518,7 +571,7 @@ it.layer(Layer.mergeAll(NodeServices.layer, ServerSettingsService.layerTest()))( const scope = yield* Scope.make(); yield* Effect.addFinalizer(() => Scope.close(scope, Exit.void)); const providerRegistryLayer = ProviderRegistryLive.pipe( - Layer.provideMerge(Layer.succeed(ServerSettingsService, serverSettings)), + Layer.provideMerge(Layer.succeed(ServerSettingsService, serverSettings as never)), Layer.provideMerge( mockCommandSpawnerLayer((command, args) => { const joined = args.join(" "); @@ -537,7 +590,7 @@ it.layer(Layer.mergeAll(NodeServices.layer, ServerSettingsService.layerTest()))( ); const runtimeServices = yield* Layer.build( Layer.mergeAll( - Layer.succeed(ServerSettingsService, serverSettings), + 
Layer.succeed(ServerSettingsService, serverSettings as never), providerRegistryLayer, ), ).pipe(Scope.provide(scope)); @@ -638,6 +691,57 @@ it.layer(Layer.mergeAll(NodeServices.layer, ServerSettingsService.layerTest()))( ), ); + it.effect("parses commented and whitespace-padded custom model_provider values", () => + Effect.gen(function* () { + yield* withTempCodexHome( + [ + "# This is a comment", + "", + ' model_provider = "azure" ', + "", + "[profiles.deep-review]", + 'model = "gpt-5-pro"', + ].join("\n"), + ); + const status = yield* checkCodexProviderStatus(); + assert.strictEqual(status.status, "ready"); + assert.strictEqual(status.auth.status, "unknown"); + assert.strictEqual( + status.message, + "Using a custom Codex model provider; OpenAI login check skipped.", + ); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "codex 1.0.0\n", stderr: "", code: 0 }; + throw new Error(`Auth probe should have been skipped but got args: ${joined}`); + }), + ), + ), + ); + + it.effect("parses single-quoted custom model_provider values", () => + Effect.gen(function* () { + yield* withTempCodexHome("model_provider = 'mistral'\n"); + const status = yield* checkCodexProviderStatus(); + assert.strictEqual(status.status, "ready"); + assert.strictEqual(status.auth.status, "unknown"); + assert.strictEqual( + status.message, + "Using a custom Codex model provider; OpenAI login check skipped.", + ); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "codex 1.0.0\n", stderr: "", code: 0 }; + throw new Error(`Auth probe should have been skipped but got args: ${joined}`); + }), + ), + ), + ); + it.effect("still reports error when codex CLI is missing even with custom provider", () => Effect.gen(function* () { yield* withTempCodexHome( @@ -676,70 +780,8 @@ it.layer(Layer.mergeAll(NodeServices.layer, 
ServerSettingsService.layerTest()))( ), ), ); - }); - - // ── parseAuthStatusFromOutput pure tests ────────────────────────── - - describe("parseAuthStatusFromOutput", () => { - it("exit code 0 with no auth markers is ready", () => { - const parsed = parseAuthStatusFromOutput({ stdout: "OK\n", stderr: "", code: 0 }); - assert.strictEqual(parsed.status, "ready"); - assert.strictEqual(parsed.auth.status, "authenticated"); - }); - - it("JSON with authenticated=false is unauthenticated", () => { - const parsed = parseAuthStatusFromOutput({ - stdout: '[{"authenticated":false}]\n', - stderr: "", - code: 0, - }); - assert.strictEqual(parsed.status, "error"); - assert.strictEqual(parsed.auth.status, "unauthenticated"); - }); - - it("JSON without auth marker is warning", () => { - const parsed = parseAuthStatusFromOutput({ - stdout: '[{"ok":true}]\n', - stderr: "", - code: 0, - }); - assert.strictEqual(parsed.status, "warning"); - assert.strictEqual(parsed.auth.status, "unknown"); - }); - }); - - // ── readCodexConfigModelProvider tests ───────────────────────────── - - describe("readCodexConfigModelProvider", () => { - it.effect("returns undefined when config file does not exist", () => - Effect.gen(function* () { - yield* withTempCodexHome(); - assert.strictEqual(yield* readCodexConfigModelProvider(), undefined); - }), - ); - it.effect("returns undefined when config has no model_provider key", () => - Effect.gen(function* () { - yield* withTempCodexHome('model = "gpt-5-codex"\n'); - assert.strictEqual(yield* readCodexConfigModelProvider(), undefined); - }), - ); - - it.effect("returns the provider when model_provider is set at top level", () => - Effect.gen(function* () { - yield* withTempCodexHome('model = "gpt-5-codex"\nmodel_provider = "portkey"\n'); - assert.strictEqual(yield* readCodexConfigModelProvider(), "portkey"); - }), - ); - - it.effect("returns openai when model_provider is openai", () => - Effect.gen(function* () { - yield* withTempCodexHome('model_provider 
= "openai"\n'); - assert.strictEqual(yield* readCodexConfigModelProvider(), "openai"); - }), - ); - - it.effect("ignores model_provider inside section headers", () => + it.effect("ignores model_provider declarations inside TOML sections", () => Effect.gen(function* () { yield* withTempCodexHome( [ @@ -751,84 +793,20 @@ it.layer(Layer.mergeAll(NodeServices.layer, ServerSettingsService.layerTest()))( "", ].join("\n"), ); - assert.strictEqual(yield* readCodexConfigModelProvider(), undefined); - }), - ); - - it.effect("handles comments and whitespace", () => - Effect.gen(function* () { - yield* withTempCodexHome( - [ - "# This is a comment", - "", - ' model_provider = "azure" ', - "", - "[profiles.deep-review]", - 'model = "gpt-5-pro"', - ].join("\n"), - ); - assert.strictEqual(yield* readCodexConfigModelProvider(), "azure"); - }), - ); - - it.effect("handles single-quoted values in TOML", () => - Effect.gen(function* () { - yield* withTempCodexHome("model_provider = 'mistral'\n"); - assert.strictEqual(yield* readCodexConfigModelProvider(), "mistral"); - }), - ); - }); - - // ── hasCustomModelProvider tests ─────────────────────────────────── - - describe("hasCustomModelProvider", () => { - it.effect("returns false when no config file exists", () => - Effect.gen(function* () { - yield* withTempCodexHome(); - assert.strictEqual(yield* hasCustomModelProvider, false); - }), - ); - - it.effect("returns false when model_provider is not set", () => - Effect.gen(function* () { - yield* withTempCodexHome('model = "gpt-5-codex"\n'); - assert.strictEqual(yield* hasCustomModelProvider, false); - }), - ); - - it.effect("returns false when model_provider is openai", () => - Effect.gen(function* () { - yield* withTempCodexHome('model_provider = "openai"\n'); - assert.strictEqual(yield* hasCustomModelProvider, false); - }), - ); - - it.effect("returns true when model_provider is portkey", () => - Effect.gen(function* () { - yield* withTempCodexHome('model_provider = "portkey"\n'); - 
assert.strictEqual(yield* hasCustomModelProvider, true); - }), - ); - - it.effect("returns true when model_provider is azure", () => - Effect.gen(function* () { - yield* withTempCodexHome('model_provider = "azure"\n'); - assert.strictEqual(yield* hasCustomModelProvider, true); - }), - ); - - it.effect("returns true when model_provider is ollama", () => - Effect.gen(function* () { - yield* withTempCodexHome('model_provider = "ollama"\n'); - assert.strictEqual(yield* hasCustomModelProvider, true); - }), - ); - - it.effect("returns true when model_provider is a custom proxy", () => - Effect.gen(function* () { - yield* withTempCodexHome('model_provider = "my-company-proxy"\n'); - assert.strictEqual(yield* hasCustomModelProvider, true); - }), + const status = yield* checkCodexProviderStatus(); + assert.strictEqual(status.status, "error"); + assert.strictEqual(status.auth.status, "unauthenticated"); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "codex 1.0.0\n", stderr: "", code: 0 }; + if (joined === "login status") + return { stdout: "Not logged in\n", stderr: "", code: 1 }; + throw new Error(`Unexpected args: ${joined}`); + }), + ), + ), ); }); @@ -1012,46 +990,34 @@ it.layer(Layer.mergeAll(NodeServices.layer, ServerSettingsService.layerTest()))( ), ), ); - }); - - // ── parseClaudeAuthStatusFromOutput pure tests ──────────────────── - - describe("parseClaudeAuthStatusFromOutput", () => { - it("exit code 0 with no auth markers is ready", () => { - const parsed = parseClaudeAuthStatusFromOutput({ stdout: "OK\n", stderr: "", code: 0 }); - assert.strictEqual(parsed.status, "ready"); - assert.strictEqual(parsed.auth.status, "authenticated"); - }); - - it("JSON with loggedIn=true is authenticated", () => { - const parsed = parseClaudeAuthStatusFromOutput({ - stdout: '{"loggedIn":true,"authMethod":"claude.ai"}\n', - stderr: "", - code: 0, - }); - assert.strictEqual(parsed.status, 
"ready"); - assert.strictEqual(parsed.auth.status, "authenticated"); - }); - - it("JSON with loggedIn=false is unauthenticated", () => { - const parsed = parseClaudeAuthStatusFromOutput({ - stdout: '{"loggedIn":false}\n', - stderr: "", - code: 0, - }); - assert.strictEqual(parsed.status, "error"); - assert.strictEqual(parsed.auth.status, "unauthenticated"); - }); - it("JSON without auth marker is warning", () => { - const parsed = parseClaudeAuthStatusFromOutput({ - stdout: '{"ok":true}\n', - stderr: "", - code: 0, - }); - assert.strictEqual(parsed.status, "warning"); - assert.strictEqual(parsed.auth.status, "unknown"); - }); + it.effect("returns warning when auth status emits malformed json", () => + Effect.gen(function* () { + const status = yield* checkClaudeProviderStatus(); + assert.strictEqual(status.provider, "claudeAgent"); + assert.strictEqual(status.status, "warning"); + assert.strictEqual(status.installed, true); + assert.strictEqual(status.auth.status, "unknown"); + assert.strictEqual( + status.message, + "Could not verify Claude authentication status from JSON output (missing auth marker).", + ); + }).pipe( + Effect.provide( + mockSpawnerLayer((args) => { + const joined = args.join(" "); + if (joined === "--version") return { stdout: "1.0.0\n", stderr: "", code: 0 }; + if (joined === "auth status") + return { + stdout: '{"loggedIn":true\n', + stderr: "", + code: 0, + }; + throw new Error(`Unexpected args: ${joined}`); + }), + ), + ), + ); }); }, ); diff --git a/apps/server/src/provider/Layers/ProviderRegistry.ts b/apps/server/src/provider/Layers/ProviderRegistry.ts index fb2f33c293..e095815fb5 100644 --- a/apps/server/src/provider/Layers/ProviderRegistry.ts +++ b/apps/server/src/provider/Layers/ProviderRegistry.ts @@ -4,7 +4,7 @@ * @module ProviderRegistryLive */ import type { ProviderKind, ServerProvider } from "@t3tools/contracts"; -import { Effect, Equal, Layer, PubSub, Ref, Stream } from "effect"; +import { Effect, Layer, PubSub, Ref, Stream } from 
"effect"; import { ClaudeProviderLive } from "./ClaudeProvider"; import { CodexProviderLive } from "./CodexProvider"; @@ -13,6 +13,7 @@ import { ClaudeProvider } from "../Services/ClaudeProvider"; import type { CodexProviderShape } from "../Services/CodexProvider"; import { CodexProvider } from "../Services/CodexProvider"; import { ProviderRegistry, type ProviderRegistryShape } from "../Services/ProviderRegistry"; +import { haveProvidersChanged } from "./ProviderRegistry.shared"; const loadProviders = ( codexProvider: CodexProviderShape, @@ -22,11 +23,6 @@ const loadProviders = ( concurrency: "unbounded", }); -export const haveProvidersChanged = ( - previousProviders: ReadonlyArray, - nextProviders: ReadonlyArray, -): boolean => !Equal.equals(previousProviders, nextProviders); - export const ProviderRegistryLive = Layer.effect( ProviderRegistry, Effect.gen(function* () { diff --git a/apps/server/src/provider/Layers/ProviderService.test.ts b/apps/server/src/provider/Layers/ProviderService.test.ts index cd6c81405b..e8261d66cd 100644 --- a/apps/server/src/provider/Layers/ProviderService.test.ts +++ b/apps/server/src/provider/Layers/ProviderService.test.ts @@ -41,7 +41,7 @@ import { ProviderSessionRuntimeRepository } from "../../persistence/Services/Pro import { makeSqlitePersistenceLive, SqlitePersistenceMemory, -} from "../../persistence/Layers/Sqlite.ts"; +} from "../../persistence/Layers/Sqlite.testing.ts"; import { ServerSettingsService } from "../../serverSettings.ts"; import { AnalyticsService } from "../../telemetry/Services/AnalyticsService.ts"; diff --git a/apps/server/src/provider/Layers/ProviderService.ts b/apps/server/src/provider/Layers/ProviderService.ts index da2c293fac..870c0dccbe 100644 --- a/apps/server/src/provider/Layers/ProviderService.ts +++ b/apps/server/src/provider/Layers/ProviderService.ts @@ -45,7 +45,7 @@ import { type EventNdjsonLogger, makeEventNdjsonLogger } from "./EventNdjsonLogg import { AnalyticsService } from 
"../../telemetry/Services/AnalyticsService.ts"; import { ServerSettingsService } from "../../serverSettings.ts"; -export interface ProviderServiceLiveOptions { +interface ProviderServiceLiveOptions { readonly canonicalEventLogPath?: string; readonly canonicalEventLogger?: EventNdjsonLogger; } @@ -209,11 +209,6 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( readonly binding: ProviderRuntimeBinding; readonly operation: string; }) { - yield* Effect.annotateCurrentSpan({ - "provider.operation": "recover-session", - "provider.kind": input.binding.provider, - "provider.thread_id": input.binding.threadId, - }); return yield* Effect.gen(function* () { const adapter = yield* registry.getByProvider(input.binding.provider); const hasResumeCursor = @@ -318,12 +313,6 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( threadId, provider: parsed.provider ?? "codex", }; - yield* Effect.annotateCurrentSpan({ - "provider.operation": "start-session", - "provider.kind": input.provider, - "provider.thread_id": threadId, - "provider.runtime_mode": input.runtimeMode, - }); return yield* Effect.gen(function* () { const settings = yield* serverSettings.getSettings.pipe( Effect.mapError((error) => @@ -401,12 +390,6 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( "Either input text or at least one attachment is required", ); } - yield* Effect.annotateCurrentSpan({ - "provider.operation": "send-turn", - "provider.thread_id": input.threadId, - "provider.interaction_mode": input.interactionMode, - "provider.attachment_count": input.attachments.length, - }); let metricProvider = "unknown"; let metricModel = input.modelSelection?.model; return yield* Effect.gen(function* () { @@ -417,10 +400,6 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( }); metricProvider = routed.adapter.provider; metricModel = input.modelSelection?.model; - yield* Effect.annotateCurrentSpan({ - "provider.kind": 
routed.adapter.provider, - ...(input.modelSelection?.model ? { "provider.model": input.modelSelection.model } : {}), - }); const turn = yield* routed.adapter.sendTurn(input); yield* directory.upsert({ threadId: input.threadId, @@ -473,12 +452,6 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( allowRecovery: true, }); metricProvider = routed.adapter.provider; - yield* Effect.annotateCurrentSpan({ - "provider.operation": "interrupt-turn", - "provider.kind": routed.adapter.provider, - "provider.thread_id": input.threadId, - "provider.turn_id": input.turnId, - }); yield* routed.adapter.interruptTurn(routed.threadId, input.turnId); yield* analytics.record("provider.turn.interrupted", { provider: routed.adapter.provider, @@ -510,12 +483,6 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( allowRecovery: true, }); metricProvider = routed.adapter.provider; - yield* Effect.annotateCurrentSpan({ - "provider.operation": "respond-to-request", - "provider.kind": routed.adapter.provider, - "provider.thread_id": input.threadId, - "provider.request_id": input.requestId, - }); yield* routed.adapter.respondToRequest(routed.threadId, input.requestId, input.decision); yield* analytics.record("provider.request.responded", { provider: routed.adapter.provider, @@ -549,12 +516,6 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( allowRecovery: true, }); metricProvider = routed.adapter.provider; - yield* Effect.annotateCurrentSpan({ - "provider.operation": "respond-to-user-input", - "provider.kind": routed.adapter.provider, - "provider.thread_id": input.threadId, - "provider.request_id": input.requestId, - }); yield* routed.adapter.respondToUserInput(routed.threadId, input.requestId, input.answers); }).pipe( withMetrics({ @@ -582,11 +543,6 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( allowRecovery: false, }); metricProvider = routed.adapter.provider; - yield* 
Effect.annotateCurrentSpan({ - "provider.operation": "stop-session", - "provider.kind": routed.adapter.provider, - "provider.thread_id": input.threadId, - }); if (routed.isActive) { yield* routed.adapter.stopSession(routed.threadId); } @@ -676,12 +632,6 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( allowRecovery: true, }); metricProvider = routed.adapter.provider; - yield* Effect.annotateCurrentSpan({ - "provider.operation": "rollback-conversation", - "provider.kind": routed.adapter.provider, - "provider.thread_id": input.threadId, - "provider.rollback_turns": input.numTurns, - }); yield* routed.adapter.rollbackThread(routed.threadId, input.numTurns); yield* analytics.record("provider.conversation.rolled_back", { provider: routed.adapter.provider, @@ -757,8 +707,6 @@ const makeProviderService = Effect.fn("makeProviderService")(function* ( } satisfies ProviderServiceShape; }); -export const ProviderServiceLive = Layer.effect(ProviderService, makeProviderService()); - export function makeProviderServiceLive(options?: ProviderServiceLiveOptions) { return Layer.effect(ProviderService, makeProviderService(options)); } diff --git a/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts b/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts index d23b247f21..d217cb8436 100644 --- a/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts +++ b/apps/server/src/provider/Layers/ProviderSessionDirectory.test.ts @@ -12,7 +12,7 @@ import * as SqlClient from "effect/unstable/sql/SqlClient"; import { makeSqlitePersistenceLive, SqlitePersistenceMemory, -} from "../../persistence/Layers/Sqlite.ts"; +} from "../../persistence/Layers/Sqlite.testing.ts"; import { ProviderSessionRuntimeRepositoryLive } from "../../persistence/Layers/ProviderSessionRuntime.ts"; import { ProviderSessionRuntimeRepository } from "../../persistence/Services/ProviderSessionRuntime.ts"; import { ProviderSessionDirectoryPersistenceError } from 
"../Errors.ts"; diff --git a/apps/server/src/provider/Layers/ProviderSessionDirectory.ts b/apps/server/src/provider/Layers/ProviderSessionDirectory.ts index 961c63d696..48db702a9a 100644 --- a/apps/server/src/provider/Layers/ProviderSessionDirectory.ts +++ b/apps/server/src/provider/Layers/ProviderSessionDirectory.ts @@ -158,7 +158,3 @@ export const ProviderSessionDirectoryLive = Layer.effect( ProviderSessionDirectory, makeProviderSessionDirectory, ); - -export function makeProviderSessionDirectoryLive() { - return Layer.effect(ProviderSessionDirectory, makeProviderSessionDirectory); -} diff --git a/apps/server/src/provider/codexAccount.ts b/apps/server/src/provider/codexAccount.ts index 1db00250f6..323c0be31a 100644 --- a/apps/server/src/provider/codexAccount.ts +++ b/apps/server/src/provider/codexAccount.ts @@ -17,8 +17,8 @@ export interface CodexAccountSnapshot { readonly sparkEnabled: boolean; } -export const CODEX_DEFAULT_MODEL = "gpt-5.3-codex"; -export const CODEX_SPARK_MODEL = "gpt-5.3-codex-spark"; +const CODEX_DEFAULT_MODEL = "gpt-5.3-codex"; +const CODEX_SPARK_MODEL = "gpt-5.3-codex-spark"; const CODEX_SPARK_ENABLED_PLAN_TYPES = new Set(["pro"]); function asObject(value: unknown): Record | undefined { diff --git a/apps/server/src/provider/codexCliVersion.ts b/apps/server/src/provider/codexCliVersion.ts index 544020016c..a3fb1b9e7e 100644 --- a/apps/server/src/provider/codexCliVersion.ts +++ b/apps/server/src/provider/codexCliVersion.ts @@ -1,6 +1,6 @@ const CODEX_VERSION_PATTERN = /\bv?(\d+\.\d+(?:\.\d+)?(?:-[0-9A-Za-z.-]+)?)\b/; -export const MINIMUM_CODEX_CLI_VERSION = "0.37.0"; +const MINIMUM_CODEX_CLI_VERSION = "0.37.0"; interface ParsedSemver { readonly major: number; @@ -71,7 +71,7 @@ function comparePrereleaseIdentifier(left: string, right: string): number { return left.localeCompare(right); } -export function compareCodexCliVersions(left: string, right: string): number { +function compareCodexCliVersions(left: string, right: string): number { 
const parsedLeft = parseSemver(left); const parsedRight = parseSemver(right); if (!parsedLeft || !parsedRight) { diff --git a/apps/server/src/provider/providerSnapshot.ts b/apps/server/src/provider/providerSnapshot.ts index e1243c4bd0..a65f6814e1 100644 --- a/apps/server/src/provider/providerSnapshot.ts +++ b/apps/server/src/provider/providerSnapshot.ts @@ -17,7 +17,7 @@ export interface CommandResult { readonly code: number; } -export interface ProviderProbeResult { +interface ProviderProbeResult { readonly installed: boolean; readonly version: string | null; readonly status: Exclude; @@ -25,7 +25,7 @@ export interface ProviderProbeResult { readonly message?: string; } -export function nonEmptyTrimmed(value: string | undefined): string | undefined { +function nonEmptyTrimmed(value: string | undefined): string | undefined { if (!value) return undefined; const trimmed = value.trim(); return trimmed.length > 0 ? trimmed : undefined; @@ -144,9 +144,7 @@ export function buildServerProvider(input: { }; } -export const collectStreamAsString = ( - stream: Stream.Stream, -): Effect.Effect => +const collectStreamAsString = (stream: Stream.Stream): Effect.Effect => stream.pipe( Stream.decodeText(), Stream.runFold( diff --git a/apps/server/src/server.routes.ts b/apps/server/src/server.routes.ts new file mode 100644 index 0000000000..64159b4a39 --- /dev/null +++ b/apps/server/src/server.routes.ts @@ -0,0 +1,11 @@ +import { Layer } from "effect"; + +import { attachmentsRouteLayer, projectFaviconRouteLayer, staticAndDevRouteLayer } from "./http"; +import { websocketRpcRouteLayer } from "./ws"; + +export const makeRoutesLayer = Layer.mergeAll( + attachmentsRouteLayer, + projectFaviconRouteLayer, + staticAndDevRouteLayer, + websocketRpcRouteLayer, +); diff --git a/apps/server/src/server.test.ts b/apps/server/src/server.test.ts index 0e8b909c99..2ae37fea54 100644 --- a/apps/server/src/server.test.ts +++ b/apps/server/src/server.test.ts @@ -25,7 +25,7 @@ import { RpcClient, 
RpcSerialization } from "effect/unstable/rpc"; import type { ServerConfigShape } from "./config.ts"; import { deriveServerPaths, ServerConfig } from "./config.ts"; -import { makeRoutesLayer } from "./server.ts"; +import { makeRoutesLayer } from "./server.routes.ts"; import { resolveAttachmentRelativePath } from "./attachmentPaths.ts"; import { CheckpointDiffQuery, @@ -33,8 +33,8 @@ import { } from "./checkpointing/Services/CheckpointDiffQuery.ts"; import { GitCore, type GitCoreShape } from "./git/Services/GitCore.ts"; import { GitManager, type GitManagerShape } from "./git/Services/GitManager.ts"; -import { Keybindings, type KeybindingsShape } from "./keybindings.ts"; -import { Open, type OpenShape } from "./open.ts"; +import { Keybindings } from "./keybindings.ts"; +import { Open } from "./open.ts"; import { OrchestrationEngineService, type OrchestrationEngineShape, @@ -48,9 +48,9 @@ import { ProviderRegistry, type ProviderRegistryShape, } from "./provider/Services/ProviderRegistry.ts"; -import { ServerLifecycleEvents, type ServerLifecycleEventsShape } from "./serverLifecycleEvents.ts"; -import { ServerRuntimeStartup, type ServerRuntimeStartupShape } from "./serverRuntimeStartup.ts"; -import { ServerSettingsService, type ServerSettingsShape } from "./serverSettings.ts"; +import { ServerLifecycleEvents } from "./serverLifecycleEvents.ts"; +import { ServerRuntimeStartup } from "./serverRuntimeStartup.ts"; +import { ServerSettingsService } from "./serverSettings.ts"; import { TerminalManager, type TerminalManagerShape } from "./terminal/Services/Manager.ts"; import { ProjectFaviconResolverLive } from "./project/Layers/ProjectFaviconResolver.ts"; import { WorkspaceEntriesLive } from "./workspace/Layers/WorkspaceEntries.ts"; @@ -119,18 +119,18 @@ const workspaceAndProjectServicesLayer = Layer.mergeAll( const buildAppUnderTest = (options?: { config?: Partial; layers?: { - keybindings?: Partial; + keybindings?: Partial; providerRegistry?: Partial; - serverSettings?: 
Partial; - open?: Partial; + serverSettings?: Partial; + open?: Partial; gitCore?: Partial; gitManager?: Partial; terminalManager?: Partial; orchestrationEngine?: Partial; projectionSnapshotQuery?: Partial; checkpointDiffQuery?: Partial; - serverLifecycleEvents?: Partial; - serverRuntimeStartup?: Partial; + serverLifecycleEvents?: Partial; + serverRuntimeStartup?: Partial; }; }) => Effect.gen(function* () { diff --git a/apps/server/src/server.ts b/apps/server/src/server.ts index f630444a6a..9a2c0c252f 100644 --- a/apps/server/src/server.ts +++ b/apps/server/src/server.ts @@ -2,9 +2,7 @@ import { Effect, Layer } from "effect"; import { FetchHttpClient, HttpRouter, HttpServer } from "effect/unstable/http"; import { ServerConfig } from "./config"; -import { attachmentsRouteLayer, projectFaviconRouteLayer, staticAndDevRouteLayer } from "./http"; import { fixPath } from "./os-jank"; -import { websocketRpcRouteLayer } from "./ws"; import { OpenLive } from "./open"; import { layerConfig as SqlitePersistenceLayerLive } from "./persistence/Layers/Sqlite"; import { ServerLifecycleEventsLive } from "./serverLifecycleEvents"; @@ -29,6 +27,7 @@ import { RoutingTextGenerationLive } from "./git/Layers/RoutingTextGeneration"; import { TerminalManagerLive } from "./terminal/Layers/Manager"; import { GitManagerLive } from "./git/Layers/GitManager"; import { KeybindingsLive } from "./keybindings"; +import { ServerLoggerLive } from "./serverLogger"; import { ServerRuntimeStartup, ServerRuntimeStartupLive } from "./serverRuntimeStartup"; import { OrchestrationReactorLive } from "./orchestration/Layers/OrchestrationReactor"; import { RuntimeReceiptBusLive } from "./orchestration/Layers/RuntimeReceiptBus"; @@ -41,7 +40,7 @@ import { ProjectFaviconResolverLive } from "./project/Layers/ProjectFaviconResol import { WorkspaceEntriesLive } from "./workspace/Layers/WorkspaceEntries"; import { WorkspaceFileSystemLive } from "./workspace/Layers/WorkspaceFileSystem"; import { WorkspacePathsLive } 
from "./workspace/Layers/WorkspacePaths"; -import { ObservabilityLive } from "./observability/Layers/Observability"; +import { makeRoutesLayer } from "./server.routes"; const PtyAdapterLive = Layer.unwrap( Effect.gen(function* () { @@ -200,14 +199,7 @@ const RuntimeServicesLive = Layer.empty.pipe( Layer.provideMerge(ServerLifecycleEventsLive), ); -export const makeRoutesLayer = Layer.mergeAll( - attachmentsRouteLayer, - projectFaviconRouteLayer, - staticAndDevRouteLayer, - websocketRpcRouteLayer, -); - -export const makeServerLayer = Layer.unwrap( +const makeServerLayer = Layer.unwrap( Effect.gen(function* () { const config = yield* ServerConfig; @@ -231,7 +223,7 @@ export const makeServerLayer = Layer.unwrap( return serverApplicationLayer.pipe( Layer.provideMerge(RuntimeServicesLive), Layer.provideMerge(HttpServerLive), - Layer.provide(ObservabilityLive), + Layer.provide(ServerLoggerLive), Layer.provideMerge(FetchHttpClient.layer), Layer.provideMerge(PlatformServicesLive), ); @@ -239,8 +231,6 @@ export const makeServerLayer = Layer.unwrap( ); // Important: Only `ServerConfig` should be provided by the CLI layer!!! Don't let other requirements leak into the launch layer. 
-export const runServer = Layer.launch(makeServerLayer) satisfies Effect.Effect< - never, - any, - ServerConfig ->; +export const runServer: Effect.Effect = Layer.launch( + makeServerLayer, +).pipe(Effect.orDie); diff --git a/apps/server/src/serverLifecycleEvents.ts b/apps/server/src/serverLifecycleEvents.ts index 4808a19d72..2c0d5f4faa 100644 --- a/apps/server/src/serverLifecycleEvents.ts +++ b/apps/server/src/serverLifecycleEvents.ts @@ -10,7 +10,7 @@ interface SnapshotState { readonly events: ReadonlyArray; } -export interface ServerLifecycleEventsShape { +interface ServerLifecycleEventsShape { readonly publish: (event: LifecycleEventInput) => Effect.Effect; readonly snapshot: Effect.Effect; readonly stream: Stream.Stream; diff --git a/apps/server/src/serverRuntimeStartup.logic.ts b/apps/server/src/serverRuntimeStartup.logic.ts new file mode 100644 index 0000000000..082c8d9f26 --- /dev/null +++ b/apps/server/src/serverRuntimeStartup.logic.ts @@ -0,0 +1,102 @@ +import { Data, Deferred, Effect, Exit, Queue, Ref } from "effect"; + +import { ProjectionSnapshotQuery } from "./orchestration/Services/ProjectionSnapshotQuery"; +import { AnalyticsService } from "./telemetry/Services/AnalyticsService"; + +export class ServerRuntimeStartupError extends Data.TaggedError("ServerRuntimeStartupError")<{ + readonly message: string; + readonly cause?: unknown; +}> {} + +interface QueuedCommand { + readonly run: Effect.Effect; +} + +type CommandReadinessState = "pending" | "ready" | ServerRuntimeStartupError; + +interface CommandGate { + readonly awaitCommandReady: Effect.Effect; + readonly signalCommandReady: Effect.Effect; + readonly failCommandReady: (error: ServerRuntimeStartupError) => Effect.Effect; + readonly enqueueCommand: ( + effect: Effect.Effect, + ) => Effect.Effect; +} + +const settleQueuedCommand = (deferred: Deferred.Deferred, exit: Exit.Exit) => + Exit.isSuccess(exit) + ? 
Deferred.succeed(deferred, exit.value) + : Deferred.failCause(deferred, exit.cause); + +export const makeCommandGate = Effect.gen(function* () { + const commandReady = yield* Deferred.make(); + const commandQueue = yield* Queue.unbounded(); + const commandReadinessState = yield* Ref.make("pending"); + + const commandWorker = Effect.forever( + Queue.take(commandQueue).pipe(Effect.flatMap((command) => command.run)), + ); + yield* Effect.forkScoped(commandWorker); + + return { + awaitCommandReady: Deferred.await(commandReady), + signalCommandReady: Effect.gen(function* () { + yield* Ref.set(commandReadinessState, "ready"); + yield* Deferred.succeed(commandReady, undefined).pipe(Effect.orDie); + }), + failCommandReady: (error) => + Effect.gen(function* () { + yield* Ref.set(commandReadinessState, error); + yield* Deferred.fail(commandReady, error).pipe(Effect.orDie); + }), + enqueueCommand: (effect: Effect.Effect) => + Effect.gen(function* () { + const readinessState = yield* Ref.get(commandReadinessState); + if (readinessState === "ready") { + return yield* effect; + } + if (readinessState !== "pending") { + return yield* readinessState; + } + + const result = yield* Deferred.make(); + yield* Queue.offer(commandQueue, { + run: Deferred.await(commandReady).pipe( + Effect.flatMap(() => effect), + Effect.exit, + Effect.flatMap((exit) => settleQueuedCommand(result, exit)), + ), + }); + return yield* Deferred.await(result); + }), + } satisfies CommandGate; +}); + +const recordStartupHeartbeat = Effect.gen(function* () { + const analytics = yield* AnalyticsService; + const projectionSnapshotQuery = yield* ProjectionSnapshotQuery; + + const { threadCount, projectCount } = yield* projectionSnapshotQuery.getCounts().pipe( + Effect.catch((cause) => + Effect.logWarning("failed to gather startup projection counts for telemetry", { + cause, + }).pipe( + Effect.as({ + threadCount: 0, + projectCount: 0, + }), + ), + ), + ); + + yield* analytics.record("server.boot.heartbeat", { + 
threadCount, + projectCount, + }); +}); + +export const launchStartupHeartbeat = recordStartupHeartbeat.pipe( + Effect.ignoreCause({ log: true }), + Effect.forkScoped, + Effect.asVoid, +); diff --git a/apps/server/src/serverRuntimeStartup.test.ts b/apps/server/src/serverRuntimeStartup.test.ts index fc06d77566..4d9d6f5a26 100644 --- a/apps/server/src/serverRuntimeStartup.test.ts +++ b/apps/server/src/serverRuntimeStartup.test.ts @@ -7,7 +7,7 @@ import { launchStartupHeartbeat, makeCommandGate, ServerRuntimeStartupError, -} from "./serverRuntimeStartup.ts"; +} from "./serverRuntimeStartup.logic.ts"; it.effect("enqueueCommand waits for readiness and then drains queued work", () => Effect.scoped( diff --git a/apps/server/src/serverRuntimeStartup.ts b/apps/server/src/serverRuntimeStartup.ts index 7c9231ac93..8bba553c67 100644 --- a/apps/server/src/serverRuntimeStartup.ts +++ b/apps/server/src/serverRuntimeStartup.ts @@ -5,19 +5,7 @@ import { ProjectId, ThreadId, } from "@t3tools/contracts"; -import { - Data, - Deferred, - Effect, - Exit, - Layer, - Option, - Path, - Queue, - Ref, - Scope, - ServiceMap, -} from "effect"; +import { Deferred, Effect, Exit, Layer, Option, Path, Scope, ServiceMap } from "effect"; import { ServerConfig } from "./config"; import { Keybindings } from "./keybindings"; @@ -27,7 +15,11 @@ import { ProjectionSnapshotQuery } from "./orchestration/Services/ProjectionSnap import { OrchestrationReactor } from "./orchestration/Services/OrchestrationReactor"; import { ServerLifecycleEvents } from "./serverLifecycleEvents"; import { ServerSettingsService } from "./serverSettings"; -import { AnalyticsService } from "./telemetry/Services/AnalyticsService"; +import { + launchStartupHeartbeat, + makeCommandGate, + ServerRuntimeStartupError, +} from "./serverRuntimeStartup.logic"; const isWildcardHost = (host: string | undefined): boolean => host === "0.0.0.0" || host === "::" || host === "[::]"; @@ -35,12 +27,7 @@ const isWildcardHost = (host: string | 
undefined): boolean => const formatHostForUrl = (host: string): string => host.includes(":") && !host.startsWith("[") ? `[${host}]` : host; -export class ServerRuntimeStartupError extends Data.TaggedError("ServerRuntimeStartupError")<{ - readonly message: string; - readonly cause?: unknown; -}> {} - -export interface ServerRuntimeStartupShape { +interface ServerRuntimeStartupShape { readonly awaitCommandReady: Effect.Effect; readonly markHttpListening: Effect.Effect; readonly enqueueCommand: ( @@ -53,101 +40,6 @@ export class ServerRuntimeStartup extends ServiceMap.Service< ServerRuntimeStartupShape >()("t3/serverRuntimeStartup") {} -interface QueuedCommand { - readonly run: Effect.Effect; -} - -type CommandReadinessState = "pending" | "ready" | ServerRuntimeStartupError; - -interface CommandGate { - readonly awaitCommandReady: Effect.Effect; - readonly signalCommandReady: Effect.Effect; - readonly failCommandReady: (error: ServerRuntimeStartupError) => Effect.Effect; - readonly enqueueCommand: ( - effect: Effect.Effect, - ) => Effect.Effect; -} - -const settleQueuedCommand = (deferred: Deferred.Deferred, exit: Exit.Exit) => - Exit.isSuccess(exit) - ? 
Deferred.succeed(deferred, exit.value) - : Deferred.failCause(deferred, exit.cause); - -export const makeCommandGate = Effect.gen(function* () { - const commandReady = yield* Deferred.make(); - const commandQueue = yield* Queue.unbounded(); - const commandReadinessState = yield* Ref.make("pending"); - - const commandWorker = Effect.forever( - Queue.take(commandQueue).pipe(Effect.flatMap((command) => command.run)), - ); - yield* Effect.forkScoped(commandWorker); - - return { - awaitCommandReady: Deferred.await(commandReady), - signalCommandReady: Effect.gen(function* () { - yield* Ref.set(commandReadinessState, "ready"); - yield* Deferred.succeed(commandReady, undefined).pipe(Effect.orDie); - }), - failCommandReady: (error) => - Effect.gen(function* () { - yield* Ref.set(commandReadinessState, error); - yield* Deferred.fail(commandReady, error).pipe(Effect.orDie); - }), - enqueueCommand: (effect: Effect.Effect) => - Effect.gen(function* () { - const readinessState = yield* Ref.get(commandReadinessState); - if (readinessState === "ready") { - return yield* effect; - } - if (readinessState !== "pending") { - return yield* readinessState; - } - - const result = yield* Deferred.make(); - yield* Queue.offer(commandQueue, { - run: Deferred.await(commandReady).pipe( - Effect.flatMap(() => effect), - Effect.exit, - Effect.flatMap((exit) => settleQueuedCommand(result, exit)), - ), - }); - return yield* Deferred.await(result); - }), - } satisfies CommandGate; -}); - -export const recordStartupHeartbeat = Effect.gen(function* () { - const analytics = yield* AnalyticsService; - const projectionSnapshotQuery = yield* ProjectionSnapshotQuery; - - const { threadCount, projectCount } = yield* projectionSnapshotQuery.getCounts().pipe( - Effect.catch((cause) => - Effect.logWarning("failed to gather startup projection counts for telemetry", { - cause, - }).pipe( - Effect.as({ - threadCount: 0, - projectCount: 0, - }), - ), - ), - ); - - yield* analytics.record("server.boot.heartbeat", 
{ - threadCount, - projectCount, - }); -}); - -export const launchStartupHeartbeat = recordStartupHeartbeat.pipe( - Effect.annotateSpans({ "startup.phase": "heartbeat.record" }), - Effect.withSpan("server.startup.heartbeat.record"), - Effect.ignoreCause({ log: true }), - Effect.forkScoped, - Effect.asVoid, -); - const autoBootstrapWelcome = Effect.gen(function* () { const serverConfig = yield* ServerConfig; const projectionReadModelQuery = yield* ProjectionSnapshotQuery; @@ -250,14 +142,7 @@ const maybeOpenBrowser = Effect.gen(function* () { ); }); -const runStartupPhase = (phase: string, effect: Effect.Effect) => - effect.pipe( - Effect.annotateSpans({ "startup.phase": phase }), - Effect.withSpan(`server.startup.${phase}`), - ); - const makeServerRuntimeStartup = Effect.gen(function* () { - const serverConfig = yield* ServerConfig; const keybindings = yield* Keybindings; const orchestrationReactor = yield* OrchestrationReactor; const lifecycleEvents = yield* ServerLifecycleEvents; @@ -271,70 +156,51 @@ const makeServerRuntimeStartup = Effect.gen(function* () { const startup = Effect.gen(function* () { yield* Effect.logDebug("startup phase: starting keybindings runtime"); - yield* runStartupPhase( - "keybindings.start", - keybindings.start.pipe( - Effect.catch((error) => - Effect.logWarning("failed to start keybindings runtime", { - path: error.configPath, - detail: error.detail, - cause: error.cause, - }), - ), - Effect.forkScoped, + yield* keybindings.start.pipe( + Effect.catch((error) => + Effect.logWarning("failed to start keybindings runtime", { + path: error.configPath, + detail: error.detail, + cause: error.cause, + }), ), + Effect.forkScoped, ); yield* Effect.logDebug("startup phase: starting server settings runtime"); - yield* runStartupPhase( - "settings.start", - serverSettings.start.pipe( - Effect.catch((error) => - Effect.logWarning("failed to start server settings runtime", { - path: error.settingsPath, - detail: error.detail, - cause: error.cause, - 
}), - ), - Effect.forkScoped, + yield* serverSettings.start.pipe( + Effect.catch((error) => + Effect.logWarning("failed to start server settings runtime", { + path: error.settingsPath, + detail: error.detail, + cause: error.cause, + }), ), + Effect.forkScoped, ); yield* Effect.logDebug("startup phase: starting orchestration reactors"); - yield* runStartupPhase( - "reactors.start", - orchestrationReactor.start().pipe(Scope.provide(reactorScope)), - ); + yield* orchestrationReactor.start().pipe(Scope.provide(reactorScope)); yield* Effect.logDebug("startup phase: preparing welcome payload"); - const welcome = yield* runStartupPhase("welcome.prepare", autoBootstrapWelcome); + const welcome = yield* autoBootstrapWelcome; yield* Effect.logDebug("startup phase: publishing welcome event", { cwd: welcome.cwd, projectName: welcome.projectName, bootstrapProjectId: welcome.bootstrapProjectId, bootstrapThreadId: welcome.bootstrapThreadId, }); - yield* runStartupPhase( - "welcome.publish", - lifecycleEvents.publish({ - version: 1, - type: "welcome", - payload: welcome, - }), - ); - }).pipe( - Effect.annotateSpans({ - "server.mode": serverConfig.mode, - "server.port": serverConfig.port, - "server.host": serverConfig.host ?? 
"default", - }), - Effect.withSpan("server.startup", { kind: "server", root: true }), - ); + yield* lifecycleEvents.publish({ + version: 1, + type: "welcome", + payload: welcome, + }); + }); yield* Effect.forkScoped( Effect.gen(function* () { const startupExit = yield* Effect.exit(startup); - if (Exit.isFailure(startupExit)) { + if (startupExit._tag === "Failure") { const error = new ServerRuntimeStartupError({ message: "Server runtime startup failed before command readiness.", cause: startupExit.cause, @@ -347,28 +213,25 @@ const makeServerRuntimeStartup = Effect.gen(function* () { yield* Effect.logDebug("Accepting commands"); yield* commandGate.signalCommandReady; yield* Effect.logDebug("startup phase: waiting for http listener"); - yield* runStartupPhase("http.wait", Deferred.await(httpListening)); + yield* Deferred.await(httpListening); yield* Effect.logDebug("startup phase: publishing ready event"); - yield* runStartupPhase( - "ready.publish", - lifecycleEvents.publish({ - version: 1, - type: "ready", - payload: { at: new Date().toISOString() }, - }), - ); + yield* lifecycleEvents.publish({ + version: 1, + type: "ready", + payload: { at: new Date().toISOString() }, + }); yield* Effect.logDebug("startup phase: recording startup heartbeat"); yield* launchStartupHeartbeat; yield* Effect.logDebug("startup phase: browser open check"); - yield* runStartupPhase("browser.open", maybeOpenBrowser); + yield* maybeOpenBrowser; yield* Effect.logDebug("startup phase: complete"); }), ); return { awaitCommandReady: commandGate.awaitCommandReady, - markHttpListening: Deferred.succeed(httpListening, undefined), + markHttpListening: Deferred.succeed(httpListening, undefined).pipe(Effect.asVoid), enqueueCommand: commandGate.enqueueCommand, } satisfies ServerRuntimeStartupShape; }); diff --git a/apps/server/src/serverSettings.ts b/apps/server/src/serverSettings.ts index 79fdc29a8b..ac41323e24 100644 --- a/apps/server/src/serverSettings.ts +++ b/apps/server/src/serverSettings.ts @@ 
-43,7 +43,7 @@ import { ServerConfig } from "./config"; import { type DeepPartial, deepMerge } from "@t3tools/shared/Struct"; import { fromLenientJson } from "@t3tools/shared/schemaJson"; -export interface ServerSettingsShape { +interface ServerSettingsShape { /** Start the settings runtime and attach file watching. */ readonly start: Effect.Effect; diff --git a/apps/server/src/terminal/Layers/Manager.shared.ts b/apps/server/src/terminal/Layers/Manager.shared.ts new file mode 100644 index 0000000000..ee0866c791 --- /dev/null +++ b/apps/server/src/terminal/Layers/Manager.shared.ts @@ -0,0 +1,1820 @@ +import path from "node:path"; + +import { + DEFAULT_TERMINAL_ID, + type TerminalEvent, + type TerminalSessionSnapshot, + type TerminalSessionStatus, +} from "@t3tools/contracts"; +import { makeKeyedCoalescingWorker } from "@t3tools/shared/KeyedCoalescingWorker"; +import { + Data, + Effect, + Encoding, + Equal, + Exit, + Fiber, + FileSystem, + Option, + Scope, + Semaphore, + SynchronizedRef, +} from "effect"; + +import { runProcess } from "../../processRunner"; +import { + TerminalCwdError, + TerminalHistoryError, + TerminalNotRunningError, + TerminalSessionLookupError, + type TerminalManagerShape, +} from "../Services/Manager"; +import { + PtySpawnError, + type PtyAdapterShape, + type PtyExitEvent, + type PtyProcess, +} from "../Services/PTY"; + +const DEFAULT_HISTORY_LINE_LIMIT = 5_000; +const DEFAULT_PERSIST_DEBOUNCE_MS = 40; +const DEFAULT_SUBPROCESS_POLL_INTERVAL_MS = 1_000; +const DEFAULT_PROCESS_KILL_GRACE_MS = 1_000; +const DEFAULT_MAX_RETAINED_INACTIVE_SESSIONS = 128; +const DEFAULT_OPEN_COLS = 120; +const DEFAULT_OPEN_ROWS = 30; +const TERMINAL_ENV_BLOCKLIST = new Set(["PORT", "ELECTRON_RENDERER_PORT", "ELECTRON_RUN_AS_NODE"]); + +type TerminalSubprocessChecker = ( + terminalPid: number, +) => Effect.Effect; + +class TerminalSubprocessCheckError extends Data.TaggedError("TerminalSubprocessCheckError")<{ + readonly message: string; + readonly cause?: unknown; + 
readonly terminalPid: number; + readonly command: "powershell" | "pgrep" | "ps"; +}> {} + +class TerminalProcessSignalError extends Data.TaggedError("TerminalProcessSignalError")<{ + readonly message: string; + readonly cause?: unknown; + readonly signal: "SIGTERM" | "SIGKILL"; +}> {} + +interface ShellCandidate { + shell: string; + args?: string[]; +} + +interface TerminalStartInput { + threadId: string; + terminalId: string; + cwd: string; + cols: number; + rows: number; + env?: Record; +} + +interface TerminalSessionState { + threadId: string; + terminalId: string; + cwd: string; + status: TerminalSessionStatus; + pid: number | null; + history: string; + pendingHistoryControlSequence: string; + pendingProcessEvents: Array; + pendingProcessEventIndex: number; + processEventDrainRunning: boolean; + exitCode: number | null; + exitSignal: number | null; + updatedAt: string; + cols: number; + rows: number; + process: PtyProcess | null; + unsubscribeData: (() => void) | null; + unsubscribeExit: (() => void) | null; + hasRunningSubprocess: boolean; + runtimeEnv: Record | null; +} + +interface PersistHistoryRequest { + history: string; + immediate: boolean; +} + +type PendingProcessEvent = { type: "output"; data: string } | { type: "exit"; event: PtyExitEvent }; + +type DrainProcessEventAction = + | { type: "idle" } + | { + type: "output"; + threadId: string; + terminalId: string; + history: string | null; + data: string; + } + | { + type: "exit"; + process: PtyProcess | null; + threadId: string; + terminalId: string; + exitCode: number | null; + exitSignal: number | null; + }; + +interface TerminalManagerState { + sessions: Map; + killFibers: Map>; +} + +function snapshot(session: TerminalSessionState): TerminalSessionSnapshot { + return { + threadId: session.threadId, + terminalId: session.terminalId, + cwd: session.cwd, + status: session.status, + pid: session.pid, + history: session.history, + exitCode: session.exitCode, + exitSignal: session.exitSignal, + 
updatedAt: session.updatedAt, + }; +} + +function cleanupProcessHandles(session: TerminalSessionState): void { + session.unsubscribeData?.(); + session.unsubscribeData = null; + session.unsubscribeExit?.(); + session.unsubscribeExit = null; +} + +function enqueueProcessEvent( + session: TerminalSessionState, + expectedPid: number, + event: PendingProcessEvent, +): boolean { + if (!session.process || session.status !== "running" || session.pid !== expectedPid) { + return false; + } + + session.pendingProcessEvents.push(event); + if (session.processEventDrainRunning) { + return false; + } + + session.processEventDrainRunning = true; + return true; +} + +function defaultShellResolver(): string { + if (process.platform === "win32") { + return process.env.ComSpec ?? "cmd.exe"; + } + return process.env.SHELL ?? "bash"; +} + +function normalizeShellCommand(value: string | undefined): string | null { + if (!value) return null; + const trimmed = value.trim(); + if (trimmed.length === 0) return null; + + if (process.platform === "win32") { + return trimmed; + } + + const firstToken = trimmed.split(/\s+/g)[0]?.trim(); + if (!firstToken) return null; + return firstToken.replace(/^['"]|['"]$/g, ""); +} + +function shellCandidateFromCommand(command: string | null): ShellCandidate | null { + if (!command || command.length === 0) return null; + const shellName = path.basename(command).toLowerCase(); + if (process.platform !== "win32" && shellName === "zsh") { + return { shell: command, args: ["-o", "nopromptsp"] }; + } + return { shell: command }; +} + +function formatShellCandidate(candidate: ShellCandidate): string { + if (!candidate.args || candidate.args.length === 0) return candidate.shell; + return `${candidate.shell} ${candidate.args.join(" ")}`; +} + +function uniqueShellCandidates(candidates: Array): ShellCandidate[] { + const seen = new Set(); + const ordered: ShellCandidate[] = []; + for (const candidate of candidates) { + if (!candidate) continue; + const key = 
formatShellCandidate(candidate); + if (seen.has(key)) continue; + seen.add(key); + ordered.push(candidate); + } + return ordered; +} + +function resolveShellCandidates(shellResolver: () => string): ShellCandidate[] { + const requested = shellCandidateFromCommand(normalizeShellCommand(shellResolver())); + + if (process.platform === "win32") { + return uniqueShellCandidates([ + requested, + shellCandidateFromCommand(process.env.ComSpec ?? null), + shellCandidateFromCommand("powershell.exe"), + shellCandidateFromCommand("cmd.exe"), + ]); + } + + return uniqueShellCandidates([ + requested, + shellCandidateFromCommand(normalizeShellCommand(process.env.SHELL)), + shellCandidateFromCommand("/bin/zsh"), + shellCandidateFromCommand("/bin/bash"), + shellCandidateFromCommand("/bin/sh"), + shellCandidateFromCommand("zsh"), + shellCandidateFromCommand("bash"), + shellCandidateFromCommand("sh"), + ]); +} + +function isRetryableShellSpawnError(error: PtySpawnError): boolean { + const queue: unknown[] = [error]; + const seen = new Set(); + const messages: string[] = []; + + while (queue.length > 0) { + const current = queue.shift(); + if (!current || seen.has(current)) { + continue; + } + seen.add(current); + + if (typeof current === "string") { + messages.push(current); + continue; + } + + if (current instanceof Error) { + messages.push(current.message); + const cause = (current as { cause?: unknown }).cause; + if (cause) { + queue.push(cause); + } + continue; + } + + if (typeof current === "object") { + const value = current as { message?: unknown; cause?: unknown }; + if (typeof value.message === "string") { + messages.push(value.message); + } + if (value.cause) { + queue.push(value.cause); + } + } + } + + const message = messages.join(" ").toLowerCase(); + return ( + message.includes("posix_spawnp failed") || + message.includes("enoent") || + message.includes("not found") || + message.includes("file not found") || + message.includes("no such file") + ); +} + +function 
checkWindowsSubprocessActivity( + terminalPid: number, +): Effect.Effect { + const command = [ + `$children = Get-CimInstance Win32_Process -Filter "ParentProcessId = ${terminalPid}" -ErrorAction SilentlyContinue`, + "if ($children) { exit 0 }", + "exit 1", + ].join("; "); + return Effect.tryPromise({ + try: () => + runProcess("powershell.exe", ["-NoProfile", "-NonInteractive", "-Command", command], { + timeoutMs: 1_500, + allowNonZeroExit: true, + maxBufferBytes: 32_768, + outputMode: "truncate", + }), + catch: (cause) => + new TerminalSubprocessCheckError({ + message: "Failed to check Windows terminal subprocess activity.", + cause, + terminalPid, + command: "powershell", + }), + }).pipe(Effect.map((result) => result.code === 0)); +} + +const checkPosixSubprocessActivity = Effect.fn("terminal.checkPosixSubprocessActivity")(function* ( + terminalPid: number, +): Effect.fn.Return { + const runPgrep = Effect.tryPromise({ + try: () => + runProcess("pgrep", ["-P", String(terminalPid)], { + timeoutMs: 1_000, + allowNonZeroExit: true, + maxBufferBytes: 32_768, + outputMode: "truncate", + }), + catch: (cause) => + new TerminalSubprocessCheckError({ + message: "Failed to inspect terminal subprocesses with pgrep.", + cause, + terminalPid, + command: "pgrep", + }), + }); + + const runPs = Effect.tryPromise({ + try: () => + runProcess("ps", ["-eo", "pid=,ppid="], { + timeoutMs: 1_000, + allowNonZeroExit: true, + maxBufferBytes: 262_144, + outputMode: "truncate", + }), + catch: (cause) => + new TerminalSubprocessCheckError({ + message: "Failed to inspect terminal subprocesses with ps.", + cause, + terminalPid, + command: "ps", + }), + }); + + const pgrepResult = yield* Effect.exit(runPgrep); + if (pgrepResult._tag === "Success") { + if (pgrepResult.value.code === 0) { + return pgrepResult.value.stdout.trim().length > 0; + } + if (pgrepResult.value.code === 1) { + return false; + } + } + + const psResult = yield* Effect.exit(runPs); + if (psResult._tag === "Failure" || 
psResult.value.code !== 0) { + return false; + } + + for (const line of psResult.value.stdout.split(/\r?\n/g)) { + const [pidRaw, ppidRaw] = line.trim().split(/\s+/g); + const pid = Number(pidRaw); + const ppid = Number(ppidRaw); + if (!Number.isInteger(pid) || !Number.isInteger(ppid)) continue; + if (ppid === terminalPid) { + return true; + } + } + return false; +}); + +const defaultSubprocessChecker = Effect.fn("terminal.defaultSubprocessChecker")(function* ( + terminalPid: number, +): Effect.fn.Return { + if (!Number.isInteger(terminalPid) || terminalPid <= 0) { + return false; + } + if (process.platform === "win32") { + return yield* checkWindowsSubprocessActivity(terminalPid); + } + return yield* checkPosixSubprocessActivity(terminalPid); +}); + +function capHistory(history: string, maxLines: number): string { + if (history.length === 0) return history; + const hasTrailingNewline = history.endsWith("\n"); + const lines = history.split("\n"); + if (hasTrailingNewline) { + lines.pop(); + } + if (lines.length <= maxLines) return history; + const capped = lines.slice(lines.length - maxLines).join("\n"); + return hasTrailingNewline ? 
`${capped}\n` : capped; +} + +function isCsiFinalByte(codePoint: number): boolean { + return codePoint >= 0x40 && codePoint <= 0x7e; +} + +function shouldStripCsiSequence(body: string, finalByte: string): boolean { + if (finalByte === "n") { + return true; + } + if (finalByte === "R" && /^[0-9;?]*$/.test(body)) { + return true; + } + if (finalByte === "c" && /^[>0-9;?]*$/.test(body)) { + return true; + } + return false; +} + +function shouldStripOscSequence(content: string): boolean { + return /^(10|11|12);(?:\?|rgb:)/.test(content); +} + +function stripStringTerminator(value: string): string { + if (value.endsWith("\u001b\\")) { + return value.slice(0, -2); + } + const lastCharacter = value.at(-1); + if (lastCharacter === "\u0007" || lastCharacter === "\u009c") { + return value.slice(0, -1); + } + return value; +} + +function findStringTerminatorIndex(input: string, start: number): number | null { + for (let index = start; index < input.length; index += 1) { + const codePoint = input.charCodeAt(index); + if (codePoint === 0x07 || codePoint === 0x9c) { + return index + 1; + } + if (codePoint === 0x1b && input.charCodeAt(index + 1) === 0x5c) { + return index + 2; + } + } + return null; +} + +function isEscapeIntermediateByte(codePoint: number): boolean { + return codePoint >= 0x20 && codePoint <= 0x2f; +} + +function isEscapeFinalByte(codePoint: number): boolean { + return codePoint >= 0x30 && codePoint <= 0x7e; +} + +function findEscapeSequenceEndIndex(input: string, start: number): number | null { + let cursor = start; + while (cursor < input.length && isEscapeIntermediateByte(input.charCodeAt(cursor))) { + cursor += 1; + } + if (cursor >= input.length) { + return null; + } + return isEscapeFinalByte(input.charCodeAt(cursor)) ? 
cursor + 1 : start + 1; +} + +function sanitizeTerminalHistoryChunk( + pendingControlSequence: string, + data: string, +): { visibleText: string; pendingControlSequence: string } { + const input = `${pendingControlSequence}${data}`; + let visibleText = ""; + let index = 0; + + const append = (value: string) => { + visibleText += value; + }; + + while (index < input.length) { + const codePoint = input.charCodeAt(index); + + if (codePoint === 0x1b) { + const nextCodePoint = input.charCodeAt(index + 1); + if (Number.isNaN(nextCodePoint)) { + return { visibleText, pendingControlSequence: input.slice(index) }; + } + + if (nextCodePoint === 0x5b) { + let cursor = index + 2; + while (cursor < input.length) { + if (isCsiFinalByte(input.charCodeAt(cursor))) { + const sequence = input.slice(index, cursor + 1); + const body = input.slice(index + 2, cursor); + if (!shouldStripCsiSequence(body, input[cursor] ?? "")) { + append(sequence); + } + index = cursor + 1; + break; + } + cursor += 1; + } + if (cursor >= input.length) { + return { visibleText, pendingControlSequence: input.slice(index) }; + } + continue; + } + + if ( + nextCodePoint === 0x5d || + nextCodePoint === 0x50 || + nextCodePoint === 0x5e || + nextCodePoint === 0x5f + ) { + const terminatorIndex = findStringTerminatorIndex(input, index + 2); + if (terminatorIndex === null) { + return { visibleText, pendingControlSequence: input.slice(index) }; + } + const sequence = input.slice(index, terminatorIndex); + const content = stripStringTerminator(input.slice(index + 2, terminatorIndex)); + if (nextCodePoint !== 0x5d || !shouldStripOscSequence(content)) { + append(sequence); + } + index = terminatorIndex; + continue; + } + + const escapeSequenceEndIndex = findEscapeSequenceEndIndex(input, index + 1); + if (escapeSequenceEndIndex === null) { + return { visibleText, pendingControlSequence: input.slice(index) }; + } + append(input.slice(index, escapeSequenceEndIndex)); + index = escapeSequenceEndIndex; + continue; + } + + 
if (codePoint === 0x9b) { + let cursor = index + 1; + while (cursor < input.length) { + if (isCsiFinalByte(input.charCodeAt(cursor))) { + const sequence = input.slice(index, cursor + 1); + const body = input.slice(index + 1, cursor); + if (!shouldStripCsiSequence(body, input[cursor] ?? "")) { + append(sequence); + } + index = cursor + 1; + break; + } + cursor += 1; + } + if (cursor >= input.length) { + return { visibleText, pendingControlSequence: input.slice(index) }; + } + continue; + } + + if (codePoint === 0x9d || codePoint === 0x90 || codePoint === 0x9e || codePoint === 0x9f) { + const terminatorIndex = findStringTerminatorIndex(input, index + 1); + if (terminatorIndex === null) { + return { visibleText, pendingControlSequence: input.slice(index) }; + } + const sequence = input.slice(index, terminatorIndex); + const content = stripStringTerminator(input.slice(index + 1, terminatorIndex)); + if (codePoint !== 0x9d || !shouldStripOscSequence(content)) { + append(sequence); + } + index = terminatorIndex; + continue; + } + + append(input[index] ?? 
""); + index += 1; + } + + return { visibleText, pendingControlSequence: "" }; +} + +function legacySafeThreadId(threadId: string): string { + return threadId.replace(/[^a-zA-Z0-9._-]/g, "_"); +} + +function toSafeThreadId(threadId: string): string { + return `terminal_${Encoding.encodeBase64Url(threadId)}`; +} + +function toSafeTerminalId(terminalId: string): string { + return Encoding.encodeBase64Url(terminalId); +} + +function toSessionKey(threadId: string, terminalId: string): string { + return `${threadId}\u0000${terminalId}`; +} + +function shouldExcludeTerminalEnvKey(key: string): boolean { + const normalizedKey = key.toUpperCase(); + if (normalizedKey.startsWith("T3CODE_")) { + return true; + } + if (normalizedKey.startsWith("VITE_")) { + return true; + } + return TERMINAL_ENV_BLOCKLIST.has(normalizedKey); +} + +function createTerminalSpawnEnv( + baseEnv: NodeJS.ProcessEnv, + runtimeEnv?: Record | null, +): NodeJS.ProcessEnv { + const spawnEnv: NodeJS.ProcessEnv = {}; + for (const [key, value] of Object.entries(baseEnv)) { + if (value === undefined) continue; + if (shouldExcludeTerminalEnvKey(key)) continue; + spawnEnv[key] = value; + } + if (runtimeEnv) { + for (const [key, value] of Object.entries(runtimeEnv)) { + spawnEnv[key] = value; + } + } + return spawnEnv; +} + +function normalizedRuntimeEnv( + env: Record | undefined, +): Record | null { + if (!env) return null; + const entries = Object.entries(env); + if (entries.length === 0) return null; + return Object.fromEntries(entries.toSorted(([left], [right]) => left.localeCompare(right))); +} + +interface TerminalManagerOptions { + logsDir: string; + historyLineLimit?: number; + ptyAdapter: PtyAdapterShape; + shellResolver?: () => string; + subprocessChecker?: TerminalSubprocessChecker; + subprocessPollIntervalMs?: number; + processKillGraceMs?: number; + maxRetainedInactiveSessions?: number; +} + +export const makeTerminalManagerWithOptions = Effect.fn("makeTerminalManagerWithOptions")( + function* 
(options: TerminalManagerOptions) { + const fileSystem = yield* FileSystem.FileSystem; + const services = yield* Effect.services(); + const runFork = Effect.runForkWith(services); + + const logsDir = options.logsDir; + const historyLineLimit = options.historyLineLimit ?? DEFAULT_HISTORY_LINE_LIMIT; + const shellResolver = options.shellResolver ?? defaultShellResolver; + const subprocessChecker = options.subprocessChecker ?? defaultSubprocessChecker; + const subprocessPollIntervalMs = + options.subprocessPollIntervalMs ?? DEFAULT_SUBPROCESS_POLL_INTERVAL_MS; + const processKillGraceMs = options.processKillGraceMs ?? DEFAULT_PROCESS_KILL_GRACE_MS; + const maxRetainedInactiveSessions = + options.maxRetainedInactiveSessions ?? DEFAULT_MAX_RETAINED_INACTIVE_SESSIONS; + + yield* fileSystem.makeDirectory(logsDir, { recursive: true }).pipe(Effect.orDie); + + const managerStateRef = yield* SynchronizedRef.make({ + sessions: new Map(), + killFibers: new Map(), + }); + const threadLocksRef = yield* SynchronizedRef.make(new Map()); + const terminalEventListeners = new Set<(event: TerminalEvent) => Effect.Effect>(); + const workerScope = yield* Scope.make("sequential"); + yield* Effect.addFinalizer(() => Scope.close(workerScope, Exit.void)); + + const publishEvent = (event: TerminalEvent) => + Effect.gen(function* () { + for (const listener of terminalEventListeners) { + yield* listener(event).pipe(Effect.ignoreCause({ log: true })); + } + }); + + const historyPath = (threadId: string, terminalId: string) => { + const threadPart = toSafeThreadId(threadId); + if (terminalId === DEFAULT_TERMINAL_ID) { + return path.join(logsDir, `${threadPart}.log`); + } + return path.join(logsDir, `${threadPart}_${toSafeTerminalId(terminalId)}.log`); + }; + + const legacyHistoryPath = (threadId: string) => + path.join(logsDir, `${legacySafeThreadId(threadId)}.log`); + + const toTerminalHistoryError = + (operation: "read" | "truncate" | "migrate", threadId: string, terminalId: string) => + 
(cause: unknown) => + new TerminalHistoryError({ + operation, + threadId, + terminalId, + cause, + }); + + const readManagerState = SynchronizedRef.get(managerStateRef); + + const modifyManagerState = ( + f: (state: TerminalManagerState) => readonly [A, TerminalManagerState], + ) => SynchronizedRef.modify(managerStateRef, f); + + const getThreadSemaphore = (threadId: string) => + SynchronizedRef.modifyEffect(threadLocksRef, (current) => { + const existing: Option.Option = Option.fromNullishOr( + current.get(threadId), + ); + return Option.match(existing, { + onNone: () => + Semaphore.make(1).pipe( + Effect.map((semaphore) => { + const next = new Map(current); + next.set(threadId, semaphore); + return [semaphore, next] as const; + }), + ), + onSome: (semaphore) => Effect.succeed([semaphore, current] as const), + }); + }); + + const withThreadLock = ( + threadId: string, + effect: Effect.Effect, + ): Effect.Effect => + Effect.flatMap(getThreadSemaphore(threadId), (semaphore) => semaphore.withPermit(effect)); + + const clearKillFiber = Effect.fn("terminal.clearKillFiber")(function* ( + process: PtyProcess | null, + ) { + if (!process) return; + const fiber: Option.Option> = yield* modifyManagerState< + Option.Option> + >((state) => { + const existing: Option.Option> = Option.fromNullishOr( + state.killFibers.get(process), + ); + if (Option.isNone(existing)) { + return [Option.none>(), state] as const; + } + const killFibers = new Map(state.killFibers); + killFibers.delete(process); + return [existing, { ...state, killFibers }] as const; + }); + if (Option.isSome(fiber)) { + yield* Fiber.interrupt(fiber.value).pipe(Effect.ignore); + } + }); + + const registerKillFiber = Effect.fn("terminal.registerKillFiber")(function* ( + process: PtyProcess, + fiber: Fiber.Fiber, + ) { + yield* modifyManagerState((state) => { + const killFibers = new Map(state.killFibers); + killFibers.set(process, fiber); + return [undefined, { ...state, killFibers }] as const; + }); + }); + + const 
runKillEscalation = Effect.fn("terminal.runKillEscalation")(function* ( + process: PtyProcess, + threadId: string, + terminalId: string, + ) { + const terminated = yield* Effect.try({ + try: () => process.kill("SIGTERM"), + catch: (cause) => + new TerminalProcessSignalError({ + message: "Failed to send SIGTERM to terminal process.", + cause, + signal: "SIGTERM", + }), + }).pipe( + Effect.as(true), + Effect.catch((error) => + Effect.logWarning("failed to kill terminal process", { + threadId, + terminalId, + signal: "SIGTERM", + error: error.message, + }).pipe(Effect.as(false)), + ), + ); + if (!terminated) { + return; + } + + yield* Effect.sleep(processKillGraceMs); + + yield* Effect.try({ + try: () => process.kill("SIGKILL"), + catch: (cause) => + new TerminalProcessSignalError({ + message: "Failed to send SIGKILL to terminal process.", + cause, + signal: "SIGKILL", + }), + }).pipe( + Effect.catch((error) => + Effect.logWarning("failed to force-kill terminal process", { + threadId, + terminalId, + signal: "SIGKILL", + error: error.message, + }), + ), + ); + }); + + const startKillEscalation = Effect.fn("terminal.startKillEscalation")(function* ( + process: PtyProcess, + threadId: string, + terminalId: string, + ) { + const fiber = yield* runKillEscalation(process, threadId, terminalId).pipe( + Effect.ensuring( + modifyManagerState((state) => { + if (!state.killFibers.has(process)) { + return [undefined, state] as const; + } + const killFibers = new Map(state.killFibers); + killFibers.delete(process); + return [undefined, { ...state, killFibers }] as const; + }), + ), + Effect.forkIn(workerScope), + ); + + yield* registerKillFiber(process, fiber); + }); + + const persistWorker = yield* makeKeyedCoalescingWorker< + string, + PersistHistoryRequest, + never, + never + >({ + merge: (current, next) => ({ + history: next.history, + immediate: current.immediate || next.immediate, + }), + process: Effect.fn("terminal.persistHistoryWorker")(function* (sessionKey, request) { 
+ if (!request.immediate) { + yield* Effect.sleep(DEFAULT_PERSIST_DEBOUNCE_MS); + } + + const [threadId, terminalId] = sessionKey.split("\u0000"); + if (!threadId || !terminalId) { + return; + } + + yield* fileSystem.writeFileString(historyPath(threadId, terminalId), request.history).pipe( + Effect.catch((error) => + Effect.logWarning("failed to persist terminal history", { + threadId, + terminalId, + error: error instanceof Error ? error.message : String(error), + }), + ), + ); + }), + }); + + const queuePersist = Effect.fn("terminal.queuePersist")(function* ( + threadId: string, + terminalId: string, + history: string, + ) { + yield* persistWorker.enqueue(toSessionKey(threadId, terminalId), { + history, + immediate: false, + }); + }); + + const flushPersist = Effect.fn("terminal.flushPersist")(function* ( + threadId: string, + terminalId: string, + ) { + yield* persistWorker.drainKey(toSessionKey(threadId, terminalId)); + }); + + const persistHistory = Effect.fn("terminal.persistHistory")(function* ( + threadId: string, + terminalId: string, + history: string, + ) { + yield* persistWorker.enqueue(toSessionKey(threadId, terminalId), { + history, + immediate: true, + }); + yield* flushPersist(threadId, terminalId); + }); + + const readHistory = Effect.fn("terminal.readHistory")(function* ( + threadId: string, + terminalId: string, + ) { + const nextPath = historyPath(threadId, terminalId); + if ( + yield* fileSystem + .exists(nextPath) + .pipe(Effect.mapError(toTerminalHistoryError("read", threadId, terminalId))) + ) { + const raw = yield* fileSystem + .readFileString(nextPath) + .pipe(Effect.mapError(toTerminalHistoryError("read", threadId, terminalId))); + const capped = capHistory(raw, historyLineLimit); + if (capped !== raw) { + yield* fileSystem + .writeFileString(nextPath, capped) + .pipe(Effect.mapError(toTerminalHistoryError("truncate", threadId, terminalId))); + } + return capped; + } + + if (terminalId !== DEFAULT_TERMINAL_ID) { + return ""; + } + + const 
legacyPath = legacyHistoryPath(threadId); + if ( + !(yield* fileSystem + .exists(legacyPath) + .pipe(Effect.mapError(toTerminalHistoryError("migrate", threadId, terminalId)))) + ) { + return ""; + } + + const raw = yield* fileSystem + .readFileString(legacyPath) + .pipe(Effect.mapError(toTerminalHistoryError("migrate", threadId, terminalId))); + const capped = capHistory(raw, historyLineLimit); + yield* fileSystem + .writeFileString(nextPath, capped) + .pipe(Effect.mapError(toTerminalHistoryError("migrate", threadId, terminalId))); + yield* fileSystem.remove(legacyPath, { force: true }).pipe( + Effect.catch((cleanupError) => + Effect.logWarning("failed to remove legacy terminal history", { + threadId, + error: cleanupError instanceof Error ? cleanupError.message : String(cleanupError), + }), + ), + ); + return capped; + }); + + const deleteHistory = Effect.fn("terminal.deleteHistory")(function* ( + threadId: string, + terminalId: string, + ) { + yield* fileSystem.remove(historyPath(threadId, terminalId), { force: true }).pipe( + Effect.catch((error) => + Effect.logWarning("failed to delete terminal history", { + threadId, + terminalId, + error: error instanceof Error ? error.message : String(error), + }), + ), + ); + if (terminalId === DEFAULT_TERMINAL_ID) { + yield* fileSystem.remove(legacyHistoryPath(threadId), { force: true }).pipe( + Effect.catch((error) => + Effect.logWarning("failed to delete terminal history", { + threadId, + terminalId, + error: error instanceof Error ? 
error.message : String(error), + }), + ), + ); + } + }); + + const deleteAllHistoryForThread = Effect.fn("terminal.deleteAllHistoryForThread")(function* ( + threadId: string, + ) { + const threadPrefix = `${toSafeThreadId(threadId)}_`; + const entries = yield* fileSystem + .readDirectory(logsDir, { recursive: false }) + .pipe(Effect.catch(() => Effect.succeed([] as Array))); + yield* Effect.forEach( + entries.filter( + (name) => + name === `${toSafeThreadId(threadId)}.log` || + name === `${legacySafeThreadId(threadId)}.log` || + name.startsWith(threadPrefix), + ), + (name) => + fileSystem.remove(path.join(logsDir, name), { force: true }).pipe( + Effect.catch((error) => + Effect.logWarning("failed to delete terminal histories for thread", { + threadId, + error: error instanceof Error ? error.message : String(error), + }), + ), + ), + { discard: true }, + ); + }); + + const assertValidCwd = Effect.fn("terminal.assertValidCwd")(function* (cwd: string) { + const stats = yield* fileSystem.stat(cwd).pipe( + Effect.mapError( + (cause) => + new TerminalCwdError({ + cwd, + reason: cause.reason._tag === "NotFound" ? 
"notFound" : "statFailed", + cause, + }), + ), + ); + if (stats.type !== "Directory") { + return yield* new TerminalCwdError({ + cwd, + reason: "notDirectory", + }); + } + }); + + const getSession = Effect.fn("terminal.getSession")(function* ( + threadId: string, + terminalId: string, + ): Effect.fn.Return> { + return yield* Effect.map(readManagerState, (state) => + Option.fromNullishOr(state.sessions.get(toSessionKey(threadId, terminalId))), + ); + }); + + const requireSession = Effect.fn("terminal.requireSession")(function* ( + threadId: string, + terminalId: string, + ): Effect.fn.Return { + return yield* Effect.flatMap(getSession(threadId, terminalId), (session) => + Option.match(session, { + onNone: () => + Effect.fail( + new TerminalSessionLookupError({ + threadId, + terminalId, + }), + ), + onSome: Effect.succeed, + }), + ); + }); + + const sessionsForThread = Effect.fn("terminal.sessionsForThread")(function* (threadId: string) { + return yield* readManagerState.pipe( + Effect.map((state) => + [...state.sessions.values()].filter((session) => session.threadId === threadId), + ), + ); + }); + + const evictInactiveSessionsIfNeeded = Effect.fn("terminal.evictInactiveSessionsIfNeeded")( + function* () { + yield* modifyManagerState((state) => { + const inactiveSessions = [...state.sessions.values()].filter( + (session) => session.status !== "running", + ); + if (inactiveSessions.length <= maxRetainedInactiveSessions) { + return [undefined, state] as const; + } + + inactiveSessions.sort( + (left, right) => + left.updatedAt.localeCompare(right.updatedAt) || + left.threadId.localeCompare(right.threadId) || + left.terminalId.localeCompare(right.terminalId), + ); + + const sessions = new Map(state.sessions); + + const toEvict = inactiveSessions.length - maxRetainedInactiveSessions; + for (const session of inactiveSessions.slice(0, toEvict)) { + const key = toSessionKey(session.threadId, session.terminalId); + sessions.delete(key); + } + + return [undefined, { ...state, 
sessions }] as const; + }); + }, + ); + + const drainProcessEvents = Effect.fn("terminal.drainProcessEvents")(function* ( + session: TerminalSessionState, + expectedPid: number, + ) { + while (true) { + const action: DrainProcessEventAction = yield* Effect.sync(() => { + if (session.pid !== expectedPid || !session.process || session.status !== "running") { + session.pendingProcessEvents = []; + session.pendingProcessEventIndex = 0; + session.processEventDrainRunning = false; + return { type: "idle" } as const; + } + + const nextEvent = session.pendingProcessEvents[session.pendingProcessEventIndex]; + if (!nextEvent) { + session.pendingProcessEvents = []; + session.pendingProcessEventIndex = 0; + session.processEventDrainRunning = false; + return { type: "idle" } as const; + } + + session.pendingProcessEventIndex += 1; + if (session.pendingProcessEventIndex >= session.pendingProcessEvents.length) { + session.pendingProcessEvents = []; + session.pendingProcessEventIndex = 0; + } + + if (nextEvent.type === "output") { + const sanitized = sanitizeTerminalHistoryChunk( + session.pendingHistoryControlSequence, + nextEvent.data, + ); + session.pendingHistoryControlSequence = sanitized.pendingControlSequence; + if (sanitized.visibleText.length > 0) { + session.history = capHistory( + `${session.history}${sanitized.visibleText}`, + historyLineLimit, + ); + } + session.updatedAt = new Date().toISOString(); + + return { + type: "output", + threadId: session.threadId, + terminalId: session.terminalId, + history: sanitized.visibleText.length > 0 ? 
session.history : null, + data: nextEvent.data, + } as const; + } + + const process = session.process; + cleanupProcessHandles(session); + session.process = null; + session.pid = null; + session.hasRunningSubprocess = false; + session.status = "exited"; + session.pendingHistoryControlSequence = ""; + session.pendingProcessEvents = []; + session.pendingProcessEventIndex = 0; + session.processEventDrainRunning = false; + session.exitCode = Number.isInteger(nextEvent.event.exitCode) + ? nextEvent.event.exitCode + : null; + session.exitSignal = Number.isInteger(nextEvent.event.signal) + ? nextEvent.event.signal + : null; + session.updatedAt = new Date().toISOString(); + + return { + type: "exit", + process, + threadId: session.threadId, + terminalId: session.terminalId, + exitCode: session.exitCode, + exitSignal: session.exitSignal, + } as const; + }); + + if (action.type === "idle") { + return; + } + + if (action.type === "output") { + if (action.history !== null) { + yield* queuePersist(action.threadId, action.terminalId, action.history); + } + + yield* publishEvent({ + type: "output", + threadId: action.threadId, + terminalId: action.terminalId, + createdAt: new Date().toISOString(), + data: action.data, + }); + continue; + } + + yield* clearKillFiber(action.process); + yield* publishEvent({ + type: "exited", + threadId: action.threadId, + terminalId: action.terminalId, + createdAt: new Date().toISOString(), + exitCode: action.exitCode, + exitSignal: action.exitSignal, + }); + yield* evictInactiveSessionsIfNeeded(); + return; + } + }); + + const stopProcess = Effect.fn("terminal.stopProcess")(function* ( + session: TerminalSessionState, + ) { + const process = session.process; + if (!process) return; + + yield* modifyManagerState((state) => { + cleanupProcessHandles(session); + session.process = null; + session.pid = null; + session.hasRunningSubprocess = false; + session.status = "exited"; + session.pendingHistoryControlSequence = ""; + session.pendingProcessEvents 
= []; + session.pendingProcessEventIndex = 0; + session.processEventDrainRunning = false; + session.updatedAt = new Date().toISOString(); + return [undefined, state] as const; + }); + + yield* clearKillFiber(process); + yield* startKillEscalation(process, session.threadId, session.terminalId); + yield* evictInactiveSessionsIfNeeded(); + }); + + const trySpawn = Effect.fn("terminal.trySpawn")(function* ( + shellCandidates: ReadonlyArray, + spawnEnv: NodeJS.ProcessEnv, + session: TerminalSessionState, + index = 0, + lastError: PtySpawnError | null = null, + ): Effect.fn.Return<{ process: PtyProcess; shellLabel: string }, PtySpawnError> { + if (index >= shellCandidates.length) { + const detail = lastError?.message ?? "Failed to spawn PTY process"; + const tried = + shellCandidates.length > 0 + ? ` Tried shells: ${shellCandidates.map((candidate) => formatShellCandidate(candidate)).join(", ")}.` + : ""; + return yield* new PtySpawnError({ + adapter: "terminal-manager", + message: `${detail}.${tried}`.trim(), + ...(lastError ? { cause: lastError } : {}), + }); + } + + const candidate = shellCandidates[index]; + if (!candidate) { + return yield* ( + lastError ?? + new PtySpawnError({ + adapter: "terminal-manager", + message: "No shell candidate available for PTY spawn.", + }) + ); + } + + const attempt = yield* Effect.result( + options.ptyAdapter.spawn({ + shell: candidate.shell, + ...(candidate.args ? 
{ args: candidate.args } : {}), + cwd: session.cwd, + cols: session.cols, + rows: session.rows, + env: spawnEnv, + }), + ); + + if (attempt._tag === "Success") { + return { + process: attempt.success, + shellLabel: formatShellCandidate(candidate), + }; + } + + const spawnError = attempt.failure; + if (!isRetryableShellSpawnError(spawnError)) { + return yield* spawnError; + } + + return yield* trySpawn(shellCandidates, spawnEnv, session, index + 1, spawnError); + }); + + const startSession = Effect.fn("terminal.startSession")(function* ( + session: TerminalSessionState, + input: TerminalStartInput, + eventType: "started" | "restarted", + ) { + yield* stopProcess(session); + + yield* modifyManagerState((state) => { + session.status = "starting"; + session.cwd = input.cwd; + session.cols = input.cols; + session.rows = input.rows; + session.exitCode = null; + session.exitSignal = null; + session.hasRunningSubprocess = false; + session.pendingProcessEvents = []; + session.pendingProcessEventIndex = 0; + session.processEventDrainRunning = false; + session.updatedAt = new Date().toISOString(); + return [undefined, state] as const; + }); + + let ptyProcess: PtyProcess | null = null; + let startedShell: string | null = null; + + const startResult = yield* Effect.result( + Effect.gen(function* () { + const shellCandidates = resolveShellCandidates(shellResolver); + const terminalEnv = createTerminalSpawnEnv(process.env, session.runtimeEnv); + const spawnResult = yield* trySpawn(shellCandidates, terminalEnv, session); + ptyProcess = spawnResult.process; + startedShell = spawnResult.shellLabel; + + const processPid = ptyProcess.pid; + const unsubscribeData = ptyProcess.onData((data) => { + if (!enqueueProcessEvent(session, processPid, { type: "output", data })) { + return; + } + runFork(drainProcessEvents(session, processPid)); + }); + const unsubscribeExit = ptyProcess.onExit((event) => { + if (!enqueueProcessEvent(session, processPid, { type: "exit", event })) { + return; + } 
+ runFork(drainProcessEvents(session, processPid)); + }); + + yield* modifyManagerState((state) => { + session.process = ptyProcess; + session.pid = processPid; + session.status = "running"; + session.updatedAt = new Date().toISOString(); + session.unsubscribeData = unsubscribeData; + session.unsubscribeExit = unsubscribeExit; + return [undefined, state] as const; + }); + + yield* publishEvent({ + type: eventType, + threadId: session.threadId, + terminalId: session.terminalId, + createdAt: new Date().toISOString(), + snapshot: snapshot(session), + }); + }), + ); + + if (startResult._tag === "Success") { + return; + } + + { + const error = startResult.failure; + if (ptyProcess) { + yield* startKillEscalation(ptyProcess, session.threadId, session.terminalId); + } + + yield* modifyManagerState((state) => { + session.status = "error"; + session.pid = null; + session.process = null; + session.unsubscribeData = null; + session.unsubscribeExit = null; + session.hasRunningSubprocess = false; + session.pendingProcessEvents = []; + session.pendingProcessEventIndex = 0; + session.processEventDrainRunning = false; + session.updatedAt = new Date().toISOString(); + return [undefined, state] as const; + }); + + yield* evictInactiveSessionsIfNeeded(); + + const message = error.message; + yield* publishEvent({ + type: "error", + threadId: session.threadId, + terminalId: session.terminalId, + createdAt: new Date().toISOString(), + message, + }); + yield* Effect.logError("failed to start terminal", { + threadId: session.threadId, + terminalId: session.terminalId, + error: message, + ...(startedShell ? 
{ shell: startedShell } : {}), + }); + } + }); + + const closeSession = Effect.fn("terminal.closeSession")(function* ( + threadId: string, + terminalId: string, + deleteHistoryOnClose: boolean, + ) { + const key = toSessionKey(threadId, terminalId); + const session = yield* getSession(threadId, terminalId); + + if (Option.isSome(session)) { + yield* stopProcess(session.value); + yield* persistHistory(threadId, terminalId, session.value.history); + } + + yield* flushPersist(threadId, terminalId); + + yield* modifyManagerState((state) => { + if (!state.sessions.has(key)) { + return [undefined, state] as const; + } + const sessions = new Map(state.sessions); + sessions.delete(key); + return [undefined, { ...state, sessions }] as const; + }); + + if (deleteHistoryOnClose) { + yield* deleteHistory(threadId, terminalId); + } + }); + + const pollSubprocessActivity = Effect.fn("terminal.pollSubprocessActivity")(function* () { + const state = yield* readManagerState; + const runningSessions = [...state.sessions.values()].filter( + (session): session is TerminalSessionState & { pid: number } => + session.status === "running" && Number.isInteger(session.pid), + ); + + if (runningSessions.length === 0) { + return; + } + + const checkSubprocessActivity = Effect.fn("terminal.checkSubprocessActivity")(function* ( + session: TerminalSessionState & { pid: number }, + ) { + const terminalPid = session.pid; + const hasRunningSubprocess = yield* subprocessChecker(terminalPid).pipe( + Effect.map(Option.some), + Effect.catch((error) => + Effect.logWarning("failed to check terminal subprocess activity", { + threadId: session.threadId, + terminalId: session.terminalId, + terminalPid, + error: error instanceof Error ? 
error.message : String(error), + }).pipe(Effect.as(Option.none())), + ), + ); + + if (Option.isNone(hasRunningSubprocess)) { + return; + } + + const event = yield* modifyManagerState((state) => { + const liveSession: Option.Option = Option.fromNullishOr( + state.sessions.get(toSessionKey(session.threadId, session.terminalId)), + ); + if ( + Option.isNone(liveSession) || + liveSession.value.status !== "running" || + liveSession.value.pid !== terminalPid || + liveSession.value.hasRunningSubprocess === hasRunningSubprocess.value + ) { + return [Option.none(), state] as const; + } + + liveSession.value.hasRunningSubprocess = hasRunningSubprocess.value; + liveSession.value.updatedAt = new Date().toISOString(); + + return [ + Option.some({ + type: "activity" as const, + threadId: liveSession.value.threadId, + terminalId: liveSession.value.terminalId, + createdAt: new Date().toISOString(), + hasRunningSubprocess: hasRunningSubprocess.value, + }), + state, + ] as const; + }); + + if (Option.isSome(event)) { + yield* publishEvent(event.value); + } + }); + + yield* Effect.forEach(runningSessions, checkSubprocessActivity, { + concurrency: "unbounded", + discard: true, + }); + }); + + const hasRunningSessions = readManagerState.pipe( + Effect.map((state) => + [...state.sessions.values()].some((session) => session.status === "running"), + ), + ); + + yield* Effect.forever( + hasRunningSessions.pipe( + Effect.flatMap((active) => + active + ? 
pollSubprocessActivity().pipe( + Effect.flatMap(() => Effect.sleep(subprocessPollIntervalMs)), + ) + : Effect.sleep(subprocessPollIntervalMs), + ), + ), + ).pipe(Effect.forkIn(workerScope)); + + yield* Effect.addFinalizer(() => + Effect.gen(function* () { + const sessions = yield* modifyManagerState( + (state) => + [ + [...state.sessions.values()], + { + ...state, + sessions: new Map(), + }, + ] as const, + ); + + const cleanupSession = Effect.fn("terminal.cleanupSession")(function* ( + session: TerminalSessionState, + ) { + cleanupProcessHandles(session); + if (!session.process) return; + yield* clearKillFiber(session.process); + yield* runKillEscalation(session.process, session.threadId, session.terminalId); + }); + + yield* Effect.forEach(sessions, cleanupSession, { + concurrency: "unbounded", + discard: true, + }); + }).pipe(Effect.ignoreCause({ log: true })), + ); + + const open: TerminalManagerShape["open"] = (input) => + withThreadLock( + input.threadId, + Effect.gen(function* () { + const terminalId = input.terminalId ?? DEFAULT_TERMINAL_ID; + yield* assertValidCwd(input.cwd); + + const sessionKey = toSessionKey(input.threadId, terminalId); + const existing = yield* getSession(input.threadId, terminalId); + if (Option.isNone(existing)) { + yield* flushPersist(input.threadId, terminalId); + const history = yield* readHistory(input.threadId, terminalId); + const cols = input.cols ?? DEFAULT_OPEN_COLS; + const rows = input.rows ?? 
DEFAULT_OPEN_ROWS; + const session: TerminalSessionState = { + threadId: input.threadId, + terminalId, + cwd: input.cwd, + status: "starting", + pid: null, + history, + pendingHistoryControlSequence: "", + pendingProcessEvents: [], + pendingProcessEventIndex: 0, + processEventDrainRunning: false, + exitCode: null, + exitSignal: null, + updatedAt: new Date().toISOString(), + cols, + rows, + process: null, + unsubscribeData: null, + unsubscribeExit: null, + hasRunningSubprocess: false, + runtimeEnv: normalizedRuntimeEnv(input.env), + }; + + const createdSession = session; + yield* modifyManagerState((state) => { + const sessions = new Map(state.sessions); + sessions.set(sessionKey, createdSession); + return [undefined, { ...state, sessions }] as const; + }); + + yield* evictInactiveSessionsIfNeeded(); + yield* startSession( + session, + { + threadId: input.threadId, + terminalId, + cwd: input.cwd, + cols, + rows, + ...(input.env ? { env: input.env } : {}), + }, + "started", + ); + return snapshot(session); + } + + const liveSession = existing.value; + const nextRuntimeEnv = normalizedRuntimeEnv(input.env); + const currentRuntimeEnv = liveSession.runtimeEnv; + const targetCols = input.cols ?? liveSession.cols; + const targetRows = input.rows ?? 
liveSession.rows; + const runtimeEnvChanged = !Equal.equals(currentRuntimeEnv, nextRuntimeEnv); + + if (liveSession.cwd !== input.cwd || runtimeEnvChanged) { + yield* stopProcess(liveSession); + liveSession.cwd = input.cwd; + liveSession.runtimeEnv = nextRuntimeEnv; + liveSession.history = ""; + liveSession.pendingHistoryControlSequence = ""; + liveSession.pendingProcessEvents = []; + liveSession.pendingProcessEventIndex = 0; + liveSession.processEventDrainRunning = false; + yield* persistHistory( + liveSession.threadId, + liveSession.terminalId, + liveSession.history, + ); + } else if (liveSession.status === "exited" || liveSession.status === "error") { + liveSession.runtimeEnv = nextRuntimeEnv; + liveSession.history = ""; + liveSession.pendingHistoryControlSequence = ""; + liveSession.pendingProcessEvents = []; + liveSession.pendingProcessEventIndex = 0; + liveSession.processEventDrainRunning = false; + yield* persistHistory( + liveSession.threadId, + liveSession.terminalId, + liveSession.history, + ); + } + + if (!liveSession.process) { + yield* startSession( + liveSession, + { + threadId: input.threadId, + terminalId, + cwd: input.cwd, + cols: targetCols, + rows: targetRows, + ...(input.env ? { env: input.env } : {}), + }, + "started", + ); + return snapshot(liveSession); + } + + if (liveSession.cols !== targetCols || liveSession.rows !== targetRows) { + liveSession.cols = targetCols; + liveSession.rows = targetRows; + liveSession.updatedAt = new Date().toISOString(); + liveSession.process.resize(targetCols, targetRows); + } + + return snapshot(liveSession); + }), + ); + + const write: TerminalManagerShape["write"] = Effect.fn("terminal.write")(function* (input) { + const terminalId = input.terminalId ?? 
DEFAULT_TERMINAL_ID; + const session = yield* requireSession(input.threadId, terminalId); + const process = session.process; + if (!process || session.status !== "running") { + if (session.status === "exited") return; + return yield* new TerminalNotRunningError({ + threadId: input.threadId, + terminalId, + }); + } + yield* Effect.sync(() => process.write(input.data)); + }); + + const resize: TerminalManagerShape["resize"] = Effect.fn("terminal.resize")(function* (input) { + const terminalId = input.terminalId ?? DEFAULT_TERMINAL_ID; + const session = yield* requireSession(input.threadId, terminalId); + const process = session.process; + if (!process || session.status !== "running") { + return yield* new TerminalNotRunningError({ + threadId: input.threadId, + terminalId, + }); + } + session.cols = input.cols; + session.rows = input.rows; + session.updatedAt = new Date().toISOString(); + yield* Effect.sync(() => process.resize(input.cols, input.rows)); + }); + + const clear: TerminalManagerShape["clear"] = (input) => + withThreadLock( + input.threadId, + Effect.gen(function* () { + const terminalId = input.terminalId ?? DEFAULT_TERMINAL_ID; + const session = yield* requireSession(input.threadId, terminalId); + session.history = ""; + session.pendingHistoryControlSequence = ""; + session.pendingProcessEvents = []; + session.pendingProcessEventIndex = 0; + session.processEventDrainRunning = false; + session.updatedAt = new Date().toISOString(); + yield* persistHistory(input.threadId, terminalId, session.history); + yield* publishEvent({ + type: "cleared", + threadId: input.threadId, + terminalId, + createdAt: new Date().toISOString(), + }); + }), + ); + + const restart: TerminalManagerShape["restart"] = (input) => + withThreadLock( + input.threadId, + Effect.gen(function* () { + const terminalId = input.terminalId ?? 
DEFAULT_TERMINAL_ID; + yield* assertValidCwd(input.cwd); + + const sessionKey = toSessionKey(input.threadId, terminalId); + const existingSession = yield* getSession(input.threadId, terminalId); + let session: TerminalSessionState; + if (Option.isNone(existingSession)) { + const cols = input.cols ?? DEFAULT_OPEN_COLS; + const rows = input.rows ?? DEFAULT_OPEN_ROWS; + session = { + threadId: input.threadId, + terminalId, + cwd: input.cwd, + status: "starting", + pid: null, + history: "", + pendingHistoryControlSequence: "", + pendingProcessEvents: [], + pendingProcessEventIndex: 0, + processEventDrainRunning: false, + exitCode: null, + exitSignal: null, + updatedAt: new Date().toISOString(), + cols, + rows, + process: null, + unsubscribeData: null, + unsubscribeExit: null, + hasRunningSubprocess: false, + runtimeEnv: normalizedRuntimeEnv(input.env), + }; + const createdSession = session; + yield* modifyManagerState((state) => { + const sessions = new Map(state.sessions); + sessions.set(sessionKey, createdSession); + return [undefined, { ...state, sessions }] as const; + }); + yield* evictInactiveSessionsIfNeeded(); + } else { + session = existingSession.value; + yield* stopProcess(session); + session.cwd = input.cwd; + session.runtimeEnv = normalizedRuntimeEnv(input.env); + } + + const cols = input.cols ?? session.cols; + const rows = input.rows ?? session.rows; + + session.history = ""; + session.pendingHistoryControlSequence = ""; + session.pendingProcessEvents = []; + session.pendingProcessEventIndex = 0; + session.processEventDrainRunning = false; + yield* persistHistory(input.threadId, terminalId, session.history); + yield* startSession( + session, + { + threadId: input.threadId, + terminalId, + cwd: input.cwd, + cols, + rows, + ...(input.env ? 
{ env: input.env } : {}), + }, + "restarted", + ); + return snapshot(session); + }), + ); + + const close: TerminalManagerShape["close"] = (input) => + withThreadLock( + input.threadId, + Effect.gen(function* () { + if (input.terminalId) { + yield* closeSession(input.threadId, input.terminalId, input.deleteHistory === true); + return; + } + + const threadSessions = yield* sessionsForThread(input.threadId); + yield* Effect.forEach( + threadSessions, + (session) => closeSession(input.threadId, session.terminalId, false), + { discard: true }, + ); + + if (input.deleteHistory) { + yield* deleteAllHistoryForThread(input.threadId); + } + }), + ); + + return { + open, + write, + resize, + clear, + restart, + close, + subscribe: (listener) => + Effect.sync(() => { + terminalEventListeners.add(listener); + return () => { + terminalEventListeners.delete(listener); + }; + }), + } satisfies TerminalManagerShape; + }, +); diff --git a/apps/server/src/terminal/Layers/Manager.test.ts b/apps/server/src/terminal/Layers/Manager.test.ts index ccdd477178..a1770bd10c 100644 --- a/apps/server/src/terminal/Layers/Manager.test.ts +++ b/apps/server/src/terminal/Layers/Manager.test.ts @@ -32,7 +32,7 @@ import { type PtySpawnInput, PtySpawnError, } from "../Services/PTY"; -import { makeTerminalManagerWithOptions } from "./Manager"; +import { makeTerminalManagerWithOptions } from "./Manager.shared"; class FakePtyProcess implements PtyProcess { readonly writes: string[] = []; diff --git a/apps/server/src/terminal/Layers/Manager.ts b/apps/server/src/terminal/Layers/Manager.ts index a076e7600b..b87d987be4 100644 --- a/apps/server/src/terminal/Layers/Manager.ts +++ b/apps/server/src/terminal/Layers/Manager.ts @@ -1,653 +1,8 @@ -import path from "node:path"; - -import { - DEFAULT_TERMINAL_ID, - type TerminalEvent, - type TerminalSessionSnapshot, - type TerminalSessionStatus, -} from "@t3tools/contracts"; -import { makeKeyedCoalescingWorker } from "@t3tools/shared/KeyedCoalescingWorker"; -import { 
- Data, - Effect, - Encoding, - Equal, - Exit, - Fiber, - FileSystem, - Layer, - Option, - Scope, - Semaphore, - SynchronizedRef, -} from "effect"; - +import { Effect, Layer } from "effect"; +import { makeTerminalManagerWithOptions } from "./Manager.shared"; import { ServerConfig } from "../../config"; -import { - increment, - terminalRestartsTotal, - terminalSessionsTotal, -} from "../../observability/Metrics"; -import { runProcess } from "../../processRunner"; -import { - TerminalCwdError, - TerminalHistoryError, - TerminalManager, - TerminalNotRunningError, - TerminalSessionLookupError, - type TerminalManagerShape, -} from "../Services/Manager"; -import { - PtyAdapter, - PtySpawnError, - type PtyAdapterShape, - type PtyExitEvent, - type PtyProcess, -} from "../Services/PTY"; - -const DEFAULT_HISTORY_LINE_LIMIT = 5_000; -const DEFAULT_PERSIST_DEBOUNCE_MS = 40; -const DEFAULT_SUBPROCESS_POLL_INTERVAL_MS = 1_000; -const DEFAULT_PROCESS_KILL_GRACE_MS = 1_000; -const DEFAULT_MAX_RETAINED_INACTIVE_SESSIONS = 128; -const DEFAULT_OPEN_COLS = 120; -const DEFAULT_OPEN_ROWS = 30; -const TERMINAL_ENV_BLOCKLIST = new Set(["PORT", "ELECTRON_RENDERER_PORT", "ELECTRON_RUN_AS_NODE"]); - -type TerminalSubprocessChecker = ( - terminalPid: number, -) => Effect.Effect; - -class TerminalSubprocessCheckError extends Data.TaggedError("TerminalSubprocessCheckError")<{ - readonly message: string; - readonly cause?: unknown; - readonly terminalPid: number; - readonly command: "powershell" | "pgrep" | "ps"; -}> {} - -class TerminalProcessSignalError extends Data.TaggedError("TerminalProcessSignalError")<{ - readonly message: string; - readonly cause?: unknown; - readonly signal: "SIGTERM" | "SIGKILL"; -}> {} - -interface ShellCandidate { - shell: string; - args?: string[]; -} - -interface TerminalStartInput { - threadId: string; - terminalId: string; - cwd: string; - cols: number; - rows: number; - env?: Record; -} - -interface TerminalSessionState { - threadId: string; - terminalId: 
string; - cwd: string; - status: TerminalSessionStatus; - pid: number | null; - history: string; - pendingHistoryControlSequence: string; - pendingProcessEvents: Array; - pendingProcessEventIndex: number; - processEventDrainRunning: boolean; - exitCode: number | null; - exitSignal: number | null; - updatedAt: string; - cols: number; - rows: number; - process: PtyProcess | null; - unsubscribeData: (() => void) | null; - unsubscribeExit: (() => void) | null; - hasRunningSubprocess: boolean; - runtimeEnv: Record | null; -} - -interface PersistHistoryRequest { - history: string; - immediate: boolean; -} - -type PendingProcessEvent = { type: "output"; data: string } | { type: "exit"; event: PtyExitEvent }; - -type DrainProcessEventAction = - | { type: "idle" } - | { - type: "output"; - threadId: string; - terminalId: string; - history: string | null; - data: string; - } - | { - type: "exit"; - process: PtyProcess | null; - threadId: string; - terminalId: string; - exitCode: number | null; - exitSignal: number | null; - }; - -interface TerminalManagerState { - sessions: Map; - killFibers: Map>; -} - -function snapshot(session: TerminalSessionState): TerminalSessionSnapshot { - return { - threadId: session.threadId, - terminalId: session.terminalId, - cwd: session.cwd, - status: session.status, - pid: session.pid, - history: session.history, - exitCode: session.exitCode, - exitSignal: session.exitSignal, - updatedAt: session.updatedAt, - }; -} - -function cleanupProcessHandles(session: TerminalSessionState): void { - session.unsubscribeData?.(); - session.unsubscribeData = null; - session.unsubscribeExit?.(); - session.unsubscribeExit = null; -} - -function enqueueProcessEvent( - session: TerminalSessionState, - expectedPid: number, - event: PendingProcessEvent, -): boolean { - if (!session.process || session.status !== "running" || session.pid !== expectedPid) { - return false; - } - - session.pendingProcessEvents.push(event); - if (session.processEventDrainRunning) { - 
return false; - } - - session.processEventDrainRunning = true; - return true; -} - -function defaultShellResolver(): string { - if (process.platform === "win32") { - return process.env.ComSpec ?? "cmd.exe"; - } - return process.env.SHELL ?? "bash"; -} - -function normalizeShellCommand(value: string | undefined): string | null { - if (!value) return null; - const trimmed = value.trim(); - if (trimmed.length === 0) return null; - - if (process.platform === "win32") { - return trimmed; - } - - const firstToken = trimmed.split(/\s+/g)[0]?.trim(); - if (!firstToken) return null; - return firstToken.replace(/^['"]|['"]$/g, ""); -} - -function shellCandidateFromCommand(command: string | null): ShellCandidate | null { - if (!command || command.length === 0) return null; - const shellName = path.basename(command).toLowerCase(); - if (process.platform !== "win32" && shellName === "zsh") { - return { shell: command, args: ["-o", "nopromptsp"] }; - } - return { shell: command }; -} - -function formatShellCandidate(candidate: ShellCandidate): string { - if (!candidate.args || candidate.args.length === 0) return candidate.shell; - return `${candidate.shell} ${candidate.args.join(" ")}`; -} - -function uniqueShellCandidates(candidates: Array): ShellCandidate[] { - const seen = new Set(); - const ordered: ShellCandidate[] = []; - for (const candidate of candidates) { - if (!candidate) continue; - const key = formatShellCandidate(candidate); - if (seen.has(key)) continue; - seen.add(key); - ordered.push(candidate); - } - return ordered; -} - -function resolveShellCandidates(shellResolver: () => string): ShellCandidate[] { - const requested = shellCandidateFromCommand(normalizeShellCommand(shellResolver())); - - if (process.platform === "win32") { - return uniqueShellCandidates([ - requested, - shellCandidateFromCommand(process.env.ComSpec ?? 
null), - shellCandidateFromCommand("powershell.exe"), - shellCandidateFromCommand("cmd.exe"), - ]); - } - - return uniqueShellCandidates([ - requested, - shellCandidateFromCommand(normalizeShellCommand(process.env.SHELL)), - shellCandidateFromCommand("/bin/zsh"), - shellCandidateFromCommand("/bin/bash"), - shellCandidateFromCommand("/bin/sh"), - shellCandidateFromCommand("zsh"), - shellCandidateFromCommand("bash"), - shellCandidateFromCommand("sh"), - ]); -} - -function isRetryableShellSpawnError(error: PtySpawnError): boolean { - const queue: unknown[] = [error]; - const seen = new Set(); - const messages: string[] = []; - - while (queue.length > 0) { - const current = queue.shift(); - if (!current || seen.has(current)) { - continue; - } - seen.add(current); - - if (typeof current === "string") { - messages.push(current); - continue; - } - - if (current instanceof Error) { - messages.push(current.message); - const cause = (current as { cause?: unknown }).cause; - if (cause) { - queue.push(cause); - } - continue; - } - - if (typeof current === "object") { - const value = current as { message?: unknown; cause?: unknown }; - if (typeof value.message === "string") { - messages.push(value.message); - } - if (value.cause) { - queue.push(value.cause); - } - } - } - - const message = messages.join(" ").toLowerCase(); - return ( - message.includes("posix_spawnp failed") || - message.includes("enoent") || - message.includes("not found") || - message.includes("file not found") || - message.includes("no such file") - ); -} - -function checkWindowsSubprocessActivity( - terminalPid: number, -): Effect.Effect { - const command = [ - `$children = Get-CimInstance Win32_Process -Filter "ParentProcessId = ${terminalPid}" -ErrorAction SilentlyContinue`, - "if ($children) { exit 0 }", - "exit 1", - ].join("; "); - return Effect.tryPromise({ - try: () => - runProcess("powershell.exe", ["-NoProfile", "-NonInteractive", "-Command", command], { - timeoutMs: 1_500, - allowNonZeroExit: 
true, - maxBufferBytes: 32_768, - outputMode: "truncate", - }), - catch: (cause) => - new TerminalSubprocessCheckError({ - message: "Failed to check Windows terminal subprocess activity.", - cause, - terminalPid, - command: "powershell", - }), - }).pipe(Effect.map((result) => result.code === 0)); -} - -const checkPosixSubprocessActivity = Effect.fn("terminal.checkPosixSubprocessActivity")(function* ( - terminalPid: number, -): Effect.fn.Return { - const runPgrep = Effect.tryPromise({ - try: () => - runProcess("pgrep", ["-P", String(terminalPid)], { - timeoutMs: 1_000, - allowNonZeroExit: true, - maxBufferBytes: 32_768, - outputMode: "truncate", - }), - catch: (cause) => - new TerminalSubprocessCheckError({ - message: "Failed to inspect terminal subprocesses with pgrep.", - cause, - terminalPid, - command: "pgrep", - }), - }); - - const runPs = Effect.tryPromise({ - try: () => - runProcess("ps", ["-eo", "pid=,ppid="], { - timeoutMs: 1_000, - allowNonZeroExit: true, - maxBufferBytes: 262_144, - outputMode: "truncate", - }), - catch: (cause) => - new TerminalSubprocessCheckError({ - message: "Failed to inspect terminal subprocesses with ps.", - cause, - terminalPid, - command: "ps", - }), - }); - - const pgrepResult = yield* Effect.exit(runPgrep); - if (pgrepResult._tag === "Success") { - if (pgrepResult.value.code === 0) { - return pgrepResult.value.stdout.trim().length > 0; - } - if (pgrepResult.value.code === 1) { - return false; - } - } - - const psResult = yield* Effect.exit(runPs); - if (psResult._tag === "Failure" || psResult.value.code !== 0) { - return false; - } - - for (const line of psResult.value.stdout.split(/\r?\n/g)) { - const [pidRaw, ppidRaw] = line.trim().split(/\s+/g); - const pid = Number(pidRaw); - const ppid = Number(ppidRaw); - if (!Number.isInteger(pid) || !Number.isInteger(ppid)) continue; - if (ppid === terminalPid) { - return true; - } - } - return false; -}); - -const defaultSubprocessChecker = 
Effect.fn("terminal.defaultSubprocessChecker")(function* ( - terminalPid: number, -): Effect.fn.Return { - if (!Number.isInteger(terminalPid) || terminalPid <= 0) { - return false; - } - if (process.platform === "win32") { - return yield* checkWindowsSubprocessActivity(terminalPid); - } - return yield* checkPosixSubprocessActivity(terminalPid); -}); - -function capHistory(history: string, maxLines: number): string { - if (history.length === 0) return history; - const hasTrailingNewline = history.endsWith("\n"); - const lines = history.split("\n"); - if (hasTrailingNewline) { - lines.pop(); - } - if (lines.length <= maxLines) return history; - const capped = lines.slice(lines.length - maxLines).join("\n"); - return hasTrailingNewline ? `${capped}\n` : capped; -} - -function isCsiFinalByte(codePoint: number): boolean { - return codePoint >= 0x40 && codePoint <= 0x7e; -} - -function shouldStripCsiSequence(body: string, finalByte: string): boolean { - if (finalByte === "n") { - return true; - } - if (finalByte === "R" && /^[0-9;?]*$/.test(body)) { - return true; - } - if (finalByte === "c" && /^[>0-9;?]*$/.test(body)) { - return true; - } - return false; -} - -function shouldStripOscSequence(content: string): boolean { - return /^(10|11|12);(?:\?|rgb:)/.test(content); -} - -function stripStringTerminator(value: string): string { - if (value.endsWith("\u001b\\")) { - return value.slice(0, -2); - } - const lastCharacter = value.at(-1); - if (lastCharacter === "\u0007" || lastCharacter === "\u009c") { - return value.slice(0, -1); - } - return value; -} - -function findStringTerminatorIndex(input: string, start: number): number | null { - for (let index = start; index < input.length; index += 1) { - const codePoint = input.charCodeAt(index); - if (codePoint === 0x07 || codePoint === 0x9c) { - return index + 1; - } - if (codePoint === 0x1b && input.charCodeAt(index + 1) === 0x5c) { - return index + 2; - } - } - return null; -} - -function isEscapeIntermediateByte(codePoint: 
number): boolean { - return codePoint >= 0x20 && codePoint <= 0x2f; -} - -function isEscapeFinalByte(codePoint: number): boolean { - return codePoint >= 0x30 && codePoint <= 0x7e; -} - -function findEscapeSequenceEndIndex(input: string, start: number): number | null { - let cursor = start; - while (cursor < input.length && isEscapeIntermediateByte(input.charCodeAt(cursor))) { - cursor += 1; - } - if (cursor >= input.length) { - return null; - } - return isEscapeFinalByte(input.charCodeAt(cursor)) ? cursor + 1 : start + 1; -} - -function sanitizeTerminalHistoryChunk( - pendingControlSequence: string, - data: string, -): { visibleText: string; pendingControlSequence: string } { - const input = `${pendingControlSequence}${data}`; - let visibleText = ""; - let index = 0; - - const append = (value: string) => { - visibleText += value; - }; - - while (index < input.length) { - const codePoint = input.charCodeAt(index); - - if (codePoint === 0x1b) { - const nextCodePoint = input.charCodeAt(index + 1); - if (Number.isNaN(nextCodePoint)) { - return { visibleText, pendingControlSequence: input.slice(index) }; - } - - if (nextCodePoint === 0x5b) { - let cursor = index + 2; - while (cursor < input.length) { - if (isCsiFinalByte(input.charCodeAt(cursor))) { - const sequence = input.slice(index, cursor + 1); - const body = input.slice(index + 2, cursor); - if (!shouldStripCsiSequence(body, input[cursor] ?? 
"")) { - append(sequence); - } - index = cursor + 1; - break; - } - cursor += 1; - } - if (cursor >= input.length) { - return { visibleText, pendingControlSequence: input.slice(index) }; - } - continue; - } - - if ( - nextCodePoint === 0x5d || - nextCodePoint === 0x50 || - nextCodePoint === 0x5e || - nextCodePoint === 0x5f - ) { - const terminatorIndex = findStringTerminatorIndex(input, index + 2); - if (terminatorIndex === null) { - return { visibleText, pendingControlSequence: input.slice(index) }; - } - const sequence = input.slice(index, terminatorIndex); - const content = stripStringTerminator(input.slice(index + 2, terminatorIndex)); - if (nextCodePoint !== 0x5d || !shouldStripOscSequence(content)) { - append(sequence); - } - index = terminatorIndex; - continue; - } - - const escapeSequenceEndIndex = findEscapeSequenceEndIndex(input, index + 1); - if (escapeSequenceEndIndex === null) { - return { visibleText, pendingControlSequence: input.slice(index) }; - } - append(input.slice(index, escapeSequenceEndIndex)); - index = escapeSequenceEndIndex; - continue; - } - - if (codePoint === 0x9b) { - let cursor = index + 1; - while (cursor < input.length) { - if (isCsiFinalByte(input.charCodeAt(cursor))) { - const sequence = input.slice(index, cursor + 1); - const body = input.slice(index + 1, cursor); - if (!shouldStripCsiSequence(body, input[cursor] ?? 
"")) { - append(sequence); - } - index = cursor + 1; - break; - } - cursor += 1; - } - if (cursor >= input.length) { - return { visibleText, pendingControlSequence: input.slice(index) }; - } - continue; - } - - if (codePoint === 0x9d || codePoint === 0x90 || codePoint === 0x9e || codePoint === 0x9f) { - const terminatorIndex = findStringTerminatorIndex(input, index + 1); - if (terminatorIndex === null) { - return { visibleText, pendingControlSequence: input.slice(index) }; - } - const sequence = input.slice(index, terminatorIndex); - const content = stripStringTerminator(input.slice(index + 1, terminatorIndex)); - if (codePoint !== 0x9d || !shouldStripOscSequence(content)) { - append(sequence); - } - index = terminatorIndex; - continue; - } - - append(input[index] ?? ""); - index += 1; - } - - return { visibleText, pendingControlSequence: "" }; -} - -function legacySafeThreadId(threadId: string): string { - return threadId.replace(/[^a-zA-Z0-9._-]/g, "_"); -} - -function toSafeThreadId(threadId: string): string { - return `terminal_${Encoding.encodeBase64Url(threadId)}`; -} - -function toSafeTerminalId(terminalId: string): string { - return Encoding.encodeBase64Url(terminalId); -} - -function toSessionKey(threadId: string, terminalId: string): string { - return `${threadId}\u0000${terminalId}`; -} - -function shouldExcludeTerminalEnvKey(key: string): boolean { - const normalizedKey = key.toUpperCase(); - if (normalizedKey.startsWith("T3CODE_")) { - return true; - } - if (normalizedKey.startsWith("VITE_")) { - return true; - } - return TERMINAL_ENV_BLOCKLIST.has(normalizedKey); -} - -function createTerminalSpawnEnv( - baseEnv: NodeJS.ProcessEnv, - runtimeEnv?: Record | null, -): NodeJS.ProcessEnv { - const spawnEnv: NodeJS.ProcessEnv = {}; - for (const [key, value] of Object.entries(baseEnv)) { - if (value === undefined) continue; - if (shouldExcludeTerminalEnvKey(key)) continue; - spawnEnv[key] = value; - } - if (runtimeEnv) { - for (const [key, value] of 
Object.entries(runtimeEnv)) { - spawnEnv[key] = value; - } - } - return spawnEnv; -} - -function normalizedRuntimeEnv( - env: Record | undefined, -): Record | null { - if (!env) return null; - const entries = Object.entries(env); - if (entries.length === 0) return null; - return Object.fromEntries(entries.toSorted(([left], [right]) => left.localeCompare(right))); -} - -interface TerminalManagerOptions { - logsDir: string; - historyLineLimit?: number; - ptyAdapter: PtyAdapterShape; - shellResolver?: () => string; - subprocessChecker?: TerminalSubprocessChecker; - subprocessPollIntervalMs?: number; - processKillGraceMs?: number; - maxRetainedInactiveSessions?: number; -} +import { TerminalManager } from "../Services/Manager"; +import { PtyAdapter } from "../Services/PTY"; const makeTerminalManager = Effect.fn("makeTerminalManager")(function* () { const { terminalLogsDir } = yield* ServerConfig; @@ -658,1194 +13,4 @@ const makeTerminalManager = Effect.fn("makeTerminalManager")(function* () { }); }); -export const makeTerminalManagerWithOptions = Effect.fn("makeTerminalManagerWithOptions")( - function* (options: TerminalManagerOptions) { - const fileSystem = yield* FileSystem.FileSystem; - const services = yield* Effect.services(); - const runFork = Effect.runForkWith(services); - - const logsDir = options.logsDir; - const historyLineLimit = options.historyLineLimit ?? DEFAULT_HISTORY_LINE_LIMIT; - const shellResolver = options.shellResolver ?? defaultShellResolver; - const subprocessChecker = options.subprocessChecker ?? defaultSubprocessChecker; - const subprocessPollIntervalMs = - options.subprocessPollIntervalMs ?? DEFAULT_SUBPROCESS_POLL_INTERVAL_MS; - const processKillGraceMs = options.processKillGraceMs ?? DEFAULT_PROCESS_KILL_GRACE_MS; - const maxRetainedInactiveSessions = - options.maxRetainedInactiveSessions ?? 
DEFAULT_MAX_RETAINED_INACTIVE_SESSIONS; - - yield* fileSystem.makeDirectory(logsDir, { recursive: true }).pipe(Effect.orDie); - - const managerStateRef = yield* SynchronizedRef.make({ - sessions: new Map(), - killFibers: new Map(), - }); - const threadLocksRef = yield* SynchronizedRef.make(new Map()); - const terminalEventListeners = new Set<(event: TerminalEvent) => Effect.Effect>(); - const workerScope = yield* Scope.make("sequential"); - yield* Effect.addFinalizer(() => Scope.close(workerScope, Exit.void)); - - const publishEvent = (event: TerminalEvent) => - Effect.gen(function* () { - for (const listener of terminalEventListeners) { - yield* listener(event).pipe(Effect.ignoreCause({ log: true })); - } - }); - - const historyPath = (threadId: string, terminalId: string) => { - const threadPart = toSafeThreadId(threadId); - if (terminalId === DEFAULT_TERMINAL_ID) { - return path.join(logsDir, `${threadPart}.log`); - } - return path.join(logsDir, `${threadPart}_${toSafeTerminalId(terminalId)}.log`); - }; - - const legacyHistoryPath = (threadId: string) => - path.join(logsDir, `${legacySafeThreadId(threadId)}.log`); - - const toTerminalHistoryError = - (operation: "read" | "truncate" | "migrate", threadId: string, terminalId: string) => - (cause: unknown) => - new TerminalHistoryError({ - operation, - threadId, - terminalId, - cause, - }); - - const readManagerState = SynchronizedRef.get(managerStateRef); - - const modifyManagerState = ( - f: (state: TerminalManagerState) => readonly [A, TerminalManagerState], - ) => SynchronizedRef.modify(managerStateRef, f); - - const getThreadSemaphore = (threadId: string) => - SynchronizedRef.modifyEffect(threadLocksRef, (current) => { - const existing: Option.Option = Option.fromNullishOr( - current.get(threadId), - ); - return Option.match(existing, { - onNone: () => - Semaphore.make(1).pipe( - Effect.map((semaphore) => { - const next = new Map(current); - next.set(threadId, semaphore); - return [semaphore, next] as const; - 
}), - ), - onSome: (semaphore) => Effect.succeed([semaphore, current] as const), - }); - }); - - const withThreadLock = ( - threadId: string, - effect: Effect.Effect, - ): Effect.Effect => - Effect.flatMap(getThreadSemaphore(threadId), (semaphore) => semaphore.withPermit(effect)); - - const clearKillFiber = Effect.fn("terminal.clearKillFiber")(function* ( - process: PtyProcess | null, - ) { - if (!process) return; - const fiber: Option.Option> = yield* modifyManagerState< - Option.Option> - >((state) => { - const existing: Option.Option> = Option.fromNullishOr( - state.killFibers.get(process), - ); - if (Option.isNone(existing)) { - return [Option.none>(), state] as const; - } - const killFibers = new Map(state.killFibers); - killFibers.delete(process); - return [existing, { ...state, killFibers }] as const; - }); - if (Option.isSome(fiber)) { - yield* Fiber.interrupt(fiber.value).pipe(Effect.ignore); - } - }); - - const registerKillFiber = Effect.fn("terminal.registerKillFiber")(function* ( - process: PtyProcess, - fiber: Fiber.Fiber, - ) { - yield* modifyManagerState((state) => { - const killFibers = new Map(state.killFibers); - killFibers.set(process, fiber); - return [undefined, { ...state, killFibers }] as const; - }); - }); - - const runKillEscalation = Effect.fn("terminal.runKillEscalation")(function* ( - process: PtyProcess, - threadId: string, - terminalId: string, - ) { - const terminated = yield* Effect.try({ - try: () => process.kill("SIGTERM"), - catch: (cause) => - new TerminalProcessSignalError({ - message: "Failed to send SIGTERM to terminal process.", - cause, - signal: "SIGTERM", - }), - }).pipe( - Effect.as(true), - Effect.catch((error) => - Effect.logWarning("failed to kill terminal process", { - threadId, - terminalId, - signal: "SIGTERM", - error: error.message, - }).pipe(Effect.as(false)), - ), - ); - if (!terminated) { - return; - } - - yield* Effect.sleep(processKillGraceMs); - - yield* Effect.try({ - try: () => process.kill("SIGKILL"), - 
catch: (cause) => - new TerminalProcessSignalError({ - message: "Failed to send SIGKILL to terminal process.", - cause, - signal: "SIGKILL", - }), - }).pipe( - Effect.catch((error) => - Effect.logWarning("failed to force-kill terminal process", { - threadId, - terminalId, - signal: "SIGKILL", - error: error.message, - }), - ), - ); - }); - - const startKillEscalation = Effect.fn("terminal.startKillEscalation")(function* ( - process: PtyProcess, - threadId: string, - terminalId: string, - ) { - const fiber = yield* runKillEscalation(process, threadId, terminalId).pipe( - Effect.ensuring( - modifyManagerState((state) => { - if (!state.killFibers.has(process)) { - return [undefined, state] as const; - } - const killFibers = new Map(state.killFibers); - killFibers.delete(process); - return [undefined, { ...state, killFibers }] as const; - }), - ), - Effect.forkIn(workerScope), - ); - - yield* registerKillFiber(process, fiber); - }); - - const persistWorker = yield* makeKeyedCoalescingWorker< - string, - PersistHistoryRequest, - never, - never - >({ - merge: (current, next) => ({ - history: next.history, - immediate: current.immediate || next.immediate, - }), - process: Effect.fn("terminal.persistHistoryWorker")(function* (sessionKey, request) { - if (!request.immediate) { - yield* Effect.sleep(DEFAULT_PERSIST_DEBOUNCE_MS); - } - - const [threadId, terminalId] = sessionKey.split("\u0000"); - if (!threadId || !terminalId) { - return; - } - - yield* fileSystem.writeFileString(historyPath(threadId, terminalId), request.history).pipe( - Effect.catch((error) => - Effect.logWarning("failed to persist terminal history", { - threadId, - terminalId, - error: error instanceof Error ? 
error.message : String(error), - }), - ), - ); - }), - }); - - const queuePersist = Effect.fn("terminal.queuePersist")(function* ( - threadId: string, - terminalId: string, - history: string, - ) { - yield* persistWorker.enqueue(toSessionKey(threadId, terminalId), { - history, - immediate: false, - }); - }); - - const flushPersist = Effect.fn("terminal.flushPersist")(function* ( - threadId: string, - terminalId: string, - ) { - yield* persistWorker.drainKey(toSessionKey(threadId, terminalId)); - }); - - const persistHistory = Effect.fn("terminal.persistHistory")(function* ( - threadId: string, - terminalId: string, - history: string, - ) { - yield* persistWorker.enqueue(toSessionKey(threadId, terminalId), { - history, - immediate: true, - }); - yield* flushPersist(threadId, terminalId); - }); - - const readHistory = Effect.fn("terminal.readHistory")(function* ( - threadId: string, - terminalId: string, - ) { - const nextPath = historyPath(threadId, terminalId); - if ( - yield* fileSystem - .exists(nextPath) - .pipe(Effect.mapError(toTerminalHistoryError("read", threadId, terminalId))) - ) { - const raw = yield* fileSystem - .readFileString(nextPath) - .pipe(Effect.mapError(toTerminalHistoryError("read", threadId, terminalId))); - const capped = capHistory(raw, historyLineLimit); - if (capped !== raw) { - yield* fileSystem - .writeFileString(nextPath, capped) - .pipe(Effect.mapError(toTerminalHistoryError("truncate", threadId, terminalId))); - } - return capped; - } - - if (terminalId !== DEFAULT_TERMINAL_ID) { - return ""; - } - - const legacyPath = legacyHistoryPath(threadId); - if ( - !(yield* fileSystem - .exists(legacyPath) - .pipe(Effect.mapError(toTerminalHistoryError("migrate", threadId, terminalId)))) - ) { - return ""; - } - - const raw = yield* fileSystem - .readFileString(legacyPath) - .pipe(Effect.mapError(toTerminalHistoryError("migrate", threadId, terminalId))); - const capped = capHistory(raw, historyLineLimit); - yield* fileSystem - 
.writeFileString(nextPath, capped) - .pipe(Effect.mapError(toTerminalHistoryError("migrate", threadId, terminalId))); - yield* fileSystem.remove(legacyPath, { force: true }).pipe( - Effect.catch((cleanupError) => - Effect.logWarning("failed to remove legacy terminal history", { - threadId, - error: cleanupError instanceof Error ? cleanupError.message : String(cleanupError), - }), - ), - ); - return capped; - }); - - const deleteHistory = Effect.fn("terminal.deleteHistory")(function* ( - threadId: string, - terminalId: string, - ) { - yield* fileSystem.remove(historyPath(threadId, terminalId), { force: true }).pipe( - Effect.catch((error) => - Effect.logWarning("failed to delete terminal history", { - threadId, - terminalId, - error: error instanceof Error ? error.message : String(error), - }), - ), - ); - if (terminalId === DEFAULT_TERMINAL_ID) { - yield* fileSystem.remove(legacyHistoryPath(threadId), { force: true }).pipe( - Effect.catch((error) => - Effect.logWarning("failed to delete terminal history", { - threadId, - terminalId, - error: error instanceof Error ? error.message : String(error), - }), - ), - ); - } - }); - - const deleteAllHistoryForThread = Effect.fn("terminal.deleteAllHistoryForThread")(function* ( - threadId: string, - ) { - const threadPrefix = `${toSafeThreadId(threadId)}_`; - const entries = yield* fileSystem - .readDirectory(logsDir, { recursive: false }) - .pipe(Effect.catch(() => Effect.succeed([] as Array))); - yield* Effect.forEach( - entries.filter( - (name) => - name === `${toSafeThreadId(threadId)}.log` || - name === `${legacySafeThreadId(threadId)}.log` || - name.startsWith(threadPrefix), - ), - (name) => - fileSystem.remove(path.join(logsDir, name), { force: true }).pipe( - Effect.catch((error) => - Effect.logWarning("failed to delete terminal histories for thread", { - threadId, - error: error instanceof Error ? 
error.message : String(error), - }), - ), - ), - { discard: true }, - ); - }); - - const assertValidCwd = Effect.fn("terminal.assertValidCwd")(function* (cwd: string) { - const stats = yield* fileSystem.stat(cwd).pipe( - Effect.mapError( - (cause) => - new TerminalCwdError({ - cwd, - reason: cause.reason._tag === "NotFound" ? "notFound" : "statFailed", - cause, - }), - ), - ); - if (stats.type !== "Directory") { - return yield* new TerminalCwdError({ - cwd, - reason: "notDirectory", - }); - } - }); - - const getSession = Effect.fn("terminal.getSession")(function* ( - threadId: string, - terminalId: string, - ): Effect.fn.Return> { - return yield* Effect.map(readManagerState, (state) => - Option.fromNullishOr(state.sessions.get(toSessionKey(threadId, terminalId))), - ); - }); - - const requireSession = Effect.fn("terminal.requireSession")(function* ( - threadId: string, - terminalId: string, - ): Effect.fn.Return { - return yield* Effect.flatMap(getSession(threadId, terminalId), (session) => - Option.match(session, { - onNone: () => - Effect.fail( - new TerminalSessionLookupError({ - threadId, - terminalId, - }), - ), - onSome: Effect.succeed, - }), - ); - }); - - const sessionsForThread = Effect.fn("terminal.sessionsForThread")(function* (threadId: string) { - return yield* readManagerState.pipe( - Effect.map((state) => - [...state.sessions.values()].filter((session) => session.threadId === threadId), - ), - ); - }); - - const evictInactiveSessionsIfNeeded = Effect.fn("terminal.evictInactiveSessionsIfNeeded")( - function* () { - yield* modifyManagerState((state) => { - const inactiveSessions = [...state.sessions.values()].filter( - (session) => session.status !== "running", - ); - if (inactiveSessions.length <= maxRetainedInactiveSessions) { - return [undefined, state] as const; - } - - inactiveSessions.sort( - (left, right) => - left.updatedAt.localeCompare(right.updatedAt) || - left.threadId.localeCompare(right.threadId) || - 
left.terminalId.localeCompare(right.terminalId), - ); - - const sessions = new Map(state.sessions); - - const toEvict = inactiveSessions.length - maxRetainedInactiveSessions; - for (const session of inactiveSessions.slice(0, toEvict)) { - const key = toSessionKey(session.threadId, session.terminalId); - sessions.delete(key); - } - - return [undefined, { ...state, sessions }] as const; - }); - }, - ); - - const drainProcessEvents = Effect.fn("terminal.drainProcessEvents")(function* ( - session: TerminalSessionState, - expectedPid: number, - ) { - while (true) { - const action: DrainProcessEventAction = yield* Effect.sync(() => { - if (session.pid !== expectedPid || !session.process || session.status !== "running") { - session.pendingProcessEvents = []; - session.pendingProcessEventIndex = 0; - session.processEventDrainRunning = false; - return { type: "idle" } as const; - } - - const nextEvent = session.pendingProcessEvents[session.pendingProcessEventIndex]; - if (!nextEvent) { - session.pendingProcessEvents = []; - session.pendingProcessEventIndex = 0; - session.processEventDrainRunning = false; - return { type: "idle" } as const; - } - - session.pendingProcessEventIndex += 1; - if (session.pendingProcessEventIndex >= session.pendingProcessEvents.length) { - session.pendingProcessEvents = []; - session.pendingProcessEventIndex = 0; - } - - if (nextEvent.type === "output") { - const sanitized = sanitizeTerminalHistoryChunk( - session.pendingHistoryControlSequence, - nextEvent.data, - ); - session.pendingHistoryControlSequence = sanitized.pendingControlSequence; - if (sanitized.visibleText.length > 0) { - session.history = capHistory( - `${session.history}${sanitized.visibleText}`, - historyLineLimit, - ); - } - session.updatedAt = new Date().toISOString(); - - return { - type: "output", - threadId: session.threadId, - terminalId: session.terminalId, - history: sanitized.visibleText.length > 0 ? 
session.history : null, - data: nextEvent.data, - } as const; - } - - const process = session.process; - cleanupProcessHandles(session); - session.process = null; - session.pid = null; - session.hasRunningSubprocess = false; - session.status = "exited"; - session.pendingHistoryControlSequence = ""; - session.pendingProcessEvents = []; - session.pendingProcessEventIndex = 0; - session.processEventDrainRunning = false; - session.exitCode = Number.isInteger(nextEvent.event.exitCode) - ? nextEvent.event.exitCode - : null; - session.exitSignal = Number.isInteger(nextEvent.event.signal) - ? nextEvent.event.signal - : null; - session.updatedAt = new Date().toISOString(); - - return { - type: "exit", - process, - threadId: session.threadId, - terminalId: session.terminalId, - exitCode: session.exitCode, - exitSignal: session.exitSignal, - } as const; - }); - - if (action.type === "idle") { - return; - } - - if (action.type === "output") { - if (action.history !== null) { - yield* queuePersist(action.threadId, action.terminalId, action.history); - } - - yield* publishEvent({ - type: "output", - threadId: action.threadId, - terminalId: action.terminalId, - createdAt: new Date().toISOString(), - data: action.data, - }); - continue; - } - - yield* clearKillFiber(action.process); - yield* publishEvent({ - type: "exited", - threadId: action.threadId, - terminalId: action.terminalId, - createdAt: new Date().toISOString(), - exitCode: action.exitCode, - exitSignal: action.exitSignal, - }); - yield* evictInactiveSessionsIfNeeded(); - return; - } - }); - - const stopProcess = Effect.fn("terminal.stopProcess")(function* ( - session: TerminalSessionState, - ) { - const process = session.process; - if (!process) return; - - yield* modifyManagerState((state) => { - cleanupProcessHandles(session); - session.process = null; - session.pid = null; - session.hasRunningSubprocess = false; - session.status = "exited"; - session.pendingHistoryControlSequence = ""; - session.pendingProcessEvents 
= []; - session.pendingProcessEventIndex = 0; - session.processEventDrainRunning = false; - session.updatedAt = new Date().toISOString(); - return [undefined, state] as const; - }); - - yield* clearKillFiber(process); - yield* startKillEscalation(process, session.threadId, session.terminalId); - yield* evictInactiveSessionsIfNeeded(); - }); - - const trySpawn = Effect.fn("terminal.trySpawn")(function* ( - shellCandidates: ReadonlyArray, - spawnEnv: NodeJS.ProcessEnv, - session: TerminalSessionState, - index = 0, - lastError: PtySpawnError | null = null, - ): Effect.fn.Return<{ process: PtyProcess; shellLabel: string }, PtySpawnError> { - if (index >= shellCandidates.length) { - const detail = lastError?.message ?? "Failed to spawn PTY process"; - const tried = - shellCandidates.length > 0 - ? ` Tried shells: ${shellCandidates.map((candidate) => formatShellCandidate(candidate)).join(", ")}.` - : ""; - return yield* new PtySpawnError({ - adapter: "terminal-manager", - message: `${detail}.${tried}`.trim(), - ...(lastError ? { cause: lastError } : {}), - }); - } - - const candidate = shellCandidates[index]; - if (!candidate) { - return yield* ( - lastError ?? - new PtySpawnError({ - adapter: "terminal-manager", - message: "No shell candidate available for PTY spawn.", - }) - ); - } - - const attempt = yield* Effect.result( - options.ptyAdapter.spawn({ - shell: candidate.shell, - ...(candidate.args ? 
{ args: candidate.args } : {}), - cwd: session.cwd, - cols: session.cols, - rows: session.rows, - env: spawnEnv, - }), - ); - - if (attempt._tag === "Success") { - return { - process: attempt.success, - shellLabel: formatShellCandidate(candidate), - }; - } - - const spawnError = attempt.failure; - if (!isRetryableShellSpawnError(spawnError)) { - return yield* spawnError; - } - - return yield* trySpawn(shellCandidates, spawnEnv, session, index + 1, spawnError); - }); - - const startSession = Effect.fn("terminal.startSession")(function* ( - session: TerminalSessionState, - input: TerminalStartInput, - eventType: "started" | "restarted", - ) { - yield* stopProcess(session); - yield* Effect.annotateCurrentSpan({ - "terminal.thread_id": session.threadId, - "terminal.id": session.terminalId, - "terminal.event_type": eventType, - "terminal.cwd": input.cwd, - }); - - yield* modifyManagerState((state) => { - session.status = "starting"; - session.cwd = input.cwd; - session.cols = input.cols; - session.rows = input.rows; - session.exitCode = null; - session.exitSignal = null; - session.hasRunningSubprocess = false; - session.pendingProcessEvents = []; - session.pendingProcessEventIndex = 0; - session.processEventDrainRunning = false; - session.updatedAt = new Date().toISOString(); - return [undefined, state] as const; - }); - - let ptyProcess: PtyProcess | null = null; - let startedShell: string | null = null; - - const startResult = yield* Effect.result( - increment(terminalSessionsTotal, { lifecycle: eventType }).pipe( - Effect.andThen( - Effect.gen(function* () { - const shellCandidates = resolveShellCandidates(shellResolver); - const terminalEnv = createTerminalSpawnEnv(process.env, session.runtimeEnv); - const spawnResult = yield* trySpawn(shellCandidates, terminalEnv, session); - ptyProcess = spawnResult.process; - startedShell = spawnResult.shellLabel; - - const processPid = ptyProcess.pid; - const unsubscribeData = ptyProcess.onData((data) => { - if 
(!enqueueProcessEvent(session, processPid, { type: "output", data })) { - return; - } - runFork(drainProcessEvents(session, processPid)); - }); - const unsubscribeExit = ptyProcess.onExit((event) => { - if (!enqueueProcessEvent(session, processPid, { type: "exit", event })) { - return; - } - runFork(drainProcessEvents(session, processPid)); - }); - - yield* modifyManagerState((state) => { - session.process = ptyProcess; - session.pid = processPid; - session.status = "running"; - session.updatedAt = new Date().toISOString(); - session.unsubscribeData = unsubscribeData; - session.unsubscribeExit = unsubscribeExit; - return [undefined, state] as const; - }); - - yield* publishEvent({ - type: eventType, - threadId: session.threadId, - terminalId: session.terminalId, - createdAt: new Date().toISOString(), - snapshot: snapshot(session), - }); - }), - ), - ), - ); - - if (startResult._tag === "Success") { - return; - } - - { - const error = startResult.failure; - if (ptyProcess) { - yield* startKillEscalation(ptyProcess, session.threadId, session.terminalId); - } - - yield* modifyManagerState((state) => { - session.status = "error"; - session.pid = null; - session.process = null; - session.unsubscribeData = null; - session.unsubscribeExit = null; - session.hasRunningSubprocess = false; - session.pendingProcessEvents = []; - session.pendingProcessEventIndex = 0; - session.processEventDrainRunning = false; - session.updatedAt = new Date().toISOString(); - return [undefined, state] as const; - }); - - yield* evictInactiveSessionsIfNeeded(); - - const message = error.message; - yield* publishEvent({ - type: "error", - threadId: session.threadId, - terminalId: session.terminalId, - createdAt: new Date().toISOString(), - message, - }); - yield* Effect.logError("failed to start terminal", { - threadId: session.threadId, - terminalId: session.terminalId, - error: message, - ...(startedShell ? 
{ shell: startedShell } : {}), - }); - } - }); - - const closeSession = Effect.fn("terminal.closeSession")(function* ( - threadId: string, - terminalId: string, - deleteHistoryOnClose: boolean, - ) { - const key = toSessionKey(threadId, terminalId); - const session = yield* getSession(threadId, terminalId); - - if (Option.isSome(session)) { - yield* stopProcess(session.value); - yield* persistHistory(threadId, terminalId, session.value.history); - } - - yield* flushPersist(threadId, terminalId); - - yield* modifyManagerState((state) => { - if (!state.sessions.has(key)) { - return [undefined, state] as const; - } - const sessions = new Map(state.sessions); - sessions.delete(key); - return [undefined, { ...state, sessions }] as const; - }); - - if (deleteHistoryOnClose) { - yield* deleteHistory(threadId, terminalId); - } - }); - - const pollSubprocessActivity = Effect.fn("terminal.pollSubprocessActivity")(function* () { - const state = yield* readManagerState; - const runningSessions = [...state.sessions.values()].filter( - (session): session is TerminalSessionState & { pid: number } => - session.status === "running" && Number.isInteger(session.pid), - ); - - if (runningSessions.length === 0) { - return; - } - - const checkSubprocessActivity = Effect.fn("terminal.checkSubprocessActivity")(function* ( - session: TerminalSessionState & { pid: number }, - ) { - const terminalPid = session.pid; - const hasRunningSubprocess = yield* subprocessChecker(terminalPid).pipe( - Effect.map(Option.some), - Effect.catch((error) => - Effect.logWarning("failed to check terminal subprocess activity", { - threadId: session.threadId, - terminalId: session.terminalId, - terminalPid, - error: error instanceof Error ? 
error.message : String(error), - }).pipe(Effect.as(Option.none())), - ), - ); - - if (Option.isNone(hasRunningSubprocess)) { - return; - } - - const event = yield* modifyManagerState((state) => { - const liveSession: Option.Option = Option.fromNullishOr( - state.sessions.get(toSessionKey(session.threadId, session.terminalId)), - ); - if ( - Option.isNone(liveSession) || - liveSession.value.status !== "running" || - liveSession.value.pid !== terminalPid || - liveSession.value.hasRunningSubprocess === hasRunningSubprocess.value - ) { - return [Option.none(), state] as const; - } - - liveSession.value.hasRunningSubprocess = hasRunningSubprocess.value; - liveSession.value.updatedAt = new Date().toISOString(); - - return [ - Option.some({ - type: "activity" as const, - threadId: liveSession.value.threadId, - terminalId: liveSession.value.terminalId, - createdAt: new Date().toISOString(), - hasRunningSubprocess: hasRunningSubprocess.value, - }), - state, - ] as const; - }); - - if (Option.isSome(event)) { - yield* publishEvent(event.value); - } - }); - - yield* Effect.forEach(runningSessions, checkSubprocessActivity, { - concurrency: "unbounded", - discard: true, - }); - }); - - const hasRunningSessions = readManagerState.pipe( - Effect.map((state) => - [...state.sessions.values()].some((session) => session.status === "running"), - ), - ); - - yield* Effect.forever( - hasRunningSessions.pipe( - Effect.flatMap((active) => - active - ? 
pollSubprocessActivity().pipe( - Effect.flatMap(() => Effect.sleep(subprocessPollIntervalMs)), - ) - : Effect.sleep(subprocessPollIntervalMs), - ), - ), - ).pipe(Effect.forkIn(workerScope)); - - yield* Effect.addFinalizer(() => - Effect.gen(function* () { - const sessions = yield* modifyManagerState( - (state) => - [ - [...state.sessions.values()], - { - ...state, - sessions: new Map(), - }, - ] as const, - ); - - const cleanupSession = Effect.fn("terminal.cleanupSession")(function* ( - session: TerminalSessionState, - ) { - cleanupProcessHandles(session); - if (!session.process) return; - yield* clearKillFiber(session.process); - yield* runKillEscalation(session.process, session.threadId, session.terminalId); - }); - - yield* Effect.forEach(sessions, cleanupSession, { - concurrency: "unbounded", - discard: true, - }); - }).pipe(Effect.ignoreCause({ log: true })), - ); - - const open: TerminalManagerShape["open"] = (input) => - withThreadLock( - input.threadId, - Effect.gen(function* () { - const terminalId = input.terminalId ?? DEFAULT_TERMINAL_ID; - yield* assertValidCwd(input.cwd); - - const sessionKey = toSessionKey(input.threadId, terminalId); - const existing = yield* getSession(input.threadId, terminalId); - if (Option.isNone(existing)) { - yield* flushPersist(input.threadId, terminalId); - const history = yield* readHistory(input.threadId, terminalId); - const cols = input.cols ?? DEFAULT_OPEN_COLS; - const rows = input.rows ?? 
DEFAULT_OPEN_ROWS; - const session: TerminalSessionState = { - threadId: input.threadId, - terminalId, - cwd: input.cwd, - status: "starting", - pid: null, - history, - pendingHistoryControlSequence: "", - pendingProcessEvents: [], - pendingProcessEventIndex: 0, - processEventDrainRunning: false, - exitCode: null, - exitSignal: null, - updatedAt: new Date().toISOString(), - cols, - rows, - process: null, - unsubscribeData: null, - unsubscribeExit: null, - hasRunningSubprocess: false, - runtimeEnv: normalizedRuntimeEnv(input.env), - }; - - const createdSession = session; - yield* modifyManagerState((state) => { - const sessions = new Map(state.sessions); - sessions.set(sessionKey, createdSession); - return [undefined, { ...state, sessions }] as const; - }); - - yield* evictInactiveSessionsIfNeeded(); - yield* startSession( - session, - { - threadId: input.threadId, - terminalId, - cwd: input.cwd, - cols, - rows, - ...(input.env ? { env: input.env } : {}), - }, - "started", - ); - return snapshot(session); - } - - const liveSession = existing.value; - const nextRuntimeEnv = normalizedRuntimeEnv(input.env); - const currentRuntimeEnv = liveSession.runtimeEnv; - const targetCols = input.cols ?? liveSession.cols; - const targetRows = input.rows ?? 
liveSession.rows; - const runtimeEnvChanged = !Equal.equals(currentRuntimeEnv, nextRuntimeEnv); - - if (liveSession.cwd !== input.cwd || runtimeEnvChanged) { - yield* stopProcess(liveSession); - liveSession.cwd = input.cwd; - liveSession.runtimeEnv = nextRuntimeEnv; - liveSession.history = ""; - liveSession.pendingHistoryControlSequence = ""; - liveSession.pendingProcessEvents = []; - liveSession.pendingProcessEventIndex = 0; - liveSession.processEventDrainRunning = false; - yield* persistHistory( - liveSession.threadId, - liveSession.terminalId, - liveSession.history, - ); - } else if (liveSession.status === "exited" || liveSession.status === "error") { - liveSession.runtimeEnv = nextRuntimeEnv; - liveSession.history = ""; - liveSession.pendingHistoryControlSequence = ""; - liveSession.pendingProcessEvents = []; - liveSession.pendingProcessEventIndex = 0; - liveSession.processEventDrainRunning = false; - yield* persistHistory( - liveSession.threadId, - liveSession.terminalId, - liveSession.history, - ); - } - - if (!liveSession.process) { - yield* startSession( - liveSession, - { - threadId: input.threadId, - terminalId, - cwd: input.cwd, - cols: targetCols, - rows: targetRows, - ...(input.env ? { env: input.env } : {}), - }, - "started", - ); - return snapshot(liveSession); - } - - if (liveSession.cols !== targetCols || liveSession.rows !== targetRows) { - liveSession.cols = targetCols; - liveSession.rows = targetRows; - liveSession.updatedAt = new Date().toISOString(); - liveSession.process.resize(targetCols, targetRows); - } - - return snapshot(liveSession); - }), - ); - - const write: TerminalManagerShape["write"] = Effect.fn("terminal.write")(function* (input) { - const terminalId = input.terminalId ?? 
DEFAULT_TERMINAL_ID; - const session = yield* requireSession(input.threadId, terminalId); - const process = session.process; - if (!process || session.status !== "running") { - if (session.status === "exited") return; - return yield* new TerminalNotRunningError({ - threadId: input.threadId, - terminalId, - }); - } - yield* Effect.sync(() => process.write(input.data)); - }); - - const resize: TerminalManagerShape["resize"] = Effect.fn("terminal.resize")(function* (input) { - const terminalId = input.terminalId ?? DEFAULT_TERMINAL_ID; - const session = yield* requireSession(input.threadId, terminalId); - const process = session.process; - if (!process || session.status !== "running") { - return yield* new TerminalNotRunningError({ - threadId: input.threadId, - terminalId, - }); - } - session.cols = input.cols; - session.rows = input.rows; - session.updatedAt = new Date().toISOString(); - yield* Effect.sync(() => process.resize(input.cols, input.rows)); - }); - - const clear: TerminalManagerShape["clear"] = (input) => - withThreadLock( - input.threadId, - Effect.gen(function* () { - const terminalId = input.terminalId ?? DEFAULT_TERMINAL_ID; - const session = yield* requireSession(input.threadId, terminalId); - session.history = ""; - session.pendingHistoryControlSequence = ""; - session.pendingProcessEvents = []; - session.pendingProcessEventIndex = 0; - session.processEventDrainRunning = false; - session.updatedAt = new Date().toISOString(); - yield* persistHistory(input.threadId, terminalId, session.history); - yield* publishEvent({ - type: "cleared", - threadId: input.threadId, - terminalId, - createdAt: new Date().toISOString(), - }); - }), - ); - - const restart: TerminalManagerShape["restart"] = (input) => - withThreadLock( - input.threadId, - Effect.gen(function* () { - yield* increment(terminalRestartsTotal, { scope: "thread" }); - const terminalId = input.terminalId ?? 
DEFAULT_TERMINAL_ID; - yield* assertValidCwd(input.cwd); - - const sessionKey = toSessionKey(input.threadId, terminalId); - const existingSession = yield* getSession(input.threadId, terminalId); - let session: TerminalSessionState; - if (Option.isNone(existingSession)) { - const cols = input.cols ?? DEFAULT_OPEN_COLS; - const rows = input.rows ?? DEFAULT_OPEN_ROWS; - session = { - threadId: input.threadId, - terminalId, - cwd: input.cwd, - status: "starting", - pid: null, - history: "", - pendingHistoryControlSequence: "", - pendingProcessEvents: [], - pendingProcessEventIndex: 0, - processEventDrainRunning: false, - exitCode: null, - exitSignal: null, - updatedAt: new Date().toISOString(), - cols, - rows, - process: null, - unsubscribeData: null, - unsubscribeExit: null, - hasRunningSubprocess: false, - runtimeEnv: normalizedRuntimeEnv(input.env), - }; - const createdSession = session; - yield* modifyManagerState((state) => { - const sessions = new Map(state.sessions); - sessions.set(sessionKey, createdSession); - return [undefined, { ...state, sessions }] as const; - }); - yield* evictInactiveSessionsIfNeeded(); - } else { - session = existingSession.value; - yield* stopProcess(session); - session.cwd = input.cwd; - session.runtimeEnv = normalizedRuntimeEnv(input.env); - } - - const cols = input.cols ?? session.cols; - const rows = input.rows ?? session.rows; - - session.history = ""; - session.pendingHistoryControlSequence = ""; - session.pendingProcessEvents = []; - session.pendingProcessEventIndex = 0; - session.processEventDrainRunning = false; - yield* persistHistory(input.threadId, terminalId, session.history); - yield* startSession( - session, - { - threadId: input.threadId, - terminalId, - cwd: input.cwd, - cols, - rows, - ...(input.env ? 
{ env: input.env } : {}), - }, - "restarted", - ); - return snapshot(session); - }), - ); - - const close: TerminalManagerShape["close"] = (input) => - withThreadLock( - input.threadId, - Effect.gen(function* () { - if (input.terminalId) { - yield* closeSession(input.threadId, input.terminalId, input.deleteHistory === true); - return; - } - - const threadSessions = yield* sessionsForThread(input.threadId); - yield* Effect.forEach( - threadSessions, - (session) => closeSession(input.threadId, session.terminalId, false), - { discard: true }, - ); - - if (input.deleteHistory) { - yield* deleteAllHistoryForThread(input.threadId); - } - }), - ); - - return { - open, - write, - resize, - clear, - restart, - close, - subscribe: (listener) => - Effect.sync(() => { - terminalEventListeners.add(listener); - return () => { - terminalEventListeners.delete(listener); - }; - }), - } satisfies TerminalManagerShape; - }, -); - export const TerminalManagerLive = Layer.effect(TerminalManager, makeTerminalManager()); diff --git a/apps/server/src/terminal/Services/Manager.ts b/apps/server/src/terminal/Services/Manager.ts index bdfbc85cc5..56b0ad49bd 100644 --- a/apps/server/src/terminal/Services/Manager.ts +++ b/apps/server/src/terminal/Services/Manager.ts @@ -19,10 +19,8 @@ import { TerminalRestartInput, TerminalSessionSnapshot, TerminalSessionLookupError, - TerminalSessionStatus, TerminalWriteInput, } from "@t3tools/contracts"; -import { PtyProcess } from "./PTY"; import { Effect, ServiceMap } from "effect"; export { @@ -33,36 +31,6 @@ export { TerminalSessionLookupError, }; -export interface TerminalSessionState { - threadId: string; - terminalId: string; - cwd: string; - status: TerminalSessionStatus; - pid: number | null; - history: string; - pendingHistoryControlSequence: string; - exitCode: number | null; - exitSignal: number | null; - updatedAt: string; - cols: number; - rows: number; - process: PtyProcess | null; - unsubscribeData: (() => void) | null; - unsubscribeExit: (() 
=> void) | null; - hasRunningSubprocess: boolean; - runtimeEnv: Record | null; -} - -export interface ShellCandidate { - shell: string; - args?: string[]; -} - -export interface TerminalStartInput extends TerminalOpenInput { - cols: number; - rows: number; -} - /** * TerminalManagerShape - Service API for terminal session lifecycle operations. */ diff --git a/apps/server/src/workspace/Layers/WorkspaceEntries.ts b/apps/server/src/workspace/Layers/WorkspaceEntries.ts index 12af8601ca..0b23f32145 100644 --- a/apps/server/src/workspace/Layers/WorkspaceEntries.ts +++ b/apps/server/src/workspace/Layers/WorkspaceEntries.ts @@ -217,7 +217,7 @@ function directoryAncestorsOf(relativePath: string): string[] { const processErrorDetail = (cause: unknown): string => cause instanceof Error ? cause.message : String(cause); -export const makeWorkspaceEntries = Effect.gen(function* () { +const makeWorkspaceEntries = Effect.gen(function* () { const path = yield* Path.Path; const gitOption = yield* Effect.serviceOption(GitCore); const workspacePaths = yield* WorkspacePaths; diff --git a/apps/server/src/workspace/Layers/WorkspaceFileSystem.ts b/apps/server/src/workspace/Layers/WorkspaceFileSystem.ts index 84e5d9c6d1..d14c44f812 100644 --- a/apps/server/src/workspace/Layers/WorkspaceFileSystem.ts +++ b/apps/server/src/workspace/Layers/WorkspaceFileSystem.ts @@ -8,7 +8,7 @@ import { import { WorkspaceEntries } from "../Services/WorkspaceEntries.ts"; import { WorkspacePaths } from "../Services/WorkspacePaths.ts"; -export const makeWorkspaceFileSystem = Effect.gen(function* () { +const makeWorkspaceFileSystem = Effect.gen(function* () { const fileSystem = yield* FileSystem.FileSystem; const path = yield* Path.Path; const workspacePaths = yield* WorkspacePaths; diff --git a/apps/server/src/workspace/Layers/WorkspacePaths.ts b/apps/server/src/workspace/Layers/WorkspacePaths.ts index fa7a90cf07..14b77d0e03 100644 --- a/apps/server/src/workspace/Layers/WorkspacePaths.ts +++ 
b/apps/server/src/workspace/Layers/WorkspacePaths.ts @@ -23,7 +23,7 @@ function expandHomePath(input: string, path: Path.Path): string { return input; } -export const makeWorkspacePaths = Effect.gen(function* () { +const makeWorkspacePaths = Effect.gen(function* () { const fileSystem = yield* FileSystem.FileSystem; const path = yield* Path.Path; diff --git a/apps/server/src/workspace/Services/WorkspacePaths.ts b/apps/server/src/workspace/Services/WorkspacePaths.ts index ad6d9cd3e5..a5a512f593 100644 --- a/apps/server/src/workspace/Services/WorkspacePaths.ts +++ b/apps/server/src/workspace/Services/WorkspacePaths.ts @@ -45,13 +45,6 @@ export class WorkspacePathOutsideRootError extends Schema.TaggedErrorClass { return []; }, }); - __resetNativeApiForTests(); + resetAppAtomRegistryForTests(); + const clientCache = globalThis as typeof globalThis & { + __t3WsRpcClient?: { dispose?: () => Promise } | null; + }; + await clientCache.__t3WsRpcClient?.dispose?.(); + clientCache.__t3WsRpcClient = null; await setViewport(DEFAULT_VIEWPORT); localStorage.clear(); document.body.innerHTML = ""; diff --git a/apps/web/src/components/ChatView.tsx b/apps/web/src/components/ChatView.tsx index 1580c8f605..e1c11c7da0 100644 --- a/apps/web/src/components/ChatView.tsx +++ b/apps/web/src/components/ChatView.tsx @@ -4264,7 +4264,7 @@ export default function ChatView({ threadId }: ChatViewProps) { <> {providerTraitsPicker} @@ -4272,12 +4272,12 @@ export default function ChatView({ threadId }: ChatViewProps) {