diff --git a/src/browser/components/OutputTab.test.tsx b/src/browser/components/OutputTab.test.tsx new file mode 100644 index 0000000000..75d2229d6c --- /dev/null +++ b/src/browser/components/OutputTab.test.tsx @@ -0,0 +1,240 @@ +import { afterEach, beforeEach, describe, expect, mock, spyOn, test } from "bun:test"; +import { GlobalWindow } from "happy-dom"; +import { cleanup, fireEvent, render, waitFor } from "@testing-library/react"; + +import { MAX_LOG_ENTRIES } from "@/common/constants/ui"; + +type LogLevel = "error" | "warn" | "info" | "debug"; + +interface LogEntry { + timestamp: number; + level: LogLevel; + message: string; + location: string; +} + +type LogStreamEvent = + | { type: "snapshot"; epoch: number; entries: LogEntry[] } + | { type: "append"; epoch: number; entries: LogEntry[] } + | { type: "reset"; epoch: number }; + +interface MockAPI { + general: { + subscribeLogs: () => Promise>; + clearLogs: () => Promise<{ success: boolean; error?: string | null }>; + }; +} + +let mockApi: MockAPI | null = null; + +void mock.module("@/browser/contexts/API", () => ({ + useAPI: () => ({ + api: mockApi, + status: mockApi ? ("connected" as const) : ("connecting" as const), + error: null, + authenticate: () => undefined, + retry: () => undefined, + }), +})); + +import { OutputTab } from "./OutputTab"; + +function formatEntryMessage(id: number): string { + return `entry-${id.toString().padStart(4, "0")}`; +} + +function createEntries(startId: number, count: number): LogEntry[] { + return Array.from({ length: count }, (_, i) => { + const id = startId + i; + return { + timestamp: id, + level: "info", + message: formatEntryMessage(id), + location: `src/test.ts:${id}`, + }; + }); +} + +function streamEvents(...events: LogStreamEvent[]): AsyncGenerator { + return (async function* () { + for (const event of events) { + yield event; + // Keep this helper explicitly async to match oRPC stream semantics. 
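+      // Awaiting a resolved promise yields a microtask between events, so each one is delivered in its own turn.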
+ await Promise.resolve(); + } + })(); +} + +describe("OutputTab", () => { + let originalWindow: typeof globalThis.window; + let originalDocument: typeof globalThis.document; + + beforeEach(() => { + originalWindow = globalThis.window; + originalDocument = globalThis.document; + + globalThis.window = new GlobalWindow({ url: "http://localhost" }) as unknown as Window & + typeof globalThis; + globalThis.document = globalThis.window.document; + globalThis.window.localStorage.clear(); + }); + + afterEach(() => { + cleanup(); + mockApi = null; + globalThis.window = originalWindow; + globalThis.document = originalDocument; + }); + + test("renders initial log snapshot from subscription", async () => { + const initialEntries = createEntries(0, 5); + mockApi = { + general: { + subscribeLogs: () => + Promise.resolve(streamEvents({ type: "snapshot", epoch: 1, entries: initialEntries })), + clearLogs: () => Promise.resolve({ success: true }), + }, + }; + + const view = render(); + + await waitFor(() => { + expect(view.getAllByText(/^entry-\d{4}$/)).toHaveLength(5); + }); + + expect(view.getByText(formatEntryMessage(0))).toBeTruthy(); + expect(view.getByText(formatEntryMessage(4))).toBeTruthy(); + }); + + test("caps streamed entries and evicts the oldest entries", async () => { + const initialEntries = createEntries(0, MAX_LOG_ENTRIES); + const appendedEntries = createEntries(MAX_LOG_ENTRIES, 200); + + mockApi = { + general: { + subscribeLogs: () => + Promise.resolve( + streamEvents( + { type: "snapshot", epoch: 1, entries: initialEntries }, + { type: "append", epoch: 1, entries: appendedEntries } + ) + ), + clearLogs: () => Promise.resolve({ success: true }), + }, + }; + + const view = render(); + + await waitFor(() => { + expect(view.getAllByText(/^entry-\d{4}$/)).toHaveLength(MAX_LOG_ENTRIES); + }); + + expect(view.queryByText(formatEntryMessage(0))).toBeNull(); + expect(view.queryByText(formatEntryMessage(199))).toBeNull(); + expect(view.getByText(formatEntryMessage(200))).toBeTruthy(); + expect(view.getByText(formatEntryMessage(MAX_LOG_ENTRIES + 199))).toBeTruthy(); + }); + + test("reset event clears entries", async () => { + const initialEntries = createEntries(0, 3); + + let resolveResetGate: () => void = () => undefined; + const resetGate = new Promise((resolve) => { + resolveResetGate = () => resolve(); + }); + + mockApi = { + general: { + subscribeLogs: () => + Promise.resolve( + (async function* (): AsyncGenerator { + yield { type: "snapshot", epoch: 1, entries: initialEntries }; + await resetGate; + yield { type: "reset", epoch: 2 }; + })() + ), + clearLogs: () => Promise.resolve({ success: true }), + }, + }; + + const view = render(); + + await waitFor(() => { + expect(view.getByText(formatEntryMessage(0))).toBeTruthy(); + }); + + resolveResetGate(); + + await waitFor(() => { + expect(view.queryByText(formatEntryMessage(0))).toBeNull(); + }); + }); + + test("displays warning when clearLogs returns failure", async () => { + const warnSpy = spyOn(console, "warn").mockImplementation(() => undefined); + + try { + mockApi = { + general: { + subscribeLogs: () => + Promise.resolve( + streamEvents({ type: "snapshot", epoch: 1, entries: createEntries(0, 1) }) + ), + clearLogs: () => Promise.resolve({ success: false, error: "disk error" }), + }, + }; + + const view = render(); + const deleteButton = view.getByRole("button", { name: "Delete output logs" }); + + fireEvent.click(deleteButton); + + await waitFor(() => { + expect(warnSpy).toHaveBeenCalledWith("Log files could not be fully deleted:", 
"disk error"); + }); + } finally { + warnSpy.mockRestore(); + } + }); + + test("stale append after reset is rejected", async () => { + const initialEntries = createEntries(0, 1); + const staleEntries = createEntries(1000, 1); + const currentEpochEntries = createEntries(2000, 1); + + let resolvePostSnapshotGate: () => void = () => undefined; + const postSnapshotGate = new Promise((resolve) => { + resolvePostSnapshotGate = () => resolve(); + }); + + mockApi = { + general: { + subscribeLogs: () => + Promise.resolve( + (async function* (): AsyncGenerator { + yield { type: "snapshot", epoch: 1, entries: initialEntries }; + await postSnapshotGate; + yield { type: "reset", epoch: 2 }; + yield { type: "append", epoch: 1, entries: staleEntries }; + yield { type: "append", epoch: 2, entries: currentEpochEntries }; + })() + ), + clearLogs: () => Promise.resolve({ success: true }), + }, + }; + + const view = render(); + + await waitFor(() => { + expect(view.getByText(formatEntryMessage(0))).toBeTruthy(); + }); + + resolvePostSnapshotGate(); + + await waitFor(() => { + expect(view.queryByText(formatEntryMessage(0))).toBeNull(); + expect(view.queryByText(formatEntryMessage(1000))).toBeNull(); + expect(view.getByText(formatEntryMessage(2000))).toBeTruthy(); + }); + }); +}); diff --git a/src/browser/components/OutputTab.tsx b/src/browser/components/OutputTab.tsx new file mode 100644 index 0000000000..6bbeb52692 --- /dev/null +++ b/src/browser/components/OutputTab.tsx @@ -0,0 +1,236 @@ +import { type UIEvent, useEffect, useReducer, useRef, useState } from "react"; +import { Trash2 } from "lucide-react"; +import { useAPI } from "@/browser/contexts/API"; +import { usePersistedState } from "@/browser/hooks/usePersistedState"; +import { isAbortError } from "@/browser/utils/isAbortError"; +import { MAX_LOG_ENTRIES } from "@/common/constants/ui"; + +type LogLevel = "error" | "warn" | "info" | "debug"; + +interface LogEntry { + timestamp: number; + level: LogLevel; + message: string; + location: string; +} + +type LogStreamEvent = + | { type: "snapshot"; epoch: number; entries: LogEntry[] } + | { type: "append"; epoch: number; entries: LogEntry[] } + | { type: "reset"; epoch: number }; + +interface LogState { + epoch: number; + entries: LogEntry[]; +} + +function reduceLogState(state: LogState, event: LogStreamEvent): LogState { + switch (event.type) { + case "snapshot": + return { + epoch: event.epoch, + entries: event.entries, + }; + case "append": { + if (event.epoch !== state.epoch) { + return state; + } + const merged = [...state.entries, ...event.entries]; + return { + epoch: state.epoch, + entries: merged.length > MAX_LOG_ENTRIES ? merged.slice(-MAX_LOG_ENTRIES) : merged, + }; + } + case "reset": + return { + epoch: event.epoch, + entries: [], + }; + } +} + +const LOG_LEVELS: readonly LogLevel[] = ["debug", "info", "warn", "error"]; + +interface OutputTabProps { + // Required by the RightSidebar tab system, even though the log stream itself is global. 
+ workspaceId: string; +} + +export function OutputTab(_props: OutputTabProps) { + const { api } = useAPI(); + + const [logState, dispatch] = useReducer(reduceLogState, { + epoch: 0, + entries: [], + }); + const [levelFilter, setLevelFilter] = usePersistedState("output-tab-level", "info"); + const [autoScroll, setAutoScroll] = useState(true); + + const scrollRef = useRef(null); + + useEffect(() => { + if (!api) return; + + const controller = new AbortController(); + const { signal } = controller; + + let iterator: AsyncIterator | null = null; + + void (async () => { + try { + const subscribedIterator = await api.general.subscribeLogs( + { level: levelFilter }, + { signal } + ); + + // oRPC iterators don’t eagerly close. If we’re already aborted, explicitly close. + if (signal.aborted) { + void subscribedIterator.return?.(); + return; + } + + iterator = subscribedIterator; + + for await (const event of subscribedIterator) { + if (signal.aborted) break; + dispatch(event); + } + } catch (error) { + if (signal.aborted || isAbortError(error)) return; + console.warn("Log subscription error:", error); + } + })(); + + return () => { + controller.abort(); + void iterator?.return?.(); + }; + }, [api, levelFilter]); + + // Auto-scroll on new entries when the user is at the bottom. + useEffect(() => { + if (!autoScroll) return; + if (!scrollRef.current) return; + scrollRef.current.scrollTop = scrollRef.current.scrollHeight; + }, [logState.entries, autoScroll]); + + const handleScroll = (e: UIEvent) => { + const el = e.currentTarget; + const isAtBottom = el.scrollHeight - el.scrollTop - el.clientHeight < 20; + setAutoScroll(isAtBottom); + }; + + const handleDelete = () => { + if (!api) { + dispatch({ type: "reset", epoch: 0 }); + return; + } + + api.general + .clearLogs() + .then((result) => { + if (!result.success) { + console.warn("Log files could not be fully deleted:", result.error); + } + }) + .catch((error) => { + console.warn("Failed to delete logs:", error); + }); + }; + + return ( +
+
+ + +
+ +
+ {logState.entries.map((entry, i) => ( + + ))} +
+
+ ); +} + +function LevelFilterDropdown(props: { value: LogLevel; onChange: (level: LogLevel) => void }) { + return ( + + ); +} + +function LogLine(props: { entry: LogEntry }) { + const { entry } = props; + + const levelColor: string = + entry.level === "error" + ? "var(--color-error)" + : entry.level === "warn" + ? "var(--color-warning)" + : entry.level === "debug" + ? "var(--color-muted-foreground)" + : "var(--color-foreground)"; + + // Inline flow layout — wraps naturally at any panel width instead of + // forcing fixed-width columns that crush the message content. + return ( +
+ {formatTime(entry.timestamp)}{" "} + {entry.level.toUpperCase()}{" "} + [{shortenLocation(entry.location)}]{" "} + {entry.message} +
+ ); +} + +/** Strip common path prefixes to show just the meaningful part. e.g. + * "src/node/services/log.ts:486" → "log.ts:486" + * "/home/user/.mux/src/cmux/.../log.ts:486" → "log.ts:486" */ +function shortenLocation(location: string): string { + // Grab the last path segment (filename:line) + const lastSlash = location.lastIndexOf("/"); + if (lastSlash >= 0) { + return location.slice(lastSlash + 1); + } + return location; +} + +function formatTime(timestampMs: number): string { + const date = new Date(timestampMs); + const hh = String(date.getHours()).padStart(2, "0"); + const mm = String(date.getMinutes()).padStart(2, "0"); + const ss = String(date.getSeconds()).padStart(2, "0"); + const ms = String(date.getMilliseconds()).padStart(3, "0"); + return `${hh}:${mm}:${ss}.${ms}`; +} diff --git a/src/browser/components/RightSidebar.tsx b/src/browser/components/RightSidebar.tsx index 3dcdb611c2..58d23c51ca 100644 --- a/src/browser/components/RightSidebar.tsx +++ b/src/browser/components/RightSidebar.tsx @@ -18,6 +18,7 @@ import { CostsTab } from "./RightSidebar/CostsTab"; import { ReviewPanel } from "./RightSidebar/CodeReview/ReviewPanel"; import { ErrorBoundary } from "./ErrorBoundary"; import { StatsTab } from "./RightSidebar/StatsTab"; +import { OutputTab } from "./OutputTab"; import { matchesKeybind, KEYBINDS, formatKeybind } from "@/browser/utils/ui/keybinds"; import { SidebarCollapseButton } from "./ui/SidebarCollapseButton"; @@ -69,6 +70,7 @@ import { import { CostsTabLabel, ExplorerTabLabel, + OutputTabLabel, FileTabLabel, ReviewTabLabel, StatsTabLabel, @@ -345,6 +347,8 @@ const RightSidebarTabsetNode: React.FC = (props) => label = ; } else if (tab === "stats") { label = ; + } else if (tab === "output") { + label = ; } else if (isTerminal) { const terminalIndex = terminalTabs.indexOf(tab); label = ( @@ -385,11 +389,13 @@ const RightSidebarTabsetNode: React.FC = (props) => const reviewPanelId = `${tabsetBaseId}-panel-review`; const explorerPanelId = `${tabsetBaseId}-panel-explorer`; const statsPanelId = `${tabsetBaseId}-panel-stats`; + const outputPanelId = `${tabsetBaseId}-panel-output`; const costsTabId = `${tabsetBaseId}-tab-costs`; const reviewTabId = `${tabsetBaseId}-tab-review`; const explorerTabId = `${tabsetBaseId}-tab-explorer`; const statsTabId = `${tabsetBaseId}-tab-stats`; + const outputTabId = `${tabsetBaseId}-tab-output`; // Generate sortable IDs for tabs in this tabset const sortableIds = items.map((item) => `${props.node.id}:${item.tab}`); @@ -467,6 +473,12 @@ const RightSidebarTabsetNode: React.FC = (props) => )} + {props.node.activeTab === "output" && ( +
+ +
+ )} + {/* Render all terminal tabs (keep-alive: hidden but mounted) */} {terminalTabs.map((terminalTab) => { const terminalTabId = `${tabsetBaseId}-tab-${terminalTab}`; diff --git a/src/browser/components/RightSidebar/tabs/TabLabels.tsx b/src/browser/components/RightSidebar/tabs/TabLabels.tsx index 078a2f8005..e5103a7752 100644 --- a/src/browser/components/RightSidebar/tabs/TabLabels.tsx +++ b/src/browser/components/RightSidebar/tabs/TabLabels.tsx @@ -115,6 +115,10 @@ export const ExplorerTabLabel: React.FC = () => ( ); +export function OutputTabLabel() { + return <>Output; +} + interface FileTabLabelProps { /** File path (relative to workspace) */ filePath: string; diff --git a/src/browser/components/RightSidebar/tabs/index.ts b/src/browser/components/RightSidebar/tabs/index.ts index 4744d46062..3a61e96b9e 100644 --- a/src/browser/components/RightSidebar/tabs/index.ts +++ b/src/browser/components/RightSidebar/tabs/index.ts @@ -24,6 +24,7 @@ export { export { CostsTabLabel, ExplorerTabLabel, + OutputTabLabel, FileTabLabel, ReviewTabLabel, StatsTabLabel, diff --git a/src/browser/components/RightSidebar/tabs/registry.ts b/src/browser/components/RightSidebar/tabs/registry.ts index 9662e2c0cf..477eb917ee 100644 --- a/src/browser/components/RightSidebar/tabs/registry.ts +++ b/src/browser/components/RightSidebar/tabs/registry.ts @@ -78,25 +78,30 @@ export interface TabConfig { } /** Static tab configurations (non-terminal tabs) */ -export const TAB_CONFIGS: Record<"costs" | "review" | "explorer" | "stats", TabConfig> = { - costs: { - name: "Costs", - contentClassName: "overflow-y-auto p-[15px]", - }, - review: { - name: "Review", - contentClassName: "overflow-y-auto p-0", - }, - explorer: { - name: "Explorer", - contentClassName: "overflow-y-auto p-0", - }, - stats: { - name: "Stats", - contentClassName: "overflow-y-auto p-[15px]", - featureFlag: "statsTab", - }, -}; +export const TAB_CONFIGS: Record<"costs" | "review" | "explorer" | "stats" | "output", TabConfig> = + { + costs: { + name: "Costs", + contentClassName: "overflow-y-auto p-[15px]", + }, + review: { + name: "Review", + contentClassName: "overflow-y-auto p-0", + }, + explorer: { + name: "Explorer", + contentClassName: "overflow-y-auto p-0", + }, + stats: { + name: "Stats", + contentClassName: "overflow-y-auto p-[15px]", + featureFlag: "statsTab", + }, + output: { + name: "Output", + contentClassName: "overflow-hidden p-0", + }, + }; /** Terminal tab configuration */ export const TERMINAL_TAB_CONFIG: TabConfig = { @@ -114,7 +119,13 @@ export const FILE_TAB_CONFIG: TabConfig = { /** Get config for a tab type */ export function getTabConfig(tab: TabType): TabConfig { - if (tab === "costs" || tab === "review" || tab === "explorer" || tab === "stats") { + if ( + tab === "costs" || + tab === "review" || + tab === "explorer" || + tab === "stats" || + tab === "output" + ) { return TAB_CONFIGS[tab]; } // File tabs diff --git a/src/browser/main.tsx b/src/browser/main.tsx index 2b732d7016..d125d591ce 100644 --- a/src/browser/main.tsx +++ b/src/browser/main.tsx @@ -1,10 +1,17 @@ import React from "react"; import ReactDOM from "react-dom/client"; +import { installBrowserLogCapture } from "@/browser/utils/browserLog"; import { AppLoader } from "@/browser/components/AppLoader"; import { initTelemetry, trackAppStarted } from "@/common/telemetry"; import { initTitlebarInsets } from "@/browser/hooks/useDesktopTitlebar"; // Initialize telemetry on app startup +try { + installBrowserLogCapture(); +} catch { + // Silent failure — never crash the app for 
logging capture +} + initTelemetry(); trackAppStarted(); diff --git a/src/browser/stories/App.rightSidebar.stories.tsx b/src/browser/stories/App.rightSidebar.stories.tsx index e9ac1f9782..7adf5645e0 100644 --- a/src/browser/stories/App.rightSidebar.stories.tsx +++ b/src/browser/stories/App.rightSidebar.stories.tsx @@ -1322,3 +1322,146 @@ export const CostsTabCompactionModelWarning: AppStory = { }, }, }; + +/** + * Helper to create realistic output log entries with mixed levels and locations. + */ +function createOutputLogEntries() { + const now = Date.now(); + + return [ + { + timestamp: now - 5 * 60_000, + level: "info" as const, + message: "Server bootstrap complete on http://localhost:3000", + location: "src/node/server.ts:42", + }, + { + timestamp: now - 4 * 60_000, + level: "debug" as const, + message: "Loaded 18 routes from API manifest", + location: "src/node/router.ts:88", + }, + { + timestamp: now - 3 * 60_000, + level: "warn" as const, + message: "Deprecated endpoint /v1/users called by legacy client", + location: "src/node/api/users.ts:133", + }, + { + timestamp: now - 2 * 60_000, + level: "info" as const, + message: "Redis cache warmup finished with 243 keys", + location: "src/node/cache/warmup.ts:57", + }, + { + timestamp: now - 90_000, + level: "error" as const, + message: "Database connection timeout after 5000ms", + location: "src/node/db/pool.ts:219", + }, + { + timestamp: now - 60_000, + level: "debug" as const, + message: "Cache hit for user profile query", + location: "src/node/cache/queryCache.ts:74", + }, + { + timestamp: now - 45_000, + level: "warn" as const, + message: "Retrying webhook delivery (attempt 2/3)", + location: "src/node/webhooks/sender.ts:161", + }, + { + timestamp: now - 30_000, + level: "error" as const, + message: "Failed to parse JSON payload from upstream service", + location: "src/node/integrations/partnerClient.ts:204", + }, + { + timestamp: now - 15_000, + level: "info" as const, + message: "Background cleanup job completed successfully", + location: "src/node/jobs/cleanup.ts:96", + }, + ]; +} + +/** + * Output tab selected with an empty log feed. + */ +export const OutputTabEmpty: AppStory = { + render: () => ( + { + localStorage.setItem(RIGHT_SIDEBAR_TAB_KEY, JSON.stringify("output")); + localStorage.setItem(RIGHT_SIDEBAR_WIDTH_KEY, "400"); + localStorage.removeItem(getRightSidebarLayoutKey("ws-output-empty")); + localStorage.removeItem("output-tab-level"); + + const client = setupSimpleChatStory({ + workspaceId: "ws-output-empty", + workspaceName: "feature/logging", + projectName: "my-app", + messages: [createUserMessage("msg-1", "Hello", { historySequence: 1 })], + logEntries: [], + }); + expandRightSidebar(); + return client; + }} + /> + ), +}; + +/** + * Output tab selected with mixed log entries. + */ +export const OutputTabWithLogs: AppStory = { + render: () => ( + { + localStorage.setItem(RIGHT_SIDEBAR_TAB_KEY, JSON.stringify("output")); + localStorage.setItem(RIGHT_SIDEBAR_WIDTH_KEY, "400"); + localStorage.removeItem(getRightSidebarLayoutKey("ws-output-logs")); + localStorage.removeItem("output-tab-level"); + + const client = setupSimpleChatStory({ + workspaceId: "ws-output-logs", + workspaceName: "feature/logging", + projectName: "my-app", + messages: [createUserMessage("msg-1", "Show logs", { historySequence: 1 })], + logEntries: createOutputLogEntries(), + }); + expandRightSidebar(); + return client; + }} + /> + ), +}; + +/** + * Output tab with persisted level filter set to "error". 
+ */ +export const OutputTabErrorsOnly: AppStory = { + render: () => ( + { + localStorage.setItem(RIGHT_SIDEBAR_TAB_KEY, JSON.stringify("output")); + localStorage.setItem(RIGHT_SIDEBAR_WIDTH_KEY, "400"); + localStorage.removeItem(getRightSidebarLayoutKey("ws-output-errors")); + // Persist the level filter to "error" so only error entries display. + localStorage.setItem("output-tab-level", JSON.stringify("error")); + + const client = setupSimpleChatStory({ + workspaceId: "ws-output-errors", + workspaceName: "feature/logging", + projectName: "my-app", + messages: [createUserMessage("msg-1", "Check errors", { historySequence: 1 })], + logEntries: createOutputLogEntries(), + }); + expandRightSidebar(); + return client; + }} + /> + ), +}; diff --git a/src/browser/stories/mocks/orpc.ts b/src/browser/stories/mocks/orpc.ts index 5a3327f318..f02b8eee2e 100644 --- a/src/browser/stories/mocks/orpc.ts +++ b/src/browser/stories/mocks/orpc.ts @@ -213,6 +213,15 @@ export interface MockORPCClientOptions { status: { state: "disabled" | "enforced" | "blocked"; reason?: string }; policy: unknown; }; + /** Mock log entries for Output tab (subscribeLogs snapshot) */ + logEntries?: Array<{ + timestamp: number; + level: "error" | "warn" | "info" | "debug"; + message: string; + location: string; + }>; + /** Mock clearLogs result (default: { success: true, error: null }) */ + clearLogsResult?: { success: boolean; error?: string | null }; } interface MockBackgroundProcess { @@ -303,6 +312,8 @@ export function createMockORPCClient(options: MockORPCClientOptions = {}): APICl status: { state: "disabled" as const }, policy: null, }, + logEntries = [], + clearLogsResult = { success: true, error: null }, } = options; // Feature flags @@ -636,6 +647,21 @@ export function createMockORPCClient(options: MockORPCClientOptions = {}): APICl yield* []; await new Promise(() => undefined); }, + subscribeLogs: async function* (input: { level?: string | null }) { + const LOG_LEVEL_PRIORITY: Record = { + error: 0, + warn: 1, + info: 2, + debug: 3, + }; + const minPriority = input.level != null ? (LOG_LEVEL_PRIORITY[input.level] ?? 3) : 3; + const filtered = logEntries.filter( + (entry) => (LOG_LEVEL_PRIORITY[entry.level] ?? 3) <= minPriority + ); + yield { type: "snapshot" as const, epoch: 1, entries: filtered }; + await new Promise(() => undefined); + }, + clearLogs: () => Promise.resolve(clearLogsResult), }, secrets: { get: (input?: { projectPath?: string }) => { diff --git a/src/browser/stories/storyHelpers.ts b/src/browser/stories/storyHelpers.ts index 1aad778654..32ac45d40f 100644 --- a/src/browser/stories/storyHelpers.ts +++ b/src/browser/stories/storyHelpers.ts @@ -380,6 +380,15 @@ export interface SimpleChatSetupOptions { agentSkills?: AgentSkillDescriptor[]; /** Agent skills that were discovered but couldn't be loaded (SKILL.md parse errors, etc.) 
*/ invalidAgentSkills?: AgentSkillIssue[]; + /** Mock log entries for Output tab */ + logEntries?: Array<{ + timestamp: number; + level: "error" | "warn" | "info" | "debug"; + message: string; + location: string; + }>; + /** Mock clearLogs result */ + clearLogsResult?: { success: boolean; error?: string | null }; } /** @@ -466,6 +475,8 @@ export function setupSimpleChatStory(opts: SimpleChatSetupOptions): APIClient { signingCapabilities: opts.signingCapabilities, agentSkills: opts.agentSkills, invalidAgentSkills: opts.invalidAgentSkills, + logEntries: opts.logEntries, + clearLogsResult: opts.clearLogsResult, }); } diff --git a/src/browser/types/rightSidebar.ts b/src/browser/types/rightSidebar.ts index b12d6796de..7a211205aa 100644 --- a/src/browser/types/rightSidebar.ts +++ b/src/browser/types/rightSidebar.ts @@ -1,4 +1,11 @@ -export const RIGHT_SIDEBAR_TABS = ["costs", "review", "terminal", "explorer", "stats"] as const; +export const RIGHT_SIDEBAR_TABS = [ + "costs", + "review", + "terminal", + "explorer", + "stats", + "output", +] as const; /** Base tab types that are always valid */ export type BaseTabType = (typeof RIGHT_SIDEBAR_TABS)[number]; diff --git a/src/browser/utils/browserLog.ts b/src/browser/utils/browserLog.ts new file mode 100644 index 0000000000..a2ff3d6317 --- /dev/null +++ b/src/browser/utils/browserLog.ts @@ -0,0 +1,68 @@ +export interface BrowserLogEntry { + timestamp: number; + level: "error" | "warn" | "info" | "debug"; + message: string; +} + +const MAX_ENTRIES = 500; +const entries: BrowserLogEntry[] = []; + +type Listener = (entry: BrowserLogEntry) => void; +const listeners = new Set(); + +// Preserve originals so DevTools still works +const originalConsole = { + log: console.log.bind(console), + warn: console.warn.bind(console), + error: console.error.bind(console), + debug: console.debug.bind(console), +}; + +const LEVEL_MAP = { + log: "info", + warn: "warn", + error: "error", + debug: "debug", +} as const; + +/** Call once at renderer entry (e.g. src/browser/main.tsx), before createRoot. 
*/ +export function installBrowserLogCapture(): void { + for (const method of ["log", "warn", "error", "debug"] as const) { + console[method] = (...args: unknown[]) => { + originalConsole[method](...args); // pass through to DevTools + + const message = args + .map((a) => { + if (typeof a === "string") return a; + try { + return JSON.stringify(a); + } catch { + return String(a); + } + }) + .join(" "); + + const entry: BrowserLogEntry = { + timestamp: Date.now(), + level: LEVEL_MAP[method], + message, + }; + + entries.push(entry); + if (entries.length > MAX_ENTRIES) entries.shift(); + + for (const listener of listeners) listener(entry); + }; + } +} + +export function onBrowserLogEntry(listener: Listener): () => void { + listeners.add(listener); + return () => { + listeners.delete(listener); + }; +} + +export function getRecentBrowserLogs(): BrowserLogEntry[] { + return [...entries]; +} diff --git a/src/browser/utils/commandIds.ts b/src/browser/utils/commandIds.ts index a7b22d4c7c..565acc9eaa 100644 --- a/src/browser/utils/commandIds.ts +++ b/src/browser/utils/commandIds.ts @@ -38,6 +38,8 @@ export const CommandIds = { navRightSidebarSplitHorizontal: () => "nav:rightSidebar:splitHorizontal" as const, navRightSidebarSplitVertical: () => "nav:rightSidebar:splitVertical" as const, navRightSidebarAddTool: () => "nav:rightSidebar:addTool" as const, + navToggleOutput: () => "nav:toggle-output" as const, + navOpenLogFile: () => "nav:open-log-file" as const, // Chat commands chatClear: () => "chat:clear" as const, diff --git a/src/browser/utils/commands/sources.ts b/src/browser/utils/commands/sources.ts index d6a24bad35..2ca9beb126 100644 --- a/src/browser/utils/commands/sources.ts +++ b/src/browser/utils/commands/sources.ts @@ -6,7 +6,11 @@ import { THINKING_LEVELS, type ThinkingLevel } from "@/common/types/thinking"; import { getThinkingPolicyForModel } from "@/common/utils/thinking/policy"; import assert from "@/common/utils/assert"; import { CUSTOM_EVENTS, createCustomEvent } from "@/common/constants/events"; -import { getRightSidebarLayoutKey, RIGHT_SIDEBAR_TAB_KEY } from "@/common/constants/storage"; +import { + getRightSidebarLayoutKey, + RIGHT_SIDEBAR_COLLAPSED_KEY, + RIGHT_SIDEBAR_TAB_KEY, +} from "@/common/constants/storage"; import { readPersistedState, updatePersistedState } from "@/browser/hooks/usePersistedState"; import { disableAutoRetryPreference } from "@/browser/utils/messages/autoRetryPreference"; import { CommandIds } from "@/browser/utils/commandIds"; @@ -20,10 +24,12 @@ import type { LayoutPresetsConfig, LayoutSlotNumber } from "@/common/types/uiLay import { addToolToFocusedTabset, getDefaultRightSidebarLayoutState, + hasTab, parseRightSidebarLayoutState, selectTabInTabset, setFocusedTabset, splitFocusedTabset, + toggleTab, } from "@/browser/utils/rightSidebarLayout"; import type { ProjectConfig } from "@/node/config"; @@ -117,6 +123,15 @@ const getRightSidebarTabFallback = (): TabType => { return isTabType(raw) ? 
raw : "costs"; }; +const readRightSidebarLayout = (workspaceId: string) => { + const fallback = getRightSidebarTabFallback(); + const raw = readPersistedState( + getRightSidebarLayoutKey(workspaceId), + getDefaultRightSidebarLayoutState(fallback) + ); + return parseRightSidebarLayoutState(raw, fallback); +}; + const updateRightSidebarLayout = ( workspaceId: string, updater: ( @@ -133,6 +148,23 @@ const updateRightSidebarLayout = ( ); }; +function toFileUrl(filePath: string): string { + const normalized = filePath.replace(/\\/g, "/"); + + // Windows drive letter paths: C:/... + if (/^[A-Za-z]:\//.test(normalized)) { + return `file:///${encodeURI(normalized)}`; + } + + // POSIX absolute paths: /... + if (normalized.startsWith("/")) { + return `file://${encodeURI(normalized)}`; + } + + // Fall back to treating the string as a path-ish URL segment. + return `file://${encodeURI(normalized)}`; +} + const findFirstTerminalSessionTab = ( node: ReturnType["root"] ): { tabsetId: string; tab: TabType } | null => { @@ -423,6 +455,32 @@ export function buildCoreSources(p: BuildSourcesParams): Array<() => CommandActi const wsId = p.selectedWorkspace?.workspaceId; if (wsId) { list.push( + { + id: CommandIds.navToggleOutput(), + title: hasTab(readRightSidebarLayout(wsId), "output") ? "Hide Output" : "Show Output", + section: section.navigation, + keywords: ["log", "logs", "output"], + run: () => { + const isOutputVisible = hasTab(readRightSidebarLayout(wsId), "output"); + updateRightSidebarLayout(wsId, (s) => toggleTab(s, "output")); + if (!isOutputVisible) { + updatePersistedState(RIGHT_SIDEBAR_COLLAPSED_KEY, false); + } + }, + }, + { + id: CommandIds.navOpenLogFile(), + title: "Open Log File", + section: section.navigation, + keywords: ["log", "logs"], + run: async () => { + const result = await p.api?.general.getLogPath(); + const logPath = result?.path; + if (!logPath) return; + + window.open(toFileUrl(logPath), "_blank", "noopener"); + }, + }, { id: CommandIds.navRightSidebarFocusTerminal(), title: "Right Sidebar: Focus Terminal", @@ -464,9 +522,16 @@ export function buildCoreSources(p: BuildSourcesParams): Array<() => CommandActi label: "Tool", placeholder: "Select a tool…", getOptions: () => - (["costs", "review", "terminal"] as TabType[]).map((tab) => ({ + (["costs", "review", "output", "terminal"] as TabType[]).map((tab) => ({ id: tab, - label: tab === "costs" ? "Costs" : tab === "review" ? "Review" : "Terminal", + label: + tab === "costs" + ? "Costs" + : tab === "review" + ? "Review" + : tab === "output" + ? "Output" + : "Terminal", keywords: [tab], })), }, diff --git a/src/browser/utils/rightSidebarLayout.ts b/src/browser/utils/rightSidebarLayout.ts index 00e5b303a1..546fe8d535 100644 --- a/src/browser/utils/rightSidebarLayout.ts +++ b/src/browser/utils/rightSidebarLayout.ts @@ -416,6 +416,14 @@ export function collectActiveTabs(node: RightSidebarLayoutNode): TabType[] { return [...collectActiveTabs(node.children[0]), ...collectActiveTabs(node.children[1])]; } +export function hasTab(state: RightSidebarLayoutState, tab: TabType): boolean { + return collectAllTabs(state.root).includes(tab); +} + +export function toggleTab(state: RightSidebarLayoutState, tab: TabType): RightSidebarLayoutState { + return hasTab(state, tab) ? removeTabEverywhere(state, tab) : selectOrAddTab(state, tab); +} + /** * Collect all tabs from all tabsets with their tabset IDs. * Returns tabs in layout order (depth-first, left-to-right/top-to-bottom). 
@@ -499,6 +507,21 @@ export function addTabToFocusedTabset( }; } +/** + * Select an existing tab anywhere in the layout, or add it to the focused tabset if missing. + */ +export function selectOrAddTab( + state: RightSidebarLayoutState, + tab: TabType +): RightSidebarLayoutState { + const found = collectAllTabsWithTabset(state.root).find((t) => t.tab === tab); + if (found) { + return selectTabInTabset(setFocusedTabset(state, found.tabsetId), found.tabsetId, found.tab); + } + + return addTabToFocusedTabset(state, tab); +} + /** * Move a tab from one tabset to another. * Handles edge cases: diff --git a/src/common/constants/paths.ts b/src/common/constants/paths.ts index 2a2a82e978..8b47c46ab0 100644 --- a/src/common/constants/paths.ts +++ b/src/common/constants/paths.ts @@ -83,6 +83,17 @@ export function getMuxSessionsDir(rootDir?: string): string { * * @param rootDir - Optional root directory (defaults to getMuxHome()) */ +/** + * Get the directory where mux backend logs are stored. + * Example: ~/.mux/logs/mux.log + * + * @param rootDir - Optional root directory (defaults to getMuxHome()) + */ +export function getMuxLogsDir(rootDir?: string): string { + const root = rootDir ?? getMuxHome(); + return join(root, "logs"); +} + export function getMuxPlansDir(rootDir?: string): string { const root = rootDir ?? getMuxHome(); return join(root, "plans"); diff --git a/src/common/constants/ui.ts b/src/common/constants/ui.ts index 3ab415913f..d1b4d2af3e 100644 --- a/src/common/constants/ui.ts +++ b/src/common/constants/ui.ts @@ -69,6 +69,14 @@ export const FORCE_COMPACTION_BUFFER_PERCENT = 5; */ export const COPY_FEEDBACK_DURATION_MS = 2000; +/** + * Maximum number of log entries retained in memory for Output tab views. + * + * This cap is shared by the backend log ring buffer and frontend Output tab + * state to prevent unbounded growth during long-running verbose sessions. + */ +export const MAX_LOG_ENTRIES = 1000; + /** * Predefined color palette for project sections. * Each color is designed to work well with the dark theme. 
diff --git a/src/common/orpc/schemas/api.ts b/src/common/orpc/schemas/api.ts index b6a384e505..963d65edc7 100644 --- a/src/common/orpc/schemas/api.ts +++ b/src/common/orpc/schemas/api.ts @@ -1583,6 +1583,54 @@ export const general = { }), output: ResultSchema(z.void(), z.string()), }, + getLogPath: { + input: z.void(), + output: z.object({ path: z.string() }), + }, + clearLogs: { + input: z.void(), + output: z.object({ + success: z.boolean(), + error: z.string().nullish(), + }), + }, + subscribeLogs: { + input: z.object({ + level: z.enum(["error", "warn", "info", "debug"]).nullish(), + }), + output: eventIterator( + z.discriminatedUnion("type", [ + z.object({ + type: z.literal("snapshot"), + epoch: z.number(), + entries: z.array( + z.object({ + timestamp: z.number(), + level: z.enum(["error", "warn", "info", "debug"]), + message: z.string(), + location: z.string(), + }) + ), + }), + z.object({ + type: z.literal("append"), + epoch: z.number(), + entries: z.array( + z.object({ + timestamp: z.number(), + level: z.enum(["error", "warn", "info", "debug"]), + message: z.string(), + location: z.string(), + }) + ), + }), + z.object({ + type: z.literal("reset"), + epoch: z.number(), + }), + ]) + ), + }, }; // Menu events (main→renderer notifications) diff --git a/src/node/orpc/router.ts b/src/node/orpc/router.ts index 05fc5b9733..3871c79dbe 100644 --- a/src/node/orpc/router.ts +++ b/src/node/orpc/router.ts @@ -18,6 +18,9 @@ import type { import type { WorkspaceMetadata } from "@/common/types/workspace"; import { createAuthMiddleware } from "./authMiddleware"; import { createAsyncMessageQueue } from "@/common/utils/asyncMessageQueue"; +import { clearLogFiles, getLogFilePath } from "@/node/services/log"; +import type { LogEntry } from "@/node/services/logBuffer"; +import { clearLogEntries, subscribeLogFeed } from "@/node/services/logBuffer"; import { createReplayBufferedStreamMessageRelay } from "./replayBufferedStreamMessageRelay"; import { createRuntime, checkRuntimeAvailability } from "@/node/runtime/runtimeFactory"; @@ -1272,6 +1275,90 @@ export const router = (authToken?: string) => { } } }), + getLogPath: t + .input(schemas.general.getLogPath.input) + .output(schemas.general.getLogPath.output) + .handler(() => { + return { path: getLogFilePath() }; + }), + clearLogs: t + .input(schemas.general.clearLogs.input) + .output(schemas.general.clearLogs.output) + .handler(async () => { + try { + await clearLogFiles(); + clearLogEntries(); + return { success: true }; + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + return { success: false, error: message }; + } + }), + subscribeLogs: t + .input(schemas.general.subscribeLogs.input) + .output(schemas.general.subscribeLogs.output) + .handler(async function* ({ input, signal }) { + const LOG_LEVEL_PRIORITY: Record = { + error: 0, + warn: 1, + info: 2, + debug: 3, + }; + + function shouldInclude( + entryLevel: LogEntry["level"], + minLevel: LogEntry["level"] + ): boolean { + return ( + (LOG_LEVEL_PRIORITY[entryLevel] ?? LOG_LEVEL_PRIORITY.debug) <= + (LOG_LEVEL_PRIORITY[minLevel] ?? LOG_LEVEL_PRIORITY.info) + ); + } + + const minLevel = input.level ?? "info"; + + const queue = createAsyncMessageQueue< + | { type: "snapshot"; epoch: number; entries: LogEntry[] } + | { type: "append"; epoch: number; entries: LogEntry[] } + | { type: "reset"; epoch: number } + >(); + + // Atomic handshake: register listener + snapshot in one step. + // No events can be lost between snapshot and subscription. 
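+        // Each append/reset carries the epoch it was produced under, so the client reducer can drop stale appends that arrive after a reset.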
+ const { snapshot, unsubscribe } = subscribeLogFeed((event) => { + if (signal?.aborted) { + return; + } + + if (event.type === "append") { + if (shouldInclude(event.entry.level, minLevel)) { + queue.push({ type: "append", epoch: event.epoch, entries: [event.entry] }); + } + return; + } + + queue.push({ type: "reset", epoch: event.epoch }); + }, minLevel); + + queue.push({ + type: "snapshot", + epoch: snapshot.epoch, + entries: snapshot.entries.filter((e) => shouldInclude(e.level, minLevel)), + }); + + const onAbort = () => { + queue.end(); + }; + signal?.addEventListener("abort", onAbort); + + try { + yield* queue.iterate(); + } finally { + signal?.removeEventListener("abort", onAbort); + unsubscribe(); + queue.end(); + } + }), openInEditor: t .input(schemas.general.openInEditor.input) .output(schemas.general.openInEditor.output) diff --git a/src/node/services/log.test.ts b/src/node/services/log.test.ts new file mode 100644 index 0000000000..10987731f8 --- /dev/null +++ b/src/node/services/log.test.ts @@ -0,0 +1,166 @@ +import { + afterAll, + afterEach, + beforeAll, + beforeEach, + describe, + expect, + mock, + spyOn, + test, +} from "bun:test"; +import * as fs from "node:fs"; +import * as fsPromises from "node:fs/promises"; +import * as os from "node:os"; +import * as path from "node:path"; +import { __resetFileSinkForTests, clearLogFiles, closeLogFile, log } from "./log"; + +describe("log file sink state machine", () => { + let tempMuxRoot: string; + let originalMuxRoot: string | undefined; + + beforeAll(async () => { + originalMuxRoot = process.env.MUX_ROOT; + tempMuxRoot = await fsPromises.mkdtemp(path.join(os.tmpdir(), "mux-log-test-")); + process.env.MUX_ROOT = tempMuxRoot; + }); + + beforeEach(() => { + __resetFileSinkForTests(); + }); + + afterEach(() => { + mock.restore(); + }); + + afterAll(async () => { + closeLogFile(); + + if (originalMuxRoot === undefined) { + delete process.env.MUX_ROOT; + } else { + process.env.MUX_ROOT = originalMuxRoot; + } + + await fsPromises.rm(tempMuxRoot, { recursive: true, force: true }); + }); + + test("transitions to degraded after stream error and suppresses immediate retries", async () => { + const originalCreateWriteStream = fs.createWriteStream; + let streamCreations = 0; + + const createWriteStreamSpy = spyOn(fs, "createWriteStream").mockImplementation((...args) => { + const stream = originalCreateWriteStream(...args); + streamCreations += 1; + + if (streamCreations === 1) { + queueMicrotask(() => { + stream.emit("error", new Error("forced log stream failure")); + }); + } + + return stream; + }); + + expect(() => log.info("first log line")).not.toThrow(); + await Promise.resolve(); + + expect(() => log.info("second log line should no-op while degraded")).not.toThrow(); + expect(createWriteStreamSpy).toHaveBeenCalledTimes(1); + }); + + test("clearLogFiles closes and reopens the active sink stream", async () => { + const originalCreateWriteStream = fs.createWriteStream; + const createdStreams: fs.WriteStream[] = []; + + const createWriteStreamSpy = spyOn(fs, "createWriteStream").mockImplementation((...args) => { + const stream = originalCreateWriteStream(...args); + createdStreams.push(stream); + return stream; + }); + + const nowSpy = spyOn(Date, "now").mockReturnValue(Number.MAX_SAFE_INTEGER); + log.info("line before clear"); + nowSpy.mockRestore(); + + expect(createdStreams).toHaveLength(1); + + const firstStream = createdStreams[0]; + if (!firstStream) { + throw new Error("expected an initial log stream"); + } + + const firstStreamEndSpy = 
spyOn(firstStream, "end"); + + await clearLogFiles(); + + expect(firstStreamEndSpy).toHaveBeenCalledTimes(1); + expect(createdStreams).toHaveLength(2); + expect(createdStreams[1]).not.toBe(firstStream); + + log.info("line after clear"); + expect(createWriteStreamSpy).toHaveBeenCalledTimes(2); + }); + + test("clearLogFiles rejects when truncate fails", async () => { + const openSyncSpy = spyOn(fs, "openSync").mockImplementation(() => { + throw new Error("truncate failed"); + }); + + let clearError: unknown; + try { + await clearLogFiles(); + } catch (error) { + clearError = error; + } + + expect(openSyncSpy).toHaveBeenCalled(); + expect(clearError).toBeInstanceOf(Error); + if (!(clearError instanceof Error)) { + throw new Error("expected clearLogFiles to reject with Error"); + } + expect(clearError.message).toContain("truncate failed"); + }); + + test("queued clear cannot reopen after closeLogFile", async () => { + const originalCreateWriteStream = fs.createWriteStream; + const createdStreams: fs.WriteStream[] = []; + + const createWriteStreamSpy = spyOn(fs, "createWriteStream").mockImplementation((...args) => { + const stream = originalCreateWriteStream(...args); + createdStreams.push(stream); + return stream; + }); + + log.info("line before clear and close"); + expect(createdStreams).toHaveLength(1); + + const clearPromise = clearLogFiles(); + closeLogFile(); + await clearPromise.catch(() => undefined); + + expect(createdStreams).toHaveLength(1); + + log.info("line after close should be dropped"); + expect(createWriteStreamSpy).toHaveBeenCalledTimes(1); + }); + + test("closeLogFile transitions sink to closed and blocks reopen attempts", () => { + const originalCreateWriteStream = fs.createWriteStream; + const createWriteStreamSpy = spyOn(fs, "createWriteStream").mockImplementation((...args) => + originalCreateWriteStream(...args) + ); + const writeSpy = spyOn(fs.WriteStream.prototype, "write"); + + log.info("line before close"); + const createCallsBeforeClose = createWriteStreamSpy.mock.calls.length; + const writeCallsBeforeClose = writeSpy.mock.calls.length; + expect(writeCallsBeforeClose).toBeGreaterThan(0); + + closeLogFile(); + + expect(() => log.info("line after close should be dropped")).not.toThrow(); + expect(createWriteStreamSpy.mock.calls.length).toBe(createCallsBeforeClose); + expect(writeSpy.mock.calls.length).toBe(writeCallsBeforeClose); + }); +}); diff --git a/src/node/services/log.ts b/src/node/services/log.ts index d319f78de0..199541648e 100644 --- a/src/node/services/log.ts +++ b/src/node/services/log.ts @@ -20,7 +20,12 @@ import * as fs from "fs"; import * as path from "path"; import chalk from "chalk"; import { parseBoolEnv } from "@/common/utils/env"; -import { getMuxHome } from "@/common/constants/paths"; +import { getMuxHome, getMuxLogsDir } from "@/common/constants/paths"; +import { hasDebugSubscriber, pushLogEntry } from "./logBuffer"; + +process.once("exit", () => { + closeLogFile(); +}); // Lazy-initialized to avoid circular dependency with config.ts let _debugObjDir: string | null = null; @@ -74,6 +79,288 @@ function getDefaultLogLevel(): LogLevel { return isElectron ? 
"info" : "error"; } +type FileSinkState = + | { status: "idle" } + | { status: "open"; stream: fs.WriteStream; path: string; size: number } + | { status: "degraded"; reason: string; retryAfterMs: number } + | { status: "closed" }; + +let fileSinkState: FileSinkState = { status: "idle" }; +let sinkTransition: Promise = Promise.resolve(); +let sinkTransitionDepth = 0; +let sinkLifecycleEpoch = 0; + +function enqueueSinkTransition(task: () => Promise): Promise { + sinkTransitionDepth += 1; + + // `run` is the caller-visible promise: resolves/rejects with the task's real outcome. + const run = sinkTransition.catch(() => undefined).then(task); + + // The internal chain absorbs failures so subsequent commands still execute, + // but only transitions to degraded if the sink hasn't been terminally closed. + sinkTransition = run + .catch((error) => { + if (fileSinkState.status !== "closed") { + setDegradedState(error); + } + }) + .finally(() => { + sinkTransitionDepth -= 1; + }); + + return run; +} + +const MAX_LOG_FILE_SIZE = 10 * 1024 * 1024; // 10 MB +const MAX_LOG_FILES = 3; +const LOG_FILE_RETRY_BACKOFF_MS = 30_000; + +function toErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error); +} + +function createSafeStream(filePath: string): fs.WriteStream { + const stream = fs.createWriteStream(filePath, { flags: "a" }); + stream.on("error", (error) => { + // Ignore stale stream errors after rotation/clear/close; only active stream + // failures should flip the sink into degraded mode. + if (fileSinkState.status !== "open" || fileSinkState.stream !== stream) { + return; + } + + fileSinkState = { + status: "degraded", + reason: toErrorMessage(error), + retryAfterMs: Date.now() + LOG_FILE_RETRY_BACKOFF_MS, + }; + + try { + stream.destroy(); + } catch { + // logger must fail silently + } + }); + return stream; +} + +function setDegradedState(error: unknown): void { + fileSinkState = { + status: "degraded", + reason: toErrorMessage(error), + retryAfterMs: Date.now() + LOG_FILE_RETRY_BACKOFF_MS, + }; +} + +function waitForStreamClose(stream: fs.WriteStream): Promise { + return new Promise((resolve) => { + let done = false; + const finish = () => { + if (done) { + return; + } + done = true; + resolve(); + }; + + stream.once("close", finish); + stream.once("error", finish); + // .end() callback fires on 'finish', which precedes 'close'— + // but the callback guarantees flush completed. + stream.end(() => finish()); + }); +} + +function stripAnsi(text: string): string { + // Matches standard ANSI escape codes for colors/styles. + // eslint-disable-next-line no-control-regex + return text.replace(/\x1b\[[0-9;]*m/g, ""); +} + +function ensureSinkOpen(): void { + // Bail while a transition (rotate/clear/close) is in progress — the queue + // will reopen the sink if appropriate. 
+ if (sinkTransitionDepth > 0) { + return; + } + + if (fileSinkState.status === "open" || fileSinkState.status === "closed") { + return; + } + + if (fileSinkState.status === "degraded" && Date.now() < fileSinkState.retryAfterMs) { + return; + } + + try { + const logsDir = getMuxLogsDir(); + const activeLogPath = path.join(logsDir, "mux.log"); + + fs.mkdirSync(logsDir, { recursive: true }); + + let fileSize = 0; + try { + fileSize = fs.statSync(activeLogPath).size; + } catch { + fileSize = 0; + } + + const stream = createSafeStream(activeLogPath); + fileSinkState = { status: "open", stream, path: activeLogPath, size: fileSize }; + } catch (error) { + // Never throw from the logger; enter degraded mode and retry later. + setDegradedState(error); + } +} + +function rotateSink(): void { + if (fileSinkState.status !== "open") { + return; + } + + const openSink = fileSinkState; + const transitionEpoch = sinkLifecycleEpoch; + // Immediately move to idle so writeSink() won't write to the old stream. + fileSinkState = { status: "idle" }; + + enqueueSinkTransition(async () => { + await waitForStreamClose(openSink.stream); + + const logsDir = path.dirname(openSink.path); + + // Shift: mux.3.log → deleted, mux.2.log → mux.3.log, etc. + for (let i = MAX_LOG_FILES; i >= 1; i--) { + const from = path.join(logsDir, i === 1 ? "mux.log" : `mux.${i - 1}.log`); + const to = path.join(logsDir, `mux.${i}.log`); + try { + fs.renameSync(from, to); + } catch { + // file may not exist + } + } + + if (fileSinkState.status === "closed" || transitionEpoch !== sinkLifecycleEpoch) { + return; + } + + const stream = createSafeStream(openSink.path); + fileSinkState = { status: "open", stream, path: openSink.path, size: 0 }; + }).catch(() => undefined); +} + +function writeSink(cleanLineWithNewline: string): void { + try { + ensureSinkOpen(); + if (fileSinkState.status !== "open") { + return; + } + + const openSink = fileSinkState; + const bytes = Buffer.byteLength(cleanLineWithNewline, "utf-8"); + + openSink.stream.write(cleanLineWithNewline); + + // Stream writes are async and can error out-of-band. Only update size + // when this sink instance is still active. + if (fileSinkState.status !== "open" || fileSinkState.stream !== openSink.stream) { + return; + } + + const nextSize = openSink.size + bytes; + fileSinkState = { ...openSink, size: nextSize }; + + if (nextSize >= MAX_LOG_FILE_SIZE) { + rotateSink(); + } + } catch { + // Silent failure. + } +} + +export function getLogFilePath(): string { + return path.join(getMuxLogsDir(), "mux.log"); +} + +function clearSink(): Promise { + const logsDir = getMuxLogsDir(); + const activeLogPath = path.join(logsDir, "mux.log"); + + const openSink = fileSinkState.status === "open" ? fileSinkState : null; + const transitionEpoch = sinkLifecycleEpoch; + // Immediately move to idle so writeSink() won't write to the old stream. + if (openSink) { + fileSinkState = { status: "idle" }; + } + + return enqueueSinkTransition(async () => { + if (openSink) { + await waitForStreamClose(openSink.stream); + } + + fs.mkdirSync(logsDir, { recursive: true }); + + // Truncate the active log. Throws on permission errors. + const fd = fs.openSync(activeLogPath, "w"); + fs.closeSync(fd); + + // Remove rotated files — missing files are fine. 
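+    // (unlinkSync throws for absent files; the per-file try/catch below swallows that.)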
+ for (let i = 1; i <= MAX_LOG_FILES; i++) { + const rotatedPath = path.join(logsDir, `mux.${i}.log`); + try { + fs.unlinkSync(rotatedPath); + } catch { + // file may not exist + } + } + + if (!openSink || fileSinkState.status === "closed" || transitionEpoch !== sinkLifecycleEpoch) { + return; + } + + const stream = createSafeStream(activeLogPath); + fileSinkState = { status: "open", stream, path: activeLogPath, size: 0 }; + }); +} + +export function clearLogFiles(): Promise { + return clearSink(); +} + +function closeSink(): void { + const openSink = fileSinkState.status === "open" ? fileSinkState : null; + + // Terminal state — set immediately so no new writes start. + fileSinkState = { status: "closed" }; + sinkLifecycleEpoch += 1; + + if (!openSink) { + return; + } + + enqueueSinkTransition(async () => { + await waitForStreamClose(openSink.stream); + }).catch(() => undefined); +} + +export function closeLogFile(): void { + closeSink(); +} + +/** @internal Test seam: reset singleton sink state for hermetic tests. */ +export function __resetFileSinkForTests(): void { + if (fileSinkState.status === "open") { + try { + fileSinkState.stream.end(); + } catch { + // silent + } + } + + fileSinkState = { status: "idle" }; + sinkTransition = Promise.resolve(); + sinkTransitionDepth = 0; + sinkLifecycleEpoch = 0; +} + let currentLogLevel: LogLevel = getDefaultLogLevel(); /** @@ -140,33 +427,79 @@ function getTimestamp(): string { return `${hours}:${mm}.${ms}${ampm}`; } +interface ParsedStackFrame { + filePath: string; + lineNum: string; +} + +function parseStackFrame(stackLine: string): ParsedStackFrame | null { + const match = /\((.+):(\d+):\d+\)$/.exec(stackLine) ?? /at (.+):(\d+):\d+$/.exec(stackLine); + if (!match) { + return null; + } + + const [, filePath, lineNum] = match; + return { filePath, lineNum }; +} + +function isLoggerStackFrame(stackLine: string, filePath: string): boolean { + const normalizedPath = filePath.replace(/\\/g, "/"); + + if ( + normalizedPath.endsWith("/src/node/services/log.ts") || + normalizedPath.endsWith("/src/node/services/log.js") + ) { + return true; + } + + return ( + stackLine.includes("getCallerLocation") || + stackLine.includes("safePipeLog") || + stackLine.includes("formatLogLine") + ); +} + +function formatCallerLocation(filePath: string, lineNum: string): string { + const normalizedPath = filePath.replace(/\\/g, "/"); + const normalizedCwd = process.cwd().replace(/\\/g, "/"); + + if (normalizedPath.startsWith(`${normalizedCwd}/`)) { + return `${normalizedPath.slice(normalizedCwd.length + 1)}:${lineNum}`; + } + + const srcIndex = normalizedPath.lastIndexOf("/src/"); + if (srcIndex >= 0) { + return `${normalizedPath.slice(srcIndex + 1)}:${lineNum}`; + } + + return `${path.basename(normalizedPath)}:${lineNum}`; +} + /** - * Get the caller's file path and line number from the stack trace - * Returns format: "path/to/file.ts:123" + * Get the first non-logger caller frame from the stack trace. + * + * We intentionally scan frames instead of using a fixed stack index because + * wrapper levels can shift over time and otherwise collapse locations to the + * logger wrapper (e.g. log.ts:488) instead of the real call site. 
*/ function getCallerLocation(): string { - const error = new Error(); - const stack = error.stack?.split("\n"); - - // Stack trace format: - // 0: "Error" - // 1: " at getCallerLocation (log.ts:X:Y)" - // 2: " at safePipeLog (log.ts:X:Y)" - // 3: " at log.info (log.ts:X:Y)" or "at log.error (log.ts:X:Y)" - // 4: " at (file.ts:X:Y)" <- We want this one - - if (stack && stack.length > 4) { - const callerLine = stack[4]; - // Extract file path and line number from the stack trace - // Format: " at FunctionName (path/to/file.ts:123:45)" - const match = /\((.+):(\d+):\d+\)/.exec(callerLine) ?? /at (.+):(\d+):\d+/.exec(callerLine); - - if (match) { - const [, filePath, lineNum] = match; - // Strip the full path to just show relative path from project root - const relativePath = filePath.replace(/^.*\/mux\//, ""); - return `${relativePath}:${lineNum}`; + const stackLines = new Error().stack?.split("\n").slice(1) ?? []; + + for (const stackLine of stackLines) { + const parsedFrame = parseStackFrame(stackLine); + if (!parsedFrame) { + continue; } + + if (parsedFrame.filePath.startsWith("node:")) { + continue; + } + + if (isLoggerStackFrame(stackLine, parsedFrame.filePath)) { + continue; + } + + return formatCallerLocation(parsedFrame.filePath, parsedFrame.lineNum); } return "unknown:0"; @@ -178,12 +511,12 @@ function getCallerLocation(): string { * @param level - Log level * @param args - Arguments to log */ -function safePipeLog(level: LogLevel, ...args: unknown[]): void { - // Check if this level should be logged - if (!shouldLog(level)) { - return; - } - +function formatLogLine(level: LogLevel): { + timestamp: string; + location: string; + useColor: boolean; + prefix: string; +} { const timestamp = getTimestamp(); const location = getCallerLocation(); const useColor = supportsColor(); @@ -209,30 +542,47 @@ function safePipeLog(level: LogLevel, ...args: unknown[]): void { prefix = `${timestamp} ${location}`; } + return { timestamp, location, useColor, prefix }; +} + +function safePipeLog(level: LogLevel, ...args: unknown[]): void { + const shouldConsoleLog = shouldLog(level); + + // Fast path: skip formatting entirely for debug entries that won't be + // logged to console or persisted. Avoids the expensive new Error().stack + // capture in getCallerLocation() on hot callsites. + if (level === "debug" && !shouldConsoleLog && !hasDebugSubscriber()) { + return; + } + + const { timestamp, location, useColor, prefix } = formatLogLine(level); + try { - if (level === "error") { - // Color the entire error message red if supported - if (useColor) { - console.error( - prefix, - ...args.map((arg) => (typeof arg === "string" ? chalkRed(arg) : arg)) - ); + if (shouldConsoleLog) { + if (level === "error") { + // Color the entire error message red if supported + if (useColor) { + console.error( + prefix, + ...args.map((arg) => (typeof arg === "string" ? chalkRed(arg) : arg)) + ); + } else { + console.error(prefix, ...args); + } + } else if (level === "warn") { + // Color the entire warning message yellow if supported + if (useColor) { + console.error( + prefix, + ...args.map((arg) => (typeof arg === "string" ? chalkYellow(arg) : arg)) + ); + } else { + console.error(prefix, ...args); + } } else { - console.error(prefix, ...args); + // info and debug go to stdout + console.log(prefix, ...args); } - } else if (level === "warn") { - // Color the entire warning message yellow if supported - if (useColor) { - console.error( - prefix, - ...args.map((arg) => (typeof arg === "string" ? 
chalkYellow(arg) : arg)) - ); - } else { - console.error(prefix, ...args); - } - } else { - // info and debug go to stdout - console.log(prefix, ...args); } } catch (error) { // Silently ignore EPIPE and other console errors @@ -252,6 +602,46 @@ function safePipeLog(level: LogLevel, ...args: unknown[]): void { } } } + + // Always persist error/warn/info to buffer+file. + // Debug entries only persist when console level includes debug + // or an Output tab subscriber has requested debug level. + const shouldPersist = level !== "debug" || shouldConsoleLog || hasDebugSubscriber(); + if (!shouldPersist) { + return; + } + + // Build a best-effort, pre-formatted single-line message for file/buffer. + // Note: console output behavior is intentionally unchanged. + const message = args + .map((arg) => { + if (typeof arg === "string") { + return arg; + } + if (arg instanceof Error) { + return arg.stack ?? arg.message; + } + try { + return JSON.stringify(arg); + } catch { + return String(arg); + } + }) + .join(" "); + + const formattedLine = `${prefix} ${message}`; + const cleanLine = stripAnsi(formattedLine); + + writeSink(`${cleanLine}\n`); + + pushLogEntry({ + timestamp: Date.now(), + level, + // Send just the log message, not the pre-formatted line (timestamp+location + // are already separate fields — no need to duplicate them in the message). + message, + location, + }); } /** diff --git a/src/node/services/logBuffer.test.ts b/src/node/services/logBuffer.test.ts new file mode 100644 index 0000000000..1d3399511b --- /dev/null +++ b/src/node/services/logBuffer.test.ts @@ -0,0 +1,123 @@ +import { beforeEach, describe, expect, test } from "bun:test"; + +import { MAX_LOG_ENTRIES } from "@/common/constants/ui"; +import { + clearLogEntries, + getEpoch, + getRecentLogs, + onLogEntry, + pushLogEntry, + subscribeLogFeed, + type BufferEvent, + type LogEntry, +} from "./logBuffer"; + +function createEntry(id: number): LogEntry { + return { + timestamp: id, + level: "info", + message: `entry-${id}`, + location: `src/test.ts:${id}`, + }; +} + +describe("logBuffer", () => { + beforeEach(() => { + clearLogEntries(); + }); + + test("pushLogEntry emits append events with the current epoch", () => { + const startEpoch = getEpoch(); + const received: BufferEvent[] = []; + + const unsubscribe = onLogEntry((event) => { + received.push(event); + }); + + const entry = createEntry(1); + pushLogEntry(entry); + + unsubscribe(); + + expect(received).toEqual([{ type: "append", epoch: startEpoch, entry }]); + }); + + test("subscribeLogFeed snapshots existing entries and streams new events", () => { + const existingEntry = createEntry(2); + pushLogEntry(existingEntry); + const currentEpoch = getEpoch(); + const received: BufferEvent[] = []; + + const { snapshot, unsubscribe } = subscribeLogFeed((event) => { + received.push(event); + }); + + expect(snapshot).toEqual({ epoch: currentEpoch, entries: [existingEntry] }); + + const nextEntry = createEntry(3); + pushLogEntry(nextEntry); + + unsubscribe(); + + expect(received).toEqual([{ type: "append", epoch: currentEpoch, entry: nextEntry }]); + }); + + test("clearLogEntries emits a reset event and increments epoch", () => { + const startEpoch = getEpoch(); + const received: BufferEvent[] = []; + + const unsubscribe = onLogEntry((event) => { + received.push(event); + }); + + clearLogEntries(); + + unsubscribe(); + + expect(received).toEqual([{ type: "reset", epoch: startEpoch + 1 }]); + expect(getEpoch()).toBe(startEpoch + 1); + }); + + test("unsubscribe stops receiving append and 
reset events", () => { + const received: BufferEvent[] = []; + + const unsubscribe = onLogEntry((event) => { + received.push(event); + }); + + unsubscribe(); + pushLogEntry(createEntry(2)); + clearLogEntries(); + + expect(received).toHaveLength(0); + }); + + test("getEpoch only advances on reset events", () => { + const initialEpoch = getEpoch(); + + pushLogEntry(createEntry(3)); + expect(getEpoch()).toBe(initialEpoch); + + clearLogEntries(); + const afterFirstReset = getEpoch(); + expect(afterFirstReset).toBe(initialEpoch + 1); + + clearLogEntries(); + expect(getEpoch()).toBe(afterFirstReset + 1); + }); + + test("retains only the most recent MAX_LOG_ENTRIES entries", () => { + const overflowCount = 5; + const totalEntries = MAX_LOG_ENTRIES + overflowCount; + + for (let id = 0; id < totalEntries; id += 1) { + pushLogEntry(createEntry(id)); + } + + const recent = getRecentLogs(); + + expect(recent).toHaveLength(MAX_LOG_ENTRIES); + expect(recent[0]?.message).toBe(`entry-${overflowCount}`); + expect(recent.at(-1)?.message).toBe(`entry-${totalEntries - 1}`); + }); +}); diff --git a/src/node/services/logBuffer.ts b/src/node/services/logBuffer.ts new file mode 100644 index 0000000000..b8a7beecaf --- /dev/null +++ b/src/node/services/logBuffer.ts @@ -0,0 +1,95 @@ +import { MAX_LOG_ENTRIES } from "@/common/constants/ui"; +import type { LogLevel } from "./log"; + +export interface LogEntry { + timestamp: number; + level: LogLevel; + message: string; + location: string; +} + +export type BufferEvent = + | { type: "append"; epoch: number; entry: LogEntry } + | { type: "reset"; epoch: number }; + +export interface LogFeedSnapshot { + epoch: number; + entries: LogEntry[]; +} + +const buffer: LogEntry[] = []; +let epoch = 0; + +type LogListener = (event: BufferEvent) => void; +const listeners = new Set(); +const subscriberLevels = new Map(); + +export function pushLogEntry(entry: LogEntry): void { + buffer.push(entry); + if (buffer.length > MAX_LOG_ENTRIES) { + buffer.shift(); + } + + const appendEvent: BufferEvent = { type: "append", epoch, entry }; + for (const listener of listeners) { + listener(appendEvent); + } +} + +export function getRecentLogs(): LogEntry[] { + return [...buffer]; +} + +export function getEpoch(): number { + return epoch; +} + +export function subscribeLogFeed( + listener: (event: BufferEvent) => void, + requestedLevel?: LogLevel +): { snapshot: LogFeedSnapshot; unsubscribe: () => void } { + listeners.add(listener); + if (requestedLevel) { + subscriberLevels.set(listener, requestedLevel); + } + + return { + snapshot: { epoch, entries: [...buffer] }, + unsubscribe: () => { + listeners.delete(listener); + subscriberLevels.delete(listener); + }, + }; +} + +export function clearLogEntries(): void { + buffer.length = 0; + epoch += 1; + + const resetEvent: BufferEvent = { type: "reset", epoch }; + for (const listener of listeners) { + listener(resetEvent); + } +} + +export function onLogEntry(listener: LogListener, requestedLevel?: LogLevel): () => void { + listeners.add(listener); + if (requestedLevel) { + subscriberLevels.set(listener, requestedLevel); + } + + return () => { + listeners.delete(listener); + subscriberLevels.delete(listener); + }; +} + +export function hasDebugSubscriber(): boolean { + for (const level of subscriberLevels.values()) { + if (level === "debug") { + return true; + } + } + + return false; +}