diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 727f89dc..1257c15b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -72,10 +72,10 @@ jobs: # ALSO produces the per-agent bundles AND `openclaw/dist/`. The # latter is gitignored, so it doesn't exist after a fresh # `actions/checkout` — and several bundle-scan tests under - # `claude-code/tests/skilify-session-start-injection.test.ts` read - # `openclaw/dist/index.js` and `openclaw/dist/skilify-worker.js` + # `claude-code/tests/skillify-session-start-injection.test.ts` read + # `openclaw/dist/index.js` and `openclaw/dist/skillify-worker.js` # directly. Without this rebuild they fail with ENOENT (see PR #98 - # — first CI run after the openclaw skilify wiring landed). + # — first CI run after the openclaw skillify wiring landed). run: npm run build - name: Run tests with coverage diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 80f61af7..eada41d5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -205,8 +205,8 @@ jobs: - name: Build bundles # Must run BEFORE the quality gate. `npm run ci` includes vitest, # and the bundle-scan tests under - # claude-code/tests/skilify-session-start-injection.test.ts read - # openclaw/dist/index.js + openclaw/dist/skilify-worker.js + # claude-code/tests/skillify-session-start-injection.test.ts read + # openclaw/dist/index.js + openclaw/dist/skillify-worker.js # directly. openclaw/dist/ is gitignored — it only exists after # `npm run build`. Without this step before the gate, vitest # fails with ENOENT and the publish aborts. Same root cause as diff --git a/RELEASE_CHECKLIST.md b/RELEASE_CHECKLIST.md index 80a0ff63..25557ab5 100644 --- a/RELEASE_CHECKLIST.md +++ b/RELEASE_CHECKLIST.md @@ -2,7 +2,7 @@ Before merging any new feature into `main` (and especially before cutting an npm release), walk through this list. 
Every item here corresponds to a real -gap that has slipped past us in past PRs — most recently the skilify +gap that has slipped past us in past PRs — most recently the skillify discovery + cherry-pick e2e gap on PR #98. The list is **the same regardless of feature size**. Don't skip sections @@ -62,7 +62,7 @@ For every new SQL-touching surface: - [ ] Run with a **missing table name** and confirm graceful fallback (no stack trace) - [ ] Run with an **invalid identifier** (`bad-name-with-dashes`) and confirm `sqlIdent` rejects it before any SQL fires -Reference: `/tmp/skilify-pull-e2e.mjs` (65/65 across 15 scenarios for `pull`). +Reference: `/tmp/skillify-pull-e2e.mjs` (65/65 across 15 scenarios for `pull`). Lives outside the repo by design — the e2e matrix is per-feature scratch. --- @@ -71,7 +71,7 @@ Lives outside the repo by design — the e2e matrix is per-feature scratch. Hivemind ships into **six** agent surfaces. A feature is not done until every applicable surface is covered. Skipping one because "it's the -weird one" is how skilify shipped to Pi and OpenClaw blind on PR #98 — +weird one" is how skillify shipped to Pi and OpenClaw blind on PR #98 — the prior version of this section listed only the four hook-driven agents and quietly excluded the other two. @@ -81,8 +81,8 @@ agents and quietly excluded the other two. 
| Codex | `src/hooks/codex/`, `codex/bundle/` | full | ✅ | ✅ session-start.ts | npm bin via `$CODEX_PLUGIN_ROOT` | | Cursor | `src/hooks/cursor/`, `cursor/bundle/` | session-start + end + capture + pre-tool-use | ✅ | ✅ session-start.ts | no slash command surface | | Hermes | `src/hooks/hermes/`, `hermes/bundle/` | analogous to cursor | ✅ | ✅ session-start.ts | gate uses OpenRouter (`hermes -z`), NOT claude | -| **Pi** | `pi/extension-source/hivemind.ts` (raw .ts, no bundle) | full (session_start, input, tool_result, message_end, session_shutdown) | ✅ via `pi/bundle/skilify-worker.js` spawned from session_shutdown | ✅ inline in `CONTEXT_PREAMBLE` | self-contained extension; pi compiles the .ts at load time | -| **OpenClaw**| `openclaw/src/index.ts`, `openclaw/skills/SKILL.md` | `before_prompt_build`, `before_agent_start`, `agent_end` | ✅ via `openclaw/dist/skilify-worker.js` spawned from `agent_end` | ✅ in `openclaw/skills/SKILL.md` | gateway plugin: captures sessions to the same `sessions` table; bypasses esbuild's `child_process` stub via `createRequire(import.meta.url)` | +| **Pi** | `pi/extension-source/hivemind.ts` (raw .ts, no bundle) | full (session_start, input, tool_result, message_end, session_shutdown) | ✅ via `pi/bundle/skillify-worker.js` spawned from session_shutdown | ✅ inline in `CONTEXT_PREAMBLE` | self-contained extension; pi compiles the .ts at load time | +| **OpenClaw**| `openclaw/src/index.ts`, `openclaw/skills/SKILL.md` | `before_prompt_build`, `before_agent_start`, `agent_end` | ✅ via `openclaw/dist/skillify-worker.js` spawned from `agent_end` | ✅ in `openclaw/skills/SKILL.md` | gateway plugin: captures sessions to the same `sessions` table; bypasses esbuild's `child_process` stub via `createRequire(import.meta.url)` | **Mining (worker firing on session end)** applies wherever the agent captures sessions — which is all six. 
Earlier drafts of this section @@ -90,8 +90,8 @@ incorrectly described OpenClaw mining as "N/A by design"; OpenClaw does in fact capture sessions to the same `sessions` Deeplake table (see `openclaw/src/index.ts:903` agent_end hook), and the worker can mine them just like any other agent. The wiring lives at -`openclaw/src/index.ts:spawnOpenclawSkilifyWorker` and fires from the -agent_end hook after each capture. Bundle: `openclaw/dist/skilify-worker.js` +`openclaw/src/index.ts:spawnOpenclawSkillifyWorker` and fires from the +agent_end hook after each capture. Bundle: `openclaw/dist/skillify-worker.js` (separate esbuild entry, isolated from the gateway's child_process stub via `createRequire(import.meta.url)`). E2E verified live against Deeplake plugin_test_1/test1 sandbox: 6/6 PASS, real skill produced @@ -122,7 +122,7 @@ For every feature that runs inside a hook (worker, capture, session-end): - [ ] Verify the worker / hook actually fires for every agent (check Deeplake table for the inserted row, not just "no error") - [ ] If the feature uses async hooks (Stop / SessionEnd), check both: parent process exits before async work completes is a real risk and has bitten us before (`claude -p` does not block on Stop hook) -Reference: `/tmp/skilify-e2e-matrix.mjs` exercised gate CREATE / MERGE / SKIP across the four hook-driven agents — but did NOT cover `pull` (gap closed by the dedicated pull e2e in Section 2), and did NOT cover Pi or OpenClaw at all (gap closed by the Pi inject + OpenClaw SKILL.md additions in commit `9d74db6`). +Reference: `/tmp/skillify-e2e-matrix.mjs` exercised gate CREATE / MERGE / SKIP across the four hook-driven agents — but did NOT cover `pull` (gap closed by the dedicated pull e2e in Section 2), and did NOT cover Pi or OpenClaw at all (gap closed by the Pi inject + OpenClaw SKILL.md additions in commit `9d74db6`). 
--- @@ -138,8 +138,8 @@ layers, mirroring the existing `auth-login` family: - [ ] **Slash command (OPTIONAL — decide explicitly)** — `claude-code/commands/.md` and `codex/commands/.md` register `/hivemind:` for user-typed invocation. Only add when there is a clear UX reason (e.g. parity with `/hivemind:login` for a top-level user action). If you do add one: - Use `node "${CLAUDE_PLUGIN_ROOT}/bundle/cli.js" $ARGUMENTS` (CC) and `node "$CODEX_PLUGIN_ROOT/bundle/cli.js" $ARGUMENTS` (Codex). **Never** the bare-binary form `hivemind $ARGUMENTS` — it assumes `npm i -g @deeplake/hivemind` which marketplace-installed users do not have, so the slash silently breaks - Cursor and Hermes do not support slash commands at all — those agents go through the CLI or natural-language inject only. Don't write slash commands you cannot deliver across all four agents unless the asymmetry is intentional - - If the agent already has full coverage via SessionStart inject + `hivemind ` CLI, the slash is pure UX and can be skipped (skilify chose to skip it on PR #98 — agent autonomous discovery + CLI cover the ground) -- [ ] **Bundle-scan guard test** — a vitest scans the SHIPPED `*/bundle/session-start.js` files and asserts the new section + the most-important flags are present. Protects against silent regressions on rebuild (see `claude-code/tests/skilify-session-start-injection.test.ts`) + - If the agent already has full coverage via SessionStart inject + `hivemind ` CLI, the slash is pure UX and can be skipped (skillify chose to skip it on PR #98 — agent autonomous discovery + CLI cover the ground) +- [ ] **Bundle-scan guard test** — a vitest scans the SHIPPED `*/bundle/session-start.js` files and asserts the new section + the most-important flags are present. 
Protects against silent regressions on rebuild (see `claude-code/tests/skillify-session-start-injection.test.ts`) - [ ] Optional: dedicated SKILL.md if the feature warrants a skill (Claude Code skills auto-load on description match) If the feature is invocable but undiscoverable, no agent will surface it @@ -165,7 +165,7 @@ into SQL / shell / filesystem: ## 6. Backend quirks (Deeplake-specific) -- [ ] **UPDATE coalescing**: two rapid UPDATEs on the same row drop one silently (`row_count: 0` even though API returns 200 OK). Solution: single combined UPDATE per RMW, or append-only INSERT with `ORDER BY version DESC LIMIT 1` reads (skilify pattern) +- [ ] **UPDATE coalescing**: two rapid UPDATEs on the same row drop one silently (`row_count: 0` even though API returns 200 OK). Solution: single combined UPDATE per RMW, or append-only INSERT with `ORDER BY version DESC LIMIT 1` reads (skillify pattern) - [ ] **Lazy table creation**: first INSERT against a missing table should `CREATE TABLE IF NOT EXISTS` then retry. Test path: drop the table, run the feature, confirm it self-heals - [ ] **Missing-table error matching**: use the project's `isMissingTableError` regex. 
Do NOT match the bare phrase "does not exist" — that also fires for column errors - [ ] **Lookup-index creation**: idempotent `CREATE INDEX IF NOT EXISTS` calls, but tolerate the duplicate-key warning that fires when two parallel sessions race to create the same index @@ -196,8 +196,8 @@ For every shipped artifact under `*/bundle/`: Examples in tree: - `claude-code/tests/wiki-worker-upload-sql.test.ts` — rejects standalone `UPDATE … SET description = …` -- `claude-code/tests/skilify-bundle-scan.test.ts` — per-agent skilify-worker presence -- `claude-code/tests/skilify-session-start-injection.test.ts` — per-agent SKILLS injection +- `claude-code/tests/skillify-bundle-scan.test.ts` — per-agent skillify-worker presence +- `claude-code/tests/skillify-session-start-injection.test.ts` — per-agent SKILLS injection - `claude-code/tests/periodic-summary-bundles.test.ts` — lock-acquire wiring + flag rename --- @@ -228,16 +228,16 @@ Examples in tree: --- -## What we missed on PR #98 (skilify), retrospectively +## What we missed on PR #98 (skillify), retrospectively -So this checklist is grounded, not theoretical. On the original skilify PR +So this checklist is grounded, not theoretical. On the original skillify PR we passed every section EXCEPT: - **Section 2** — only the gate write path was e2e-tested; `pull --user`, `pull --users`, `pull --all-users`, `pull --to global`, `pull --dry-run`, `pull --force`, positional name, SQL injection, missing table, invalid identifier all relied on mocked unit tests until we did the dedicated pull e2e (65 assertions across 15 scenarios) -- **Section 4** — the SessionStart injection was never extended for skilify, even though `auth-login` already had its parallel section. All four agents shipped without any way to discover `hivemind skilify pull --user X` or its variants. Closed by commits `64b25eb` + `e5c5987`. 
-- **Section 4 (slash command)** — initial slash command files (`claude-code/commands/skilify.md`, `codex/commands/skilify.md`) used the bare-binary form `hivemind skilify $ARGUMENTS`, which silently fails for marketplace-installed users (no global `hivemind` bin). After deciding the SessionStart inject + CLI cover the ground, both files were removed rather than fixed — keeping the surface honest across the 4 agents (Cursor and Hermes never had slash commands anyway). Reviewer Kaghni surfaced this on PR comment 3196839552. +- **Section 4** — the SessionStart injection was never extended for skillify, even though `auth-login` already had its parallel section. All four agents shipped without any way to discover `hivemind skillify pull --user X` or its variants. Closed by commits `64b25eb` + `e5c5987`. +- **Section 4 (slash command)** — initial slash command files (`claude-code/commands/skillify.md`, `codex/commands/skillify.md`) used the bare-binary form `hivemind skillify $ARGUMENTS`, which silently fails for marketplace-installed users (no global `hivemind` bin). After deciding the SessionStart inject + CLI cover the ground, both files were removed rather than fixed — keeping the surface honest across the 4 agents (Cursor and Hermes never had slash commands anyway). Reviewer Kaghni surfaced this on PR comment 3196839552. - **Section 3 (per-agent matrix scope)** — the matrix in this checklist initially listed only **four** agents (CC / Codex / Cursor / Hermes). Pi has the full session lifecycle (`session_start` … `session_shutdown`) via its extension API and was simply forgotten. OpenClaw has a different model (gateway, not session runner) but its agent-facing SKILL.md was also untouched. Both were closed when the user asked "Abbiamo coperto anche OpenClaw e Pi?" and forced the surface to grow from 4 to 6. The matrix table in Section 3 now explicitly enumerates all six. 
-- **Section 3 (mistake about OpenClaw mining being "N/A by design")** — the first revision of the matrix table claimed OpenClaw mining was "N/A by design — gateway, no sessions to mine." This was wrong: `openclaw/src/index.ts:903` hooks `agent_end` and writes captured messages to the same `sessions` Deeplake table CC/Codex/Cursor/Hermes/Pi share. The user caught this by asking "how are we saving the sessions with openclaw?" — once forced to read the source, the gap was obvious. Fixed by adding `spawnOpenclawSkilifyWorker` and a sibling `openclaw/dist/skilify-worker.js` bundle (separate esbuild entry, runtime spawn via `createRequire(import.meta.url)` to bypass the main bundle's `child_process` stub). E2E verified: real skill produced in 20.7s against the live Deeplake sandbox. +- **Section 3 (mistake about OpenClaw mining being "N/A by design")** — the first revision of the matrix table claimed OpenClaw mining was "N/A by design — gateway, no sessions to mine." This was wrong: `openclaw/src/index.ts:903` hooks `agent_end` and writes captured messages to the same `sessions` Deeplake table CC/Codex/Cursor/Hermes/Pi share. The user caught this by asking "how are we saving the sessions with openclaw?" — once forced to read the source, the gap was obvious. Fixed by adding `spawnOpenclawSkillifyWorker` and a sibling `openclaw/dist/skillify-worker.js` bundle (separate esbuild entry, runtime spawn via `createRequire(import.meta.url)` to bypass the main bundle's `child_process` stub). E2E verified: real skill produced in 20.7s against the live Deeplake sandbox. Five gaps caught only because the user asked the right cynical questions ("ha funzionato tutto davvero?" / "will cc codex etc know?" 
/ diff --git a/bundle/cli.js b/bundle/cli.js index b3ec2d85..7de28152 100755 --- a/bundle/cli.js +++ b/bundle/cli.js @@ -3352,7 +3352,7 @@ var EXTENSION_PATH = join8(EXTENSIONS_DIR, "hivemind.ts"); var VERSION_DIR = join8(PI_AGENT_DIR, ".hivemind"); var WIKI_WORKER_DIR = join8(PI_AGENT_DIR, "hivemind"); var WIKI_WORKER_PATH = join8(WIKI_WORKER_DIR, "wiki-worker.js"); -var SKILIFY_WORKER_PATH = join8(WIKI_WORKER_DIR, "skilify-worker.js"); +var SKILLIFY_WORKER_PATH = join8(WIKI_WORKER_DIR, "skillify-worker.js"); var AUTOPULL_WORKER_PATH = join8(WIKI_WORKER_DIR, "autopull-worker.js"); var HIVEMIND_BLOCK_START = ""; var HIVEMIND_BLOCK_END = ""; @@ -3438,10 +3438,10 @@ function installPi() { ensureDir(WIKI_WORKER_DIR); copyFileSync2(srcWorker, WIKI_WORKER_PATH); } - const srcSkilifyWorker = join8(pkgRoot(), "pi", "bundle", "skilify-worker.js"); - if (existsSync7(srcSkilifyWorker)) { + const srcSkillifyWorker = join8(pkgRoot(), "pi", "bundle", "skillify-worker.js"); + if (existsSync7(srcSkillifyWorker)) { ensureDir(WIKI_WORKER_DIR); - copyFileSync2(srcSkilifyWorker, SKILIFY_WORKER_PATH); + copyFileSync2(srcSkillifyWorker, SKILLIFY_WORKER_PATH); } const srcAutopullWorker = join8(pkgRoot(), "pi", "bundle", "autopull-worker.js"); if (existsSync7(srcAutopullWorker)) { @@ -3455,8 +3455,8 @@ function installPi() { if (existsSync7(WIKI_WORKER_PATH)) { log(` pi wiki-worker installed -> ${WIKI_WORKER_PATH}`); } - if (existsSync7(SKILIFY_WORKER_PATH)) { - log(` pi skilify-worker installed -> ${SKILIFY_WORKER_PATH}`); + if (existsSync7(SKILLIFY_WORKER_PATH)) { + log(` pi skillify-worker installed -> ${SKILLIFY_WORKER_PATH}`); } if (existsSync7(AUTOPULL_WORKER_PATH)) { log(` pi autopull-worker installed -> ${AUTOPULL_WORKER_PATH}`); @@ -4718,20 +4718,53 @@ if (process.argv[1] && process.argv[1].endsWith("auth-login.js")) { }); } -// dist/src/commands/skilify.js -import { readdirSync as readdirSync4, existsSync as existsSync18, readFileSync as readFileSync13, mkdirSync as mkdirSync8, 
renameSync as renameSync3 } from "node:fs"; -import { homedir as homedir11 } from "node:os"; -import { dirname as dirname4, join as join21 } from "node:path"; +// dist/src/commands/skillify.js +import { readdirSync as readdirSync4, existsSync as existsSync19, readFileSync as readFileSync13, mkdirSync as mkdirSync8, renameSync as renameSync4 } from "node:fs"; +import { homedir as homedir12 } from "node:os"; +import { dirname as dirname4, join as join22 } from "node:path"; + +// dist/src/skillify/scope-config.js +import { existsSync as existsSync13, mkdirSync as mkdirSync4, readFileSync as readFileSync9, writeFileSync as writeFileSync6 } from "node:fs"; +import { homedir as homedir6 } from "node:os"; +import { join as join16 } from "node:path"; -// dist/src/skilify/scope-config.js -import { existsSync as existsSync12, mkdirSync as mkdirSync4, readFileSync as readFileSync9, writeFileSync as writeFileSync6 } from "node:fs"; +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync12, renameSync } from "node:fs"; import { homedir as homedir5 } from "node:os"; import { join as join15 } from "node:path"; -var STATE_DIR = join15(homedir5(), ".deeplake", "state", "skilify"); -var CONFIG_PATH2 = join15(STATE_DIR, "config.json"); +var dlog = (msg) => log2("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join15(homedir5(), ".deeplake", "state"); + const legacy = join15(root, "skilify"); + const current = join15(root, "skillify"); + if (!existsSync12(legacy)) + return; + if (existsSync12(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/scope-config.js +var STATE_DIR = join16(homedir6(), ".deeplake", 
"state", "skillify"); +var CONFIG_PATH2 = join16(STATE_DIR, "config.json"); var DEFAULT = { scope: "me", team: [], install: "project" }; function loadScopeConfig() { - if (!existsSync12(CONFIG_PATH2)) + migrateLegacyStateDir(); + if (!existsSync13(CONFIG_PATH2)) return DEFAULT; try { const raw = JSON.parse(readFileSync9(CONFIG_PATH2, "utf-8")); @@ -4744,19 +4777,20 @@ function loadScopeConfig() { } } function saveScopeConfig(cfg) { + migrateLegacyStateDir(); mkdirSync4(STATE_DIR, { recursive: true }); writeFileSync6(CONFIG_PATH2, JSON.stringify(cfg, null, 2)); } -// dist/src/skilify/pull.js -import { existsSync as existsSync16, readFileSync as readFileSync12, writeFileSync as writeFileSync9, mkdirSync as mkdirSync7, renameSync as renameSync2, lstatSync as lstatSync4, readlinkSync as readlinkSync2, symlinkSync as symlinkSync2, unlinkSync as unlinkSync8 } from "node:fs"; -import { homedir as homedir9 } from "node:os"; -import { dirname as dirname3, join as join19 } from "node:path"; +// dist/src/skillify/pull.js +import { existsSync as existsSync17, readFileSync as readFileSync12, writeFileSync as writeFileSync9, mkdirSync as mkdirSync7, renameSync as renameSync3, lstatSync as lstatSync4, readlinkSync as readlinkSync2, symlinkSync as symlinkSync2, unlinkSync as unlinkSync8 } from "node:fs"; +import { homedir as homedir10 } from "node:os"; +import { dirname as dirname3, join as join20 } from "node:path"; -// dist/src/skilify/skill-writer.js -import { existsSync as existsSync13, mkdirSync as mkdirSync5, readFileSync as readFileSync10, readdirSync as readdirSync2, statSync as statSync2, writeFileSync as writeFileSync7 } from "node:fs"; -import { homedir as homedir6 } from "node:os"; -import { join as join16 } from "node:path"; +// dist/src/skillify/skill-writer.js +import { existsSync as existsSync14, mkdirSync as mkdirSync5, readFileSync as readFileSync10, readdirSync as readdirSync2, statSync as statSync2, writeFileSync as writeFileSync7 } from "node:fs"; +import { 
homedir as homedir7 } from "node:os"; +import { join as join17 } from "node:path"; function assertValidSkillName(name) { if (typeof name !== "string" || name.length === 0) { throw new Error(`invalid skill name: empty or non-string`); @@ -4814,18 +4848,19 @@ function parseFrontmatter(text) { return { fm, body }; } -// dist/src/skilify/manifest.js -import { existsSync as existsSync14, lstatSync as lstatSync3, mkdirSync as mkdirSync6, readFileSync as readFileSync11, renameSync, unlinkSync as unlinkSync7, writeFileSync as writeFileSync8 } from "node:fs"; -import { homedir as homedir7 } from "node:os"; -import { dirname as dirname2, join as join17 } from "node:path"; +// dist/src/skillify/manifest.js +import { existsSync as existsSync15, lstatSync as lstatSync3, mkdirSync as mkdirSync6, readFileSync as readFileSync11, renameSync as renameSync2, unlinkSync as unlinkSync7, writeFileSync as writeFileSync8 } from "node:fs"; +import { homedir as homedir8 } from "node:os"; +import { dirname as dirname2, join as join18 } from "node:path"; function emptyManifest() { return { version: 1, entries: [] }; } function manifestPath() { - return join17(homedir7(), ".deeplake", "state", "skilify", "pulled.json"); + return join18(homedir8(), ".deeplake", "state", "skillify", "pulled.json"); } function loadManifest(path = manifestPath()) { - if (!existsSync14(path)) + migrateLegacyStateDir(); + if (!existsSync15(path)) return emptyManifest(); let raw; try { @@ -4875,10 +4910,11 @@ function loadManifest(path = manifestPath()) { } } function saveManifest(m, path = manifestPath()) { + migrateLegacyStateDir(); mkdirSync6(dirname2(path), { recursive: true }); const tmp = `${path}.tmp`; writeFileSync8(tmp, JSON.stringify(m, null, 2) + "\n", { mode: 384 }); - renameSync(tmp, path); + renameSync2(tmp, path); } function recordPull(entry, path = manifestPath()) { const m = loadManifest(path); @@ -4920,7 +4956,7 @@ function pruneOrphanedEntries(path = manifestPath()) { const live = []; let pruned = 
0; for (const e of m.entries) { - if (existsSync14(join17(e.installRoot, e.dirName))) { + if (existsSync15(join18(e.installRoot, e.dirName))) { live.push(e); continue; } @@ -4932,31 +4968,31 @@ function pruneOrphanedEntries(path = manifestPath()) { return pruned; } -// dist/src/skilify/agent-roots.js -import { existsSync as existsSync15 } from "node:fs"; -import { homedir as homedir8 } from "node:os"; -import { join as join18 } from "node:path"; +// dist/src/skillify/agent-roots.js +import { existsSync as existsSync16 } from "node:fs"; +import { homedir as homedir9 } from "node:os"; +import { join as join19 } from "node:path"; function resolveDetected(home) { const out = []; - const codexInstalled = existsSync15(join18(home, ".codex")); - const piInstalled = existsSync15(join18(home, ".pi", "agent")); - const hermesInstalled = existsSync15(join18(home, ".hermes")); + const codexInstalled = existsSync16(join19(home, ".codex")); + const piInstalled = existsSync16(join19(home, ".pi", "agent")); + const hermesInstalled = existsSync16(join19(home, ".hermes")); if (codexInstalled || piInstalled) { - out.push(join18(home, ".agents", "skills")); + out.push(join19(home, ".agents", "skills")); } if (hermesInstalled) { - out.push(join18(home, ".hermes", "skills")); + out.push(join19(home, ".hermes", "skills")); } if (piInstalled) { - out.push(join18(home, ".pi", "agent", "skills")); + out.push(join19(home, ".pi", "agent", "skills")); } return out; } -function detectAgentSkillsRoots(canonicalRoot, home = homedir8()) { +function detectAgentSkillsRoots(canonicalRoot, home = homedir9()) { return resolveDetected(home).filter((p) => p !== canonicalRoot); } -// dist/src/skilify/pull.js +// dist/src/skillify/pull.js function assertValidAuthor(author) { if (!author) throw new Error("author is empty"); @@ -4988,15 +5024,15 @@ function isMissingTableError(message) { } function resolvePullDestination(install, cwd) { if (install === "global") - return join19(homedir9(), ".claude", 
"skills"); + return join20(homedir10(), ".claude", "skills"); if (!cwd) throw new Error("install=project requires a cwd"); - return join19(cwd, ".claude", "skills"); + return join20(cwd, ".claude", "skills"); } function fanOutSymlinks(canonicalDir, dirName, agentRoots) { const out = []; for (const root of agentRoots) { - const link = join19(root, dirName); + const link = join20(root, dirName); let existing; try { existing = lstatSync4(link); @@ -5039,8 +5075,8 @@ function backfillSymlinks(installRoot) { return; const detected = detectAgentSkillsRoots(installRoot); for (const entry of entries) { - const canonical = join19(entry.installRoot, entry.dirName); - if (!existsSync16(canonical)) + const canonical = join20(entry.installRoot, entry.dirName); + if (!existsSync17(canonical)) continue; const fresh = fanOutSymlinks(canonical, entry.dirName, detected); if (sameSorted(fresh, entry.symlinks)) @@ -5125,7 +5161,7 @@ function renderFrontmatter(fm) { return lines.join("\n"); } function readLocalVersion(path) { - if (!existsSync16(path)) + if (!existsSync17(path)) return null; try { const text = readFileSync12(path, "utf-8"); @@ -5214,8 +5250,8 @@ async function runPull(opts) { summary.skipped++; continue; } - const skillDir = join19(root, dirName); - const skillFile = join19(skillDir, "SKILL.md"); + const skillDir = join20(root, dirName); + const skillFile = join20(skillDir, "SKILL.md"); const remoteVersion = Number(row.version ?? 
1); const localVersion = readLocalVersion(skillFile); const action = decideAction({ @@ -5227,9 +5263,9 @@ async function runPull(opts) { let manifestError; if (action === "wrote") { mkdirSync7(skillDir, { recursive: true }); - if (existsSync16(skillFile)) { + if (existsSync17(skillFile)) { try { - renameSync2(skillFile, `${skillFile}.bak`); + renameSync3(skillFile, `${skillFile}.bak`); } catch { } } @@ -5274,16 +5310,16 @@ async function runPull(opts) { return summary; } -// dist/src/skilify/unpull.js -import { existsSync as existsSync17, readdirSync as readdirSync3, rmSync as rmSync5, statSync as statSync3 } from "node:fs"; -import { homedir as homedir10 } from "node:os"; -import { join as join20 } from "node:path"; +// dist/src/skillify/unpull.js +import { existsSync as existsSync18, readdirSync as readdirSync3, rmSync as rmSync5, statSync as statSync3 } from "node:fs"; +import { homedir as homedir11 } from "node:os"; +import { join as join21 } from "node:path"; function resolveUnpullRoot(install, cwd) { if (install === "global") - return join20(homedir10(), ".claude", "skills"); + return join21(homedir11(), ".claude", "skills"); if (!cwd) throw new Error("cwd required when install === 'project'"); - return join20(cwd, ".claude", "skills"); + return join21(cwd, ".claude", "skills"); } function runUnpull(opts) { const root = resolveUnpullRoot(opts.install, opts.cwd); @@ -5306,8 +5342,8 @@ function runUnpull(opts) { const entries = entriesForRoot(manifest, opts.install, root); for (const entry of entries) { summary.scanned++; - const path = join20(root, entry.dirName); - if (!existsSync17(path)) { + const path = join21(root, entry.dirName); + if (!existsSync18(path)) { if (!opts.dryRun) { unlinkSymlinks(entry.symlinks); removePullEntry(opts.install, entry.installRoot, entry.dirName); @@ -5360,12 +5396,12 @@ function runUnpull(opts) { } summary.entries.push(result); } - if (existsSync17(root) && (opts.all || opts.legacyCleanup)) { + if (existsSync18(root) && 
(opts.all || opts.legacyCleanup)) { const manifestDirNames = new Set(entries.map((e) => e.dirName)); for (const dirName of readdirSync3(root)) { if (manifestDirNames.has(dirName)) continue; - const path = join20(root, dirName); + const path = join21(root, dirName); let st; try { st = statSync3(path); @@ -5442,9 +5478,9 @@ function decideTargetForManifestEntry(entry, opts, userFilter, haveUserFilter) { return { shouldRemove: true }; } -// dist/src/commands/skilify.js +// dist/src/commands/skillify.js function stateDir() { - return join21(homedir11(), ".deeplake", "state", "skilify"); + return join22(homedir12(), ".deeplake", "state", "skillify"); } function showStatus() { const cfg = loadScopeConfig(); @@ -5452,11 +5488,11 @@ function showStatus() { console.log(`team: ${cfg.team.length === 0 ? "(empty)" : cfg.team.join(", ")}`); console.log(`install: ${cfg.install} (${cfg.install === "global" ? "~/.claude/skills/" : "/.claude/skills/"})`); const dir = stateDir(); - if (!existsSync18(dir)) { + if (!existsSync19(dir)) { console.log(`state: (no projects tracked yet)`); return; } - const files = readdirSync4(dir).filter((f) => f.endsWith(".json") && f !== "config.json" && f !== "pulled.json"); + const files = readdirSync4(dir).filter((f) => f.endsWith(".json") && f !== "config.json" && f !== "pulled.json" && f !== "autopull-last-run.json"); if (files.length === 0) { console.log(`state: (no projects tracked yet)`); return; @@ -5464,9 +5500,10 @@ function showStatus() { console.log(`state: ${files.length} project(s) tracked`); for (const f of files) { try { - const s = JSON.parse(readFileSync13(join21(dir, f), "utf-8")); - const skills = s.skillsGenerated.length === 0 ? "none" : s.skillsGenerated.join(", "); - console.log(` - ${s.project} (counter=${s.counter}, last=${s.lastDate ?? "never"}, skills=${skills})`); + const s = JSON.parse(readFileSync13(join22(dir, f), "utf-8")); + const last = typeof s.updatedAt === "number" ? 
new Date(s.updatedAt).toISOString() : s.lastDate ?? "never"; + const skills = Array.isArray(s.skillsGenerated) && s.skillsGenerated.length > 0 ? s.skillsGenerated.join(", ") : "none"; + console.log(` - ${s.project} (counter=${s.counter}, last=${last}, skills=${skills})`); } catch { } } @@ -5480,7 +5517,7 @@ function setScope(scope) { saveScopeConfig({ ...cfg, scope }); console.log(`Scope set to '${scope}'.`); if (scope === "team" && cfg.team.length === 0) { - console.log(`Note: team list is empty. Use 'hivemind skilify team add ' to populate it.`); + console.log(`Note: team list is empty. Use 'hivemind skillify team add ' to populate it.`); } } function setInstall(loc) { @@ -5490,31 +5527,31 @@ function setInstall(loc) { } const cfg = loadScopeConfig(); saveScopeConfig({ ...cfg, install: loc }); - const path = loc === "global" ? join21(homedir11(), ".claude", "skills") : "/.claude/skills"; + const path = loc === "global" ? join22(homedir12(), ".claude", "skills") : "/.claude/skills"; console.log(`Install location set to '${loc}'. New skills will be written to ${path}//SKILL.md.`); } function promoteSkill(name, cwd) { if (!name) { - console.error("Usage: hivemind skilify promote "); + console.error("Usage: hivemind skillify promote "); process.exit(1); } - const projectPath = join21(cwd, ".claude", "skills", name); - const globalPath = join21(homedir11(), ".claude", "skills", name); - if (!existsSync18(join21(projectPath, "SKILL.md"))) { + const projectPath = join22(cwd, ".claude", "skills", name); + const globalPath = join22(homedir12(), ".claude", "skills", name); + if (!existsSync19(join22(projectPath, "SKILL.md"))) { console.error(`Skill '${name}' not found at ${projectPath}/SKILL.md`); process.exit(1); } - if (existsSync18(join21(globalPath, "SKILL.md"))) { + if (existsSync19(join22(globalPath, "SKILL.md"))) { console.error(`Skill '${name}' already exists at ${globalPath}/SKILL.md \u2014 refusing to overwrite. 
Remove it first or rename the project skill.`); process.exit(1); } mkdirSync8(dirname4(globalPath), { recursive: true }); - renameSync3(projectPath, globalPath); + renameSync4(projectPath, globalPath); console.log(`Promoted '${name}' from ${projectPath} \u2192 ${globalPath}.`); } function teamAdd(name) { if (!name) { - console.error("Usage: hivemind skilify team add "); + console.error("Usage: hivemind skillify team add "); process.exit(1); } const cfg = loadScopeConfig(); @@ -5528,7 +5565,7 @@ function teamAdd(name) { } function teamRemove(name) { if (!name) { - console.error("Usage: hivemind skilify team remove "); + console.error("Usage: hivemind skillify team remove "); process.exit(1); } const cfg = loadScopeConfig(); @@ -5551,14 +5588,14 @@ function teamList() { } function usage() { console.log("Usage:"); - console.log(" hivemind skilify show current scope, team, install, and per-project state"); - console.log(" hivemind skilify scope set the mining scope"); - console.log(" hivemind skilify install set where new skills are written"); - console.log(" hivemind skilify promote move a project skill to the global location"); - console.log(" hivemind skilify team add add a username to the team list"); - console.log(" hivemind skilify team remove remove a username from the team list"); - console.log(" hivemind skilify team list list current team members"); - console.log(" hivemind skilify pull [skill-name] [opts] fetch skills from Deeplake to local FS"); + console.log(" hivemind skillify show current scope, team, install, and per-project state"); + console.log(" hivemind skillify scope set the mining scope"); + console.log(" hivemind skillify install set where new skills are written"); + console.log(" hivemind skillify promote move a project skill to the global location"); + console.log(" hivemind skillify team add add a username to the team list"); + console.log(" hivemind skillify team remove remove a username from the team list"); + console.log(" hivemind 
skillify team list list current team members"); + console.log(" hivemind skillify pull [skill-name] [opts] fetch skills from Deeplake to local FS"); console.log(" Options for pull:"); console.log(" --to destination (default: global)"); console.log(" --user only skills authored by this user"); @@ -5566,7 +5603,7 @@ function usage() { console.log(" --all-users all authors (default \u2014 equivalent to no filter)"); console.log(" --dry-run show what would be written, don't touch disk"); console.log(" --force overwrite even when local version >= remote"); - console.log(" hivemind skilify unpull [opts] remove skills previously installed by pull"); + console.log(" hivemind skillify unpull [opts] remove skills previously installed by pull"); console.log(" Options for unpull:"); console.log(" --to where to scan (default: global)"); console.log(" --user only entries authored by this user"); @@ -5575,7 +5612,7 @@ function usage() { console.log(" --dry-run show what would be removed"); console.log(" --all also remove flat-layout (locally-mined) entries"); console.log(" --legacy-cleanup also remove pre-`--author`-layout legacy `/` dirs"); - console.log(" hivemind skilify status show per-project state"); + console.log(" hivemind skillify status show per-project state"); } function takeFlagValue(args, flag) { const idx = args.indexOf(flag); @@ -5639,7 +5676,7 @@ async function pullSkills(args) { console.error(`pull failed: ${e?.message ?? e}`); process.exit(1); } - const dest = toRaw === "global" ? join21(homedir11(), ".claude", "skills") : `${process.cwd()}/.claude/skills`; + const dest = toRaw === "global" ? join22(homedir12(), ".claude", "skills") : `${process.cwd()}/.claude/skills`; const filterDesc = users.length === 0 ? "all users" : users.join(", "); console.log(`Destination: ${dest}`); console.log(`Filter: ${filterDesc}${skillName ? ` \xB7 skill='${skillName}'` : ""}${dryRun ? " \xB7 dry-run" : ""}${force ? 
" \xB7 force" : ""}`); @@ -5689,7 +5726,7 @@ async function unpullSkills(args) { all, legacyCleanup }); - const dest = toRaw === "global" ? join21(homedir11(), ".claude", "skills") : `${process.cwd()}/.claude/skills`; + const dest = toRaw === "global" ? join22(homedir12(), ".claude", "skills") : `${process.cwd()}/.claude/skills`; const filterParts = []; if (users.length > 0) filterParts.push(`users=${users.join(",")}`); @@ -5714,7 +5751,7 @@ async function unpullSkills(args) { const prunedNote = summary.manifestPruned > 0 ? `, ${summary.manifestPruned} manifest-pruned` : ""; console.log(`Result: ${summary.removed} removed, ${summary.wouldRemove} dry-run, ${summary.kept} kept${prunedNote}.`); } -function runSkilifyCommand(args) { +function runSkillifyCommand(args) { const sub = args[0]; if (!sub || sub === "status") { showStatus(); @@ -5761,30 +5798,30 @@ function runSkilifyCommand(args) { teamList(); return; } - console.error("Usage: hivemind skilify team [name]"); + console.error("Usage: hivemind skillify team [name]"); process.exit(1); } if (sub === "--help" || sub === "-h" || sub === "help") { usage(); return; } - console.error(`Unknown skilify subcommand: ${sub}`); + console.error(`Unknown skillify subcommand: ${sub}`); usage(); process.exit(1); } -if (process.argv[1] && process.argv[1].endsWith("skilify.js")) { - runSkilifyCommand(process.argv.slice(2)); +if (process.argv[1] && process.argv[1].endsWith("skillify.js")) { + runSkillifyCommand(process.argv.slice(2)); } // dist/src/cli/update.js import { execFileSync as execFileSync4 } from "node:child_process"; -import { existsSync as existsSync19, readFileSync as readFileSync15, realpathSync } from "node:fs"; +import { existsSync as existsSync20, readFileSync as readFileSync15, realpathSync } from "node:fs"; import { dirname as dirname6, sep } from "node:path"; import { fileURLToPath as fileURLToPath2 } from "node:url"; // dist/src/utils/version-check.js import { readFileSync as readFileSync14 } from "node:fs"; 
-import { dirname as dirname5, join as join22 } from "node:path"; +import { dirname as dirname5, join as join23 } from "node:path"; function isNewer(latest, current) { const parse = (v) => v.split(".").map(Number); const [la, lb, lc] = parse(latest); @@ -5829,7 +5866,7 @@ function detectInstallKind(argv1) { } let gitDir = installDir; for (let i = 0; i < 6; i++) { - if (existsSync19(`${gitDir}${sep}.git`)) { + if (existsSync20(`${gitDir}${sep}.git`)) { return { kind: "local-dev", installDir }; } const parent = dirname6(gitDir); @@ -5986,8 +6023,8 @@ Semantic search (embeddings): to run "embeddings install" automatically after installing the agent(s). Skill management (mine + share reusable Claude skills across the org): - hivemind skilify Show scope, team, install, and per-project state. - hivemind skilify pull [skill-name] Sync skills from the org table to local FS. + hivemind skillify Show scope, team, install, and per-project state. + hivemind skillify pull [skill-name] Sync skills from the org table to local FS. Options: --user , --users a,b,c, --all-users, --to , --dry-run, --force. @@ -5997,17 +6034,17 @@ Skill management (mine + share reusable Claude skills across the org): idempotent (skipped when local is at-or-newer than remote). Disable via HIVEMIND_AUTOPULL_DISABLED=1. - hivemind skilify unpull Remove skills previously installed by pull. + hivemind skillify unpull Remove skills previously installed by pull. Options: --user, --users, --not-mine, --to , --dry-run, --all (also locally-mined), --legacy-cleanup (pre-suffix-author dirs). - hivemind skilify scope Set the sharing scope for newly mined skills. - hivemind skilify install Set where new skills are written. - hivemind skilify promote Move a project skill to the global location. - hivemind skilify team add Add a username to the team list. - hivemind skilify team remove Remove a username from the team list. - hivemind skilify team list List current team members. 
+ hivemind skillify scope Set the sharing scope for newly mined skills. + hivemind skillify install Set where new skills are written. + hivemind skillify promote Move a project skill to the global location. + hivemind skillify team add Add a username to the team list. + hivemind skillify team remove Remove a username from the team list. + hivemind skillify team list List current team members. Account / org / workspace: hivemind whoami Show current user, org, workspace. @@ -6158,8 +6195,8 @@ async function main() { const code = await runUpdate({ dryRun: hasFlag(args.slice(1), "--dry-run") }); process.exit(code); } - if (cmd === "skilify") { - runSkilifyCommand(args.slice(1)); + if (cmd === "skillify") { + runSkillifyCommand(args.slice(1)); return; } if (cmd === "embeddings") { diff --git a/claude-code/bundle/capture.js b/claude-code/bundle/capture.js index 8b8e767e..7008c57b 100755 --- a/claude-code/bundle/capture.js +++ b/claude-code/bundle/capture.js @@ -817,14 +817,14 @@ function bundleDirFromImportMeta(importMetaUrl) { return dirname(fileURLToPath(importMetaUrl)); } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js import { spawn as spawn2 } from "node:child_process"; import { fileURLToPath as fileURLToPath2 } from "node:url"; import { dirname as dirname2, join as join8 } from "node:path"; import { writeFileSync as writeFileSync4, mkdirSync as mkdirSync5, appendFileSync as appendFileSync3, chmodSync } from "node:fs"; import { homedir as homedir6, tmpdir as tmpdir3 } from "node:os"; -// dist/src/skilify/gate-runner.js +// dist/src/skillify/gate-runner.js import { execFileSync } from "node:child_process"; import { existsSync as existsSync4 } from "node:fs"; import { homedir as homedir5 } from "node:os"; @@ -855,20 +855,20 @@ function findAgentBin(agent) { } } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js var HOME2 = homedir6(); -var SKILIFY_LOG = join8(HOME2, ".claude", 
"hooks", "skilify.log"); -function skilifyLog(msg) { +var SKILLIFY_LOG = join8(HOME2, ".claude", "hooks", "skillify.log"); +function skillifyLog(msg) { try { - mkdirSync5(dirname2(SKILIFY_LOG), { recursive: true }); - appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg} + mkdirSync5(dirname2(SKILLIFY_LOG), { recursive: true }); + appendFileSync3(SKILLIFY_LOG, `[${utcTimestamp()}] ${msg} `); } catch { } } -function spawnSkilifyWorker(opts) { +function spawnSkillifyWorker(opts) { const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts; - const tmpDir = join8(tmpdir3(), `deeplake-skilify-${projectKey}-${Date.now()}`); + const tmpDir = join8(tmpdir3(), `deeplake-skillify-${projectKey}-${Date.now()}`); mkdirSync5(tmpDir, { recursive: true, mode: 448 }); const gateBin = findAgentBin(agent); const configFile = join8(tmpDir, "config.json"); @@ -894,40 +894,72 @@ function spawnSkilifyWorker(opts) { hermesModel: process.env.HIVEMIND_HERMES_MODEL, piProvider: process.env.HIVEMIND_PI_PROVIDER, piModel: process.env.HIVEMIND_PI_MODEL, - skilifyLog: SKILIFY_LOG, + skillifyLog: SKILLIFY_LOG, currentSessionId }), { mode: 384 }); try { chmodSync(configFile, 384); } catch { } - skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`); - const workerPath = join8(bundleDir, "skilify-worker.js"); + skillifyLog(`${reason}: spawning skillify worker for project=${project} key=${projectKey}`); + const workerPath = join8(bundleDir, "skillify-worker.js"); spawn2("nohup", ["node", workerPath, configFile], { detached: true, stdio: ["ignore", "ignore", "ignore"] }).unref(); - skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`); + skillifyLog(`${reason}: spawned skillify worker for ${projectKey}`); } -// dist/src/skilify/state.js -import { readFileSync as readFileSync4, writeFileSync as writeFileSync5, writeSync as writeSync2, mkdirSync as mkdirSync6, renameSync as renameSync2, existsSync as 
existsSync5, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; +// dist/src/skillify/state.js +import { readFileSync as readFileSync4, writeFileSync as writeFileSync5, writeSync as writeSync2, mkdirSync as mkdirSync6, renameSync as renameSync3, existsSync as existsSync6, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; import { execSync as execSync2 } from "node:child_process"; -import { homedir as homedir7 } from "node:os"; +import { homedir as homedir8 } from "node:os"; import { createHash } from "node:crypto"; -import { join as join9, basename } from "node:path"; -var dlog2 = (msg) => log("skilify-state", msg); -var STATE_DIR2 = join9(homedir7(), ".deeplake", "state", "skilify"); +import { join as join10, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync5, renameSync as renameSync2 } from "node:fs"; +import { homedir as homedir7 } from "node:os"; +import { join as join9 } from "node:path"; +var dlog2 = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join9(homedir7(), ".deeplake", "state"); + const legacy = join9(root, "skilify"); + const current = join9(root, "skillify"); + if (!existsSync5(legacy)) + return; + if (existsSync5(current)) + return; + try { + renameSync2(legacy, current); + dlog2(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog2(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog3 = (msg) => log("skillify-state", msg); +var STATE_DIR2 = join10(homedir8(), ".deeplake", "state", "skillify"); var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4)); var TRIGGER_THRESHOLD = (() => { - const n = 
Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? n : 20; })(); function statePath2(projectKey) { - return join9(STATE_DIR2, `${projectKey}.json`); + return join10(STATE_DIR2, `${projectKey}.json`); } function lockPath2(projectKey) { - return join9(STATE_DIR2, `${projectKey}.lock`); + return join10(STATE_DIR2, `${projectKey}.lock`); } function deriveProjectKey(cwd) { const project = basename(cwd) || "unknown"; @@ -945,8 +977,9 @@ function deriveProjectKey(cwd) { return { key, project }; } function readState2(projectKey) { + migrateLegacyStateDir(); const p = statePath2(projectKey); - if (!existsSync5(p)) + if (!existsSync6(p)) return null; try { return JSON.parse(readFileSync4(p, "utf-8")); @@ -955,13 +988,15 @@ function readState2(projectKey) { } } function writeState2(projectKey, state) { + migrateLegacyStateDir(); mkdirSync6(STATE_DIR2, { recursive: true }); const p = statePath2(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; writeFileSync5(tmp, JSON.stringify(state, null, 2)); - renameSync2(tmp, p); + renameSync3(tmp, p); } function withRmwLock2(projectKey, fn) { + migrateLegacyStateDir(); mkdirSync6(STATE_DIR2, { recursive: true }); const rmw = lockPath2(projectKey) + ".rmw"; const deadline = Date.now() + 2e3; @@ -973,11 +1008,11 @@ function withRmwLock2(projectKey, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { - dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + dlog3(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); try { unlinkSync2(rmw); } catch (unlinkErr) { - dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); } continue; } @@ -991,7 +1026,7 @@ function withRmwLock2(projectKey, fn) { try { unlinkSync2(rmw); } catch (unlinkErr) { - dlog2(`rmw 
lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); } } } @@ -1021,20 +1056,21 @@ function resetCounter(projectKey) { }); } function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) { + migrateLegacyStateDir(); mkdirSync6(STATE_DIR2, { recursive: true }); const p = lockPath2(projectKey); - if (existsSync5(p)) { + if (existsSync6(p)) { try { const ageMs = Date.now() - parseInt(readFileSync4(p, "utf-8"), 10); if (Number.isFinite(ageMs) && ageMs < maxAgeMs) return false; } catch (readErr) { - dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); + dlog3(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); } try { unlinkSync2(p); } catch (unlinkErr) { - dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); + dlog3(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); return false; } } @@ -1058,15 +1094,16 @@ function releaseWorkerLock(projectKey) { } } -// dist/src/skilify/scope-config.js -import { existsSync as existsSync6, mkdirSync as mkdirSync7, readFileSync as readFileSync5, writeFileSync as writeFileSync6 } from "node:fs"; -import { homedir as homedir8 } from "node:os"; -import { join as join10 } from "node:path"; -var STATE_DIR3 = join10(homedir8(), ".deeplake", "state", "skilify"); -var CONFIG_PATH = join10(STATE_DIR3, "config.json"); +// dist/src/skillify/scope-config.js +import { existsSync as existsSync7, mkdirSync as mkdirSync7, readFileSync as readFileSync5, writeFileSync as writeFileSync6 } from "node:fs"; +import { homedir as homedir9 } from "node:os"; +import { join as join11 } from "node:path"; +var STATE_DIR3 = join11(homedir9(), ".deeplake", "state", "skillify"); +var CONFIG_PATH = join11(STATE_DIR3, "config.json"); var DEFAULT = { scope: "me", team: [], install: "project" }; function loadScopeConfig() { - if 
(!existsSync6(CONFIG_PATH)) + migrateLegacyStateDir(); + if (!existsSync7(CONFIG_PATH)) return DEFAULT; try { const raw = JSON.parse(readFileSync5(CONFIG_PATH, "utf-8")); @@ -1079,9 +1116,9 @@ function loadScopeConfig() { } } -// dist/src/skilify/triggers.js +// dist/src/skillify/triggers.js function tryStopCounterTrigger(opts) { - if (process.env.HIVEMIND_SKILIFY_WORKER === "1") + if (process.env.HIVEMIND_SKILLIFY_WORKER === "1") return; if (!opts.cwd) return; @@ -1090,13 +1127,13 @@ function tryStopCounterTrigger(opts) { if (state.counter < TRIGGER_THRESHOLD) return; if (!tryAcquireWorkerLock(state.projectKey)) { - skilifyLog(`Stop: trigger suppressed (worker lock held) project=${state.project}`); + skillifyLog(`Stop: trigger suppressed (worker lock held) project=${state.project}`); return; } - skilifyLog(`Stop: threshold hit (counter=${state.counter}, N=${TRIGGER_THRESHOLD}) project=${state.project} agent=${opts.agent}`); + skillifyLog(`Stop: threshold hit (counter=${state.counter}, N=${TRIGGER_THRESHOLD}) project=${state.project} agent=${opts.agent}`); resetCounter(state.projectKey); try { - spawnSkilifyWorker({ + spawnSkillifyWorker({ config: opts.config, cwd: opts.cwd, projectKey: state.projectKey, @@ -1108,23 +1145,23 @@ function tryStopCounterTrigger(opts) { reason: "Stop" }); } catch (e) { - skilifyLog(`Stop spawn failed: ${e?.message ?? e}`); + skillifyLog(`Stop spawn failed: ${e?.message ?? e}`); try { releaseWorkerLock(state.projectKey); } catch { } } } catch (e) { - skilifyLog(`Stop trigger error: ${e?.message ?? e}`); + skillifyLog(`Stop trigger error: ${e?.message ?? 
e}`); } } // dist/src/embeddings/client.js import { connect } from "node:net"; import { spawn as spawn3 } from "node:child_process"; -import { openSync as openSync3, closeSync as closeSync3, writeSync as writeSync3, unlinkSync as unlinkSync3, existsSync as existsSync7, readFileSync as readFileSync6 } from "node:fs"; -import { homedir as homedir9 } from "node:os"; -import { join as join11 } from "node:path"; +import { openSync as openSync3, closeSync as closeSync3, writeSync as writeSync3, unlinkSync as unlinkSync3, existsSync as existsSync8, readFileSync as readFileSync6 } from "node:fs"; +import { homedir as homedir10 } from "node:os"; +import { join as join12 } from "node:path"; // dist/src/embeddings/protocol.js var DEFAULT_SOCKET_DIR = "/tmp"; @@ -1138,7 +1175,7 @@ function pidPathFor(uid, dir = DEFAULT_SOCKET_DIR) { } // dist/src/embeddings/client.js -var SHARED_DAEMON_PATH = join11(homedir9(), ".hivemind", "embed-deps", "embed-daemon.js"); +var SHARED_DAEMON_PATH = join12(homedir10(), ".hivemind", "embed-deps", "embed-daemon.js"); var log3 = (m) => log("embed-client", m); function getUid() { const uid = typeof process.getuid === "function" ? process.getuid() : void 0; @@ -1158,7 +1195,7 @@ var EmbedClient = class { this.socketPath = socketPathFor(uid, dir); this.pidPath = pidPathFor(uid, dir); this.timeoutMs = opts.timeoutMs ?? DEFAULT_CLIENT_TIMEOUT_MS; - this.daemonEntry = opts.daemonEntry ?? process.env.HIVEMIND_EMBED_DAEMON ?? (existsSync7(SHARED_DAEMON_PATH) ? SHARED_DAEMON_PATH : void 0); + this.daemonEntry = opts.daemonEntry ?? process.env.HIVEMIND_EMBED_DAEMON ?? (existsSync8(SHARED_DAEMON_PATH) ? SHARED_DAEMON_PATH : void 0); this.autoSpawn = opts.autoSpawn ?? true; this.spawnWaitMs = opts.spawnWaitMs ?? 
5e3; } @@ -1258,7 +1295,7 @@ var EmbedClient = class { return; } } - if (!this.daemonEntry || !existsSync7(this.daemonEntry)) { + if (!this.daemonEntry || !existsSync8(this.daemonEntry)) { log3(`daemonEntry not configured or missing: ${this.daemonEntry}`); try { closeSync3(fd); @@ -1301,7 +1338,7 @@ var EmbedClient = class { while (Date.now() < deadline) { await sleep2(delay); delay = Math.min(delay * 1.5, 300); - if (!existsSync7(this.socketPath)) + if (!existsSync8(this.socketPath)) continue; try { return await this.connectOnce(); @@ -1362,8 +1399,8 @@ function embeddingSqlLiteral(vec) { // dist/src/embeddings/disable.js import { createRequire } from "node:module"; -import { homedir as homedir10 } from "node:os"; -import { join as join12 } from "node:path"; +import { homedir as homedir11 } from "node:os"; +import { join as join13 } from "node:path"; import { pathToFileURL } from "node:url"; var cachedStatus = null; function defaultResolveTransformers() { @@ -1372,7 +1409,7 @@ function defaultResolveTransformers() { return; } catch { } - const sharedDir = join12(homedir10(), ".hivemind", "embed-deps"); + const sharedDir = join13(homedir11(), ".hivemind", "embed-deps"); createRequire(pathToFileURL(`${sharedDir}/`).href).resolve("@huggingface/transformers"); } var _resolve = defaultResolveTransformers; @@ -1398,10 +1435,10 @@ function embeddingsDisabled() { // dist/src/hooks/capture.js import { fileURLToPath as fileURLToPath3 } from "node:url"; -import { dirname as dirname3, join as join13 } from "node:path"; +import { dirname as dirname3, join as join14 } from "node:path"; var log4 = (msg) => log("capture", msg); function resolveEmbedDaemonPath() { - return join13(dirname3(fileURLToPath3(import.meta.url)), "embeddings", "embed-daemon.js"); + return join14(dirname3(fileURLToPath3(import.meta.url)), "embeddings", "embed-daemon.js"); } var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false"; async function main() { diff --git a/claude-code/bundle/session-end.js 
b/claude-code/bundle/session-end.js index 7c1aab46..49093c38 100755 --- a/claude-code/bundle/session-end.js +++ b/claude-code/bundle/session-end.js @@ -241,14 +241,14 @@ function releaseLock(sessionId) { } } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js import { spawn as spawn2 } from "node:child_process"; import { fileURLToPath as fileURLToPath2 } from "node:url"; import { dirname as dirname2, join as join7 } from "node:path"; import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync4, appendFileSync as appendFileSync3, chmodSync } from "node:fs"; import { homedir as homedir6, tmpdir as tmpdir2 } from "node:os"; -// dist/src/skilify/gate-runner.js +// dist/src/skillify/gate-runner.js import { execFileSync } from "node:child_process"; import { existsSync as existsSync3 } from "node:fs"; import { homedir as homedir5 } from "node:os"; @@ -279,20 +279,20 @@ function findAgentBin(agent) { } } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js var HOME2 = homedir6(); -var SKILIFY_LOG = join7(HOME2, ".claude", "hooks", "skilify.log"); -function skilifyLog(msg) { +var SKILLIFY_LOG = join7(HOME2, ".claude", "hooks", "skillify.log"); +function skillifyLog(msg) { try { - mkdirSync4(dirname2(SKILIFY_LOG), { recursive: true }); - appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg} + mkdirSync4(dirname2(SKILLIFY_LOG), { recursive: true }); + appendFileSync3(SKILLIFY_LOG, `[${utcTimestamp()}] ${msg} `); } catch { } } -function spawnSkilifyWorker(opts) { +function spawnSkillifyWorker(opts) { const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts; - const tmpDir = join7(tmpdir2(), `deeplake-skilify-${projectKey}-${Date.now()}`); + const tmpDir = join7(tmpdir2(), `deeplake-skillify-${projectKey}-${Date.now()}`); mkdirSync4(tmpDir, { recursive: true, mode: 448 }); const gateBin = findAgentBin(agent); const configFile = join7(tmpDir, 
"config.json"); @@ -318,40 +318,72 @@ function spawnSkilifyWorker(opts) { hermesModel: process.env.HIVEMIND_HERMES_MODEL, piProvider: process.env.HIVEMIND_PI_PROVIDER, piModel: process.env.HIVEMIND_PI_MODEL, - skilifyLog: SKILIFY_LOG, + skillifyLog: SKILLIFY_LOG, currentSessionId }), { mode: 384 }); try { chmodSync(configFile, 384); } catch { } - skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`); - const workerPath = join7(bundleDir, "skilify-worker.js"); + skillifyLog(`${reason}: spawning skillify worker for project=${project} key=${projectKey}`); + const workerPath = join7(bundleDir, "skillify-worker.js"); spawn2("nohup", ["node", workerPath, configFile], { detached: true, stdio: ["ignore", "ignore", "ignore"] }).unref(); - skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`); + skillifyLog(`${reason}: spawned skillify worker for ${projectKey}`); } -// dist/src/skilify/state.js -import { readFileSync as readFileSync3, writeFileSync as writeFileSync4, writeSync as writeSync2, mkdirSync as mkdirSync5, renameSync as renameSync2, existsSync as existsSync4, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; +// dist/src/skillify/state.js +import { readFileSync as readFileSync3, writeFileSync as writeFileSync4, writeSync as writeSync2, mkdirSync as mkdirSync5, renameSync as renameSync3, existsSync as existsSync5, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; import { execSync as execSync2 } from "node:child_process"; -import { homedir as homedir7 } from "node:os"; +import { homedir as homedir8 } from "node:os"; import { createHash } from "node:crypto"; -import { join as join8, basename } from "node:path"; -var dlog2 = (msg) => log("skilify-state", msg); -var STATE_DIR2 = join8(homedir7(), ".deeplake", "state", "skilify"); +import { join as join9, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { 
existsSync as existsSync4, renameSync as renameSync2 } from "node:fs"; +import { homedir as homedir7 } from "node:os"; +import { join as join8 } from "node:path"; +var dlog2 = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join8(homedir7(), ".deeplake", "state"); + const legacy = join8(root, "skilify"); + const current = join8(root, "skillify"); + if (!existsSync4(legacy)) + return; + if (existsSync4(current)) + return; + try { + renameSync2(legacy, current); + dlog2(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog2(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog3 = (msg) => log("skillify-state", msg); +var STATE_DIR2 = join9(homedir8(), ".deeplake", "state", "skillify"); var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4)); var TRIGGER_THRESHOLD = (() => { - const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? 
n : 20; })(); function statePath(projectKey) { - return join8(STATE_DIR2, `${projectKey}.json`); + return join9(STATE_DIR2, `${projectKey}.json`); } function lockPath2(projectKey) { - return join8(STATE_DIR2, `${projectKey}.lock`); + return join9(STATE_DIR2, `${projectKey}.lock`); } function deriveProjectKey(cwd) { const project = basename(cwd) || "unknown"; @@ -369,8 +401,9 @@ function deriveProjectKey(cwd) { return { key, project }; } function readState(projectKey) { + migrateLegacyStateDir(); const p = statePath(projectKey); - if (!existsSync4(p)) + if (!existsSync5(p)) return null; try { return JSON.parse(readFileSync3(p, "utf-8")); @@ -379,13 +412,15 @@ function readState(projectKey) { } } function writeState(projectKey, state) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR2, { recursive: true }); const p = statePath(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; writeFileSync4(tmp, JSON.stringify(state, null, 2)); - renameSync2(tmp, p); + renameSync3(tmp, p); } function withRmwLock(projectKey, fn) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR2, { recursive: true }); const rmw = lockPath2(projectKey) + ".rmw"; const deadline = Date.now() + 2e3; @@ -397,11 +432,11 @@ function withRmwLock(projectKey, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { - dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + dlog3(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); try { unlinkSync2(rmw); } catch (unlinkErr) { - dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); } continue; } @@ -415,7 +450,7 @@ function withRmwLock(projectKey, fn) { try { unlinkSync2(rmw); } catch (unlinkErr) { - dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); } } } @@ -428,20 +463,21 @@ function 
resetCounter(projectKey) { }); } function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR2, { recursive: true }); const p = lockPath2(projectKey); - if (existsSync4(p)) { + if (existsSync5(p)) { try { const ageMs = Date.now() - parseInt(readFileSync3(p, "utf-8"), 10); if (Number.isFinite(ageMs) && ageMs < maxAgeMs) return false; } catch (readErr) { - dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); + dlog3(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); } try { unlinkSync2(p); } catch (unlinkErr) { - dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); + dlog3(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); return false; } } @@ -465,15 +501,16 @@ function releaseWorkerLock(projectKey) { } } -// dist/src/skilify/scope-config.js -import { existsSync as existsSync5, mkdirSync as mkdirSync6, readFileSync as readFileSync4, writeFileSync as writeFileSync5 } from "node:fs"; -import { homedir as homedir8 } from "node:os"; -import { join as join9 } from "node:path"; -var STATE_DIR3 = join9(homedir8(), ".deeplake", "state", "skilify"); -var CONFIG_PATH = join9(STATE_DIR3, "config.json"); +// dist/src/skillify/scope-config.js +import { existsSync as existsSync6, mkdirSync as mkdirSync6, readFileSync as readFileSync4, writeFileSync as writeFileSync5 } from "node:fs"; +import { homedir as homedir9 } from "node:os"; +import { join as join10 } from "node:path"; +var STATE_DIR3 = join10(homedir9(), ".deeplake", "state", "skillify"); +var CONFIG_PATH = join10(STATE_DIR3, "config.json"); var DEFAULT = { scope: "me", team: [], install: "project" }; function loadScopeConfig() { - if (!existsSync5(CONFIG_PATH)) + migrateLegacyStateDir(); + if (!existsSync6(CONFIG_PATH)) return DEFAULT; try { const raw = JSON.parse(readFileSync4(CONFIG_PATH, "utf-8")); @@ -486,24 +523,24 @@ function 
loadScopeConfig() { } } -// dist/src/skilify/triggers.js +// dist/src/skillify/triggers.js function forceSessionEndTrigger(opts) { - if (process.env.HIVEMIND_SKILIFY_WORKER === "1") + if (process.env.HIVEMIND_SKILLIFY_WORKER === "1") return; if (!opts.cwd) return; try { const { key: projectKey, project } = deriveProjectKey(opts.cwd); if (!tryAcquireWorkerLock(projectKey)) { - skilifyLog(`SessionEnd: skilify worker already running for ${projectKey}, skipping`); + skillifyLog(`SessionEnd: skillify worker already running for ${projectKey}, skipping`); return; } if (readState(projectKey)) { resetCounter(projectKey); } - skilifyLog(`SessionEnd: spawning skilify worker for project=${project} agent=${opts.agent}`); + skillifyLog(`SessionEnd: spawning skillify worker for project=${project} agent=${opts.agent}`); try { - spawnSkilifyWorker({ + spawnSkillifyWorker({ config: opts.config, cwd: opts.cwd, projectKey, @@ -515,14 +552,14 @@ function forceSessionEndTrigger(opts) { reason: "SessionEnd" }); } catch (e) { - skilifyLog(`SessionEnd spawn failed: ${e?.message ?? e}`); + skillifyLog(`SessionEnd spawn failed: ${e?.message ?? e}`); try { releaseWorkerLock(projectKey); } catch { } } } catch (e) { - skilifyLog(`SessionEnd trigger error: ${e?.message ?? e}`); + skillifyLog(`SessionEnd trigger error: ${e?.message ?? 
e}`); } } diff --git a/claude-code/bundle/session-start.js b/claude-code/bundle/session-start.js index 597ff4b7..556ce4d5 100755 --- a/claude-code/bundle/session-start.js +++ b/claude-code/bundle/session-start.js @@ -54,8 +54,8 @@ var init_index_marker_store = __esm({ // dist/src/hooks/session-start.js import { fileURLToPath } from "node:url"; -import { dirname as dirname4, join as join12 } from "node:path"; -import { homedir as homedir8 } from "node:os"; +import { dirname as dirname4, join as join13 } from "node:path"; +import { homedir as homedir9 } from "node:os"; // dist/src/commands/auth.js import { execSync } from "node:child_process"; @@ -697,12 +697,12 @@ async function autoUpdate(creds, opts) { log3(`agent=${opts.agent} dispatched (pid=${pid ?? "?"}) (${Date.now() - t0}ms total)`); } -// dist/src/skilify/pull.js -import { existsSync as existsSync7, readFileSync as readFileSync7, writeFileSync as writeFileSync5, mkdirSync as mkdirSync6, renameSync as renameSync2, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync3 } from "node:fs"; -import { homedir as homedir7 } from "node:os"; -import { dirname as dirname3, join as join11 } from "node:path"; +// dist/src/skillify/pull.js +import { existsSync as existsSync8, readFileSync as readFileSync7, writeFileSync as writeFileSync5, mkdirSync as mkdirSync6, renameSync as renameSync3, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync3 } from "node:fs"; +import { homedir as homedir8 } from "node:os"; +import { dirname as dirname3, join as join12 } from "node:path"; -// dist/src/skilify/skill-writer.js +// dist/src/skillify/skill-writer.js import { existsSync as existsSync4, mkdirSync as mkdirSync4, readFileSync as readFileSync5, readdirSync, statSync, writeFileSync as writeFileSync3 } from "node:fs"; import { homedir as homedir4 } from "node:os"; import { join as join8 } from "node:path"; @@ -763,18 +763,51 @@ function parseFrontmatter(text) { return { fm, body }; } 
-// dist/src/skilify/manifest.js -import { existsSync as existsSync5, lstatSync, mkdirSync as mkdirSync5, readFileSync as readFileSync6, renameSync, unlinkSync as unlinkSync2, writeFileSync as writeFileSync4 } from "node:fs"; +// dist/src/skillify/manifest.js +import { existsSync as existsSync6, lstatSync, mkdirSync as mkdirSync5, readFileSync as readFileSync6, renameSync as renameSync2, unlinkSync as unlinkSync2, writeFileSync as writeFileSync4 } from "node:fs"; +import { homedir as homedir6 } from "node:os"; +import { dirname as dirname2, join as join10 } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync5, renameSync } from "node:fs"; import { homedir as homedir5 } from "node:os"; -import { dirname as dirname2, join as join9 } from "node:path"; +import { join as join9 } from "node:path"; +var dlog = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join9(homedir5(), ".deeplake", "state"); + const legacy = join9(root, "skilify"); + const current = join9(root, "skillify"); + if (!existsSync5(legacy)) + return; + if (existsSync5(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/manifest.js function emptyManifest() { return { version: 1, entries: [] }; } function manifestPath() { - return join9(homedir5(), ".deeplake", "state", "skilify", "pulled.json"); + return join10(homedir6(), ".deeplake", "state", "skillify", "pulled.json"); } function loadManifest(path = manifestPath()) { - if (!existsSync5(path)) + migrateLegacyStateDir(); + if (!existsSync6(path)) return emptyManifest(); let raw; try { @@ -824,10 +857,11 @@ function loadManifest(path = 
manifestPath()) { } } function saveManifest(m, path = manifestPath()) { + migrateLegacyStateDir(); mkdirSync5(dirname2(path), { recursive: true }); const tmp = `${path}.tmp`; writeFileSync4(tmp, JSON.stringify(m, null, 2) + "\n", { mode: 384 }); - renameSync(tmp, path); + renameSync2(tmp, path); } function recordPull(entry, path = manifestPath()) { const m = loadManifest(path); @@ -862,7 +896,7 @@ function pruneOrphanedEntries(path = manifestPath()) { const live = []; let pruned = 0; for (const e of m.entries) { - if (existsSync5(join9(e.installRoot, e.dirName))) { + if (existsSync6(join10(e.installRoot, e.dirName))) { live.push(e); continue; } @@ -874,31 +908,31 @@ function pruneOrphanedEntries(path = manifestPath()) { return pruned; } -// dist/src/skilify/agent-roots.js -import { existsSync as existsSync6 } from "node:fs"; -import { homedir as homedir6 } from "node:os"; -import { join as join10 } from "node:path"; +// dist/src/skillify/agent-roots.js +import { existsSync as existsSync7 } from "node:fs"; +import { homedir as homedir7 } from "node:os"; +import { join as join11 } from "node:path"; function resolveDetected(home) { const out = []; - const codexInstalled = existsSync6(join10(home, ".codex")); - const piInstalled = existsSync6(join10(home, ".pi", "agent")); - const hermesInstalled = existsSync6(join10(home, ".hermes")); + const codexInstalled = existsSync7(join11(home, ".codex")); + const piInstalled = existsSync7(join11(home, ".pi", "agent")); + const hermesInstalled = existsSync7(join11(home, ".hermes")); if (codexInstalled || piInstalled) { - out.push(join10(home, ".agents", "skills")); + out.push(join11(home, ".agents", "skills")); } if (hermesInstalled) { - out.push(join10(home, ".hermes", "skills")); + out.push(join11(home, ".hermes", "skills")); } if (piInstalled) { - out.push(join10(home, ".pi", "agent", "skills")); + out.push(join11(home, ".pi", "agent", "skills")); } return out; } -function detectAgentSkillsRoots(canonicalRoot, home = 
homedir6()) { +function detectAgentSkillsRoots(canonicalRoot, home = homedir7()) { return resolveDetected(home).filter((p) => p !== canonicalRoot); } -// dist/src/skilify/pull.js +// dist/src/skillify/pull.js function assertValidAuthor(author) { if (!author) throw new Error("author is empty"); @@ -930,15 +964,15 @@ function isMissingTableError(message) { } function resolvePullDestination(install, cwd) { if (install === "global") - return join11(homedir7(), ".claude", "skills"); + return join12(homedir8(), ".claude", "skills"); if (!cwd) throw new Error("install=project requires a cwd"); - return join11(cwd, ".claude", "skills"); + return join12(cwd, ".claude", "skills"); } function fanOutSymlinks(canonicalDir, dirName, agentRoots) { const out = []; for (const root of agentRoots) { - const link = join11(root, dirName); + const link = join12(root, dirName); let existing; try { existing = lstatSync2(link); @@ -981,8 +1015,8 @@ function backfillSymlinks(installRoot) { return; const detected = detectAgentSkillsRoots(installRoot); for (const entry of entries) { - const canonical = join11(entry.installRoot, entry.dirName); - if (!existsSync7(canonical)) + const canonical = join12(entry.installRoot, entry.dirName); + if (!existsSync8(canonical)) continue; const fresh = fanOutSymlinks(canonical, entry.dirName, detected); if (sameSorted(fresh, entry.symlinks)) @@ -1067,7 +1101,7 @@ function renderFrontmatter(fm) { return lines.join("\n"); } function readLocalVersion(path) { - if (!existsSync7(path)) + if (!existsSync8(path)) return null; try { const text = readFileSync7(path, "utf-8"); @@ -1156,8 +1190,8 @@ async function runPull(opts) { summary.skipped++; continue; } - const skillDir = join11(root, dirName); - const skillFile = join11(skillDir, "SKILL.md"); + const skillDir = join12(root, dirName); + const skillFile = join12(skillDir, "SKILL.md"); const remoteVersion = Number(row.version ?? 
1); const localVersion = readLocalVersion(skillFile); const action = decideAction({ @@ -1169,9 +1203,9 @@ async function runPull(opts) { let manifestError; if (action === "wrote") { mkdirSync6(skillDir, { recursive: true }); - if (existsSync7(skillFile)) { + if (existsSync8(skillFile)) { try { - renameSync2(skillFile, `${skillFile}.bak`); + renameSync3(skillFile, `${skillFile}.bak`); } catch { } } @@ -1216,8 +1250,8 @@ async function runPull(opts) { return summary; } -// dist/src/skilify/auto-pull.js -var log4 = (msg) => log("skilify-autopull", msg); +// dist/src/skillify/auto-pull.js +var log4 = (msg) => log("skillify-autopull", msg); var DEFAULT_TIMEOUT_MS = 5e3; function withTimeout(p, ms) { let timer = null; @@ -1304,31 +1338,31 @@ Organization management \u2014 each argument is SEPARATE (do NOT quote subcomman - hivemind remove \u2014 remove member Skill management (mine + share reusable Claude skills across the org): -- hivemind skilify \u2014 show scope, team, install, per-project state -- hivemind skilify pull \u2014 sync project skills from the org table to local FS -- hivemind skilify pull --user \u2014 only skills authored by that user -- hivemind skilify pull --users \u2014 only skills from those authors -- hivemind skilify pull --all-users \u2014 explicit "no author filter" (default) -- hivemind skilify pull --to \u2014 install location (project=cwd/.claude/skills, global=~/.claude/skills) -- hivemind skilify pull --dry-run \u2014 preview without touching disk -- hivemind skilify pull --force \u2014 overwrite local files even if up-to-date (creates .bak) -- hivemind skilify pull \u2014 pull only that one skill (combines with --user) -- hivemind skilify unpull \u2014 remove every skill previously installed by pull -- hivemind skilify unpull --user \u2014 remove only that author's pulls -- hivemind skilify unpull --not-mine \u2014 remove all pulls except your own -- hivemind skilify unpull --dry-run \u2014 preview without touching disk -- hivemind 
skilify scope \u2014 sharing scope for newly mined skills -- hivemind skilify install \u2014 default install location for new skills -- hivemind skilify promote \u2014 move a project skill to the global location -- hivemind skilify team add|remove|list \u2014 manage team member list +- hivemind skillify \u2014 show scope, team, install, per-project state +- hivemind skillify pull \u2014 sync project skills from the org table to local FS +- hivemind skillify pull --user \u2014 only skills authored by that user +- hivemind skillify pull --users \u2014 only skills from those authors +- hivemind skillify pull --all-users \u2014 explicit "no author filter" (default) +- hivemind skillify pull --to \u2014 install location (project=cwd/.claude/skills, global=~/.claude/skills) +- hivemind skillify pull --dry-run \u2014 preview without touching disk +- hivemind skillify pull --force \u2014 overwrite local files even if up-to-date (creates .bak) +- hivemind skillify pull \u2014 pull only that one skill (combines with --user) +- hivemind skillify unpull \u2014 remove every skill previously installed by pull +- hivemind skillify unpull --user \u2014 remove only that author's pulls +- hivemind skillify unpull --not-mine \u2014 remove all pulls except your own +- hivemind skillify unpull --dry-run \u2014 preview without touching disk +- hivemind skillify scope \u2014 sharing scope for newly mined skills +- hivemind skillify install \u2014 default install location for new skills +- hivemind skillify promote \u2014 move a project skill to the global location +- hivemind skillify team add|remove|list \u2014 manage team member list IMPORTANT: Only use bash commands (cat, ls, grep, echo, jq, head, tail, etc.) to interact with ~/.deeplake/memory/. Do NOT use python, python3, node, curl, or other interpreters \u2014 they are not available in the memory filesystem. Avoid bash brace expansions like \`{1..10}\` (not fully supported); spell out paths explicitly. 
Bash output is capped at 10MB total \u2014 avoid \`for f in *.json; do cat $f\` style loops on the whole sessions dir. LIMITS: Do NOT spawn subagents to read deeplake memory. If a file returns empty after 2 attempts, skip it and move on. Report what you found rather than exhaustively retrying. Debugging: Set HIVEMIND_DEBUG=1 to enable verbose logging to ~/.deeplake/hook-debug.log`; -var HOME = homedir8(); -var { log: wikiLog } = makeWikiLogger(join12(HOME, ".claude", "hooks")); +var HOME = homedir9(); +var { log: wikiLog } = makeWikiLogger(join13(HOME, ".claude", "hooks")); async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, workspaceId) { const summaryPath = `/summaries/${userName}/${sessionId}.md`; const existing = await api.query(`SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1`); diff --git a/claude-code/bundle/skilify-worker.js b/claude-code/bundle/skillify-worker.js similarity index 92% rename from claude-code/bundle/skilify-worker.js rename to claude-code/bundle/skillify-worker.js index 8eb534fd..987a16ba 100755 --- a/claude-code/bundle/skilify-worker.js +++ b/claude-code/bundle/skillify-worker.js @@ -1,8 +1,8 @@ #!/usr/bin/env node -// dist/src/skilify/skilify-worker.js -import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, existsSync as existsSync4, appendFileSync as appendFileSync2, rmSync } from "node:fs"; -import { join as join5 } from "node:path"; +// dist/src/skillify/skillify-worker.js +import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, existsSync as existsSync5, appendFileSync as appendFileSync2, rmSync } from "node:fs"; +import { join as join6 } from "node:path"; // dist/src/utils/debug.js import { appendFileSync } from "node:fs"; @@ -29,7 +29,7 @@ function deeplakeClientHeader() { return { [DEEPLAKE_CLIENT_HEADER]: deeplakeClientValue() }; } -// dist/src/skilify/extractors/index.js +// dist/src/skillify/extractors/index.js function extractPairs(rows) { 
const pairs = []; let pendingPrompt = null; @@ -60,7 +60,7 @@ function extractPairs(rows) { return pairs; } -// dist/src/skilify/skill-writer.js +// dist/src/skillify/skill-writer.js import { existsSync, mkdirSync, readFileSync, readdirSync, statSync, writeFileSync } from "node:fs"; import { homedir as homedir2 } from "node:os"; import { join as join2 } from "node:path"; @@ -216,7 +216,7 @@ function resolveSkillsRoot(install, cwd) { return join2(cwd, ".claude", "skills"); } -// dist/src/skilify/skills-table.js +// dist/src/skillify/skills-table.js import { randomUUID } from "node:crypto"; // dist/src/utils/sql.js @@ -227,7 +227,7 @@ function sqlIdent(name) { return name; } -// dist/src/skilify/skills-table.js +// dist/src/skillify/skills-table.js function createSkillsTableSql(tableName) { const safe = sqlIdent(tableName); return `CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`; @@ -256,7 +256,7 @@ async function insertSkillRow(args) { } } -// dist/src/skilify/gate-parser.js +// dist/src/skillify/gate-parser.js function extractJsonBlock(s) { const trimmed = s.trim(); if (!trimmed) @@ -294,7 +294,7 @@ function parseVerdict(raw) { } } -// dist/src/skilify/gate-runner.js +// dist/src/skillify/gate-runner.js import { execFileSync } from "node:child_process"; import { existsSync as existsSync2 } from "node:fs"; import { homedir as homedir3 } from "node:os"; @@ -403,28 +403,61 @@ function 
runGate(opts) { } } -// dist/src/skilify/state.js -import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync3, unlinkSync, openSync, closeSync } from "node:fs"; +// dist/src/skillify/state.js +import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync as renameSync2, existsSync as existsSync4, unlinkSync, openSync, closeSync } from "node:fs"; import { execSync } from "node:child_process"; -import { homedir as homedir4 } from "node:os"; +import { homedir as homedir5 } from "node:os"; import { createHash } from "node:crypto"; -import { join as join4, basename } from "node:path"; -var dlog = (msg) => log("skilify-state", msg); -var STATE_DIR = join4(homedir4(), ".deeplake", "state", "skilify"); +import { join as join5, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync3, renameSync } from "node:fs"; +import { homedir as homedir4 } from "node:os"; +import { join as join4 } from "node:path"; +var dlog = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join4(homedir4(), ".deeplake", "state"); + const legacy = join4(root, "skilify"); + const current = join4(root, "skillify"); + if (!existsSync3(legacy)) + return; + if (existsSync3(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog2 = (msg) => log("skillify-state", msg); +var STATE_DIR = join5(homedir5(), ".deeplake", "state", "skillify"); var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); var TRIGGER_THRESHOLD = (() => { - 
const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? n : 20; })(); function statePath(projectKey) { - return join4(STATE_DIR, `${projectKey}.json`); + return join5(STATE_DIR, `${projectKey}.json`); } function lockPath(projectKey) { - return join4(STATE_DIR, `${projectKey}.lock`); + return join5(STATE_DIR, `${projectKey}.lock`); } function readState(projectKey) { + migrateLegacyStateDir(); const p = statePath(projectKey); - if (!existsSync3(p)) + if (!existsSync4(p)) return null; try { return JSON.parse(readFileSync2(p, "utf-8")); @@ -433,13 +466,15 @@ function readState(projectKey) { } } function writeState(projectKey, state) { + migrateLegacyStateDir(); mkdirSync2(STATE_DIR, { recursive: true }); const p = statePath(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; writeFileSync2(tmp, JSON.stringify(state, null, 2)); - renameSync(tmp, p); + renameSync2(tmp, p); } function withRmwLock(projectKey, fn) { + migrateLegacyStateDir(); mkdirSync2(STATE_DIR, { recursive: true }); const rmw = lockPath(projectKey) + ".rmw"; const deadline = Date.now() + 2e3; @@ -451,11 +486,11 @@ function withRmwLock(projectKey, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { - dlog(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); try { unlinkSync(rmw); } catch (unlinkErr) { - dlog(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); } continue; } @@ -469,7 +504,7 @@ function withRmwLock(projectKey, fn) { try { unlinkSync(rmw); } catch (unlinkErr) { - dlog(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); + dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); } } } @@ -509,18 +544,18 @@ function 
releaseWorkerLock(projectKey) { } } -// dist/src/skilify/skilify-worker.js +// dist/src/skillify/skillify-worker.js var cfg = JSON.parse(readFileSync3(process.argv[2], "utf-8")); var tmpDir = cfg.tmpDir; -var verdictPath = join5(tmpDir, "verdict.json"); -var promptPath = join5(tmpDir, "prompt.txt"); +var verdictPath = join6(tmpDir, "verdict.json"); +var promptPath = join6(tmpDir, "prompt.txt"); var SESSIONS_TO_MINE = 10; var PAIR_CHAR_CAP = 2e3; var TOTAL_PAIRS_CHAR_CAP = 4e4; var EXISTING_SKILLS_CHAR_CAP = 3e4; function wlog(msg) { try { - appendFileSync2(cfg.skilifyLog, `[${utcTimestamp()}] skilify-worker(${cfg.projectKey}): ${msg} + appendFileSync2(cfg.skillifyLog, `[${utcTimestamp()}] skillify-worker(${cfg.projectKey}): ${msg} `); } catch { } @@ -715,7 +750,7 @@ function buildPrompt(pairs) { ].join("\n"); } function readVerdict(stdout) { - if (existsSync4(verdictPath)) { + if (existsSync5(verdictPath)) { try { const text = readFileSync3(verdictPath, "utf-8"); const v2 = parseVerdict(text); @@ -784,9 +819,9 @@ async function main() { timeoutMs: 12e4 }); try { - writeFileSync3(join5(tmpDir, "gate-stdout.txt"), gate.stdout); + writeFileSync3(join6(tmpDir, "gate-stdout.txt"), gate.stdout); if (gate.stderr) - writeFileSync3(join5(tmpDir, "gate-stderr.txt"), gate.stderr); + writeFileSync3(join6(tmpDir, "gate-stderr.txt"), gate.stderr); } catch { } if (gate.errored) { diff --git a/claude-code/tests/codex-session-start-hook.test.ts b/claude-code/tests/codex-session-start-hook.test.ts index 3c8bb037..fc13abe6 100644 --- a/claude-code/tests/codex-session-start-hook.test.ts +++ b/claude-code/tests/codex-session-start-hook.test.ts @@ -27,9 +27,9 @@ vi.mock("../../src/utils/debug.js", () => ({ log: (_t: string, msg: string) => debugLogMock(msg), })); // Stub the auto-pull so the hook test doesn't hit the real Deeplake API or -// touch the developer's ~/.deeplake/state/skilify timestamp file. 
Tests for -// the auto-pull module itself live in claude-code/tests/skilify-auto-pull.test.ts. -vi.mock("../../src/skilify/auto-pull.js", () => ({ +// touch the developer's ~/.deeplake/state/skillify timestamp file. Tests for +// the auto-pull module itself live in claude-code/tests/skillify-auto-pull.test.ts. +vi.mock("../../src/skillify/auto-pull.js", () => ({ autoPullSkills: (...a: any[]) => autoPullSkillsMock(...a), })); vi.mock("node:child_process", async () => { diff --git a/claude-code/tests/cursor-session-start-hook.test.ts b/claude-code/tests/cursor-session-start-hook.test.ts index 42f47ab7..92b2dfad 100644 --- a/claude-code/tests/cursor-session-start-hook.test.ts +++ b/claude-code/tests/cursor-session-start-hook.test.ts @@ -67,7 +67,7 @@ beforeEach(() => { // Disable auto-pull during this test: autoPullSkills would otherwise issue // a third SQL query (against `skills`) through the same DeeplakeApi mock, // breaking call-count assertions. The auto-pull module's behaviour is - // covered exhaustively in skilify-auto-pull.test.ts, so the hook tests + // covered exhaustively in skillify-auto-pull.test.ts, so the hook tests // never need it active. process.env.HIVEMIND_AUTOPULL_DISABLED = "1"; }); diff --git a/claude-code/tests/hermes-session-start-hook.test.ts b/claude-code/tests/hermes-session-start-hook.test.ts index 04ea8cc9..5038ac0f 100644 --- a/claude-code/tests/hermes-session-start-hook.test.ts +++ b/claude-code/tests/hermes-session-start-hook.test.ts @@ -67,7 +67,7 @@ beforeEach(() => { // Disable auto-pull during this test: autoPullSkills would otherwise issue // a third SQL query (against `skills`) through the same DeeplakeApi mock, // breaking call-count assertions. The auto-pull module's behaviour is - // covered exhaustively in skilify-auto-pull.test.ts, so the hook tests + // covered exhaustively in skillify-auto-pull.test.ts, so the hook tests // never need it active. 
process.env.HIVEMIND_AUTOPULL_DISABLED = "1"; }); diff --git a/claude-code/tests/session-start-hook.test.ts b/claude-code/tests/session-start-hook.test.ts index 32709f2a..c0b322f8 100644 --- a/claude-code/tests/session-start-hook.test.ts +++ b/claude-code/tests/session-start-hook.test.ts @@ -110,7 +110,7 @@ beforeEach(() => { // Disable auto-pull during this test: autoPullSkills would otherwise issue // an extra SQL query (against `skills`) through the same DeeplakeApi mock, // breaking the placeholder-branching call-count assertions. The auto-pull - // module's behaviour is covered exhaustively in skilify-auto-pull.test.ts. + // module's behaviour is covered exhaustively in skillify-auto-pull.test.ts. process.env.HIVEMIND_AUTOPULL_DISABLED = "1"; }); diff --git a/claude-code/tests/sessions-table.test.ts b/claude-code/tests/sessions-table.test.ts index 7369adf3..c4f8ac4b 100644 --- a/claude-code/tests/sessions-table.test.ts +++ b/claude-code/tests/sessions-table.test.ts @@ -268,7 +268,7 @@ describe("session files are read-only", () => { }); }); -describe("ensureSkillsTable schema (skilify provenance table)", () => { +describe("ensureSkillsTable schema (skillify provenance table)", () => { it("creates skills table with all expected columns when missing", async () => { const { DeeplakeApi } = await import("../../src/deeplake-api.js"); const api = new DeeplakeApi("token", "https://api.test", "org", "ws", "memory"); diff --git a/claude-code/tests/skilify-agent-roots.test.ts b/claude-code/tests/skillify-agent-roots.test.ts similarity index 97% rename from claude-code/tests/skilify-agent-roots.test.ts rename to claude-code/tests/skillify-agent-roots.test.ts index bd021b48..06ae4eb7 100644 --- a/claude-code/tests/skilify-agent-roots.test.ts +++ b/claude-code/tests/skillify-agent-roots.test.ts @@ -2,12 +2,12 @@ import { afterEach, beforeEach, describe, expect, it } from "vitest"; import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"; import { tmpdir } 
from "node:os"; import { join } from "node:path"; -import { detectAgentSkillsRoots } from "../../src/skilify/agent-roots.js"; +import { detectAgentSkillsRoots } from "../../src/skillify/agent-roots.js"; let tmpHome: string; beforeEach(() => { - tmpHome = mkdtempSync(join(tmpdir(), "skilify-agent-roots-")); + tmpHome = mkdtempSync(join(tmpdir(), "skillify-agent-roots-")); }); afterEach(() => { diff --git a/claude-code/tests/skilify-auto-pull.test.ts b/claude-code/tests/skillify-auto-pull.test.ts similarity index 97% rename from claude-code/tests/skilify-auto-pull.test.ts rename to claude-code/tests/skillify-auto-pull.test.ts index 42dc5e28..874d9cd0 100644 --- a/claude-code/tests/skilify-auto-pull.test.ts +++ b/claude-code/tests/skillify-auto-pull.test.ts @@ -1,5 +1,5 @@ /** - * Tests for src/skilify/auto-pull.ts. + * Tests for src/skillify/auto-pull.ts. * * Mocks at the network seam — the real `runPull` runs end-to-end against an * injected QueryFn that returns canned rows. Filesystem effects are scoped @@ -7,7 +7,7 @@ * ~/.claude on the developer's machine. * * Coverage targets per vitest.config.ts (90% lines / 90% functions / - * 70% branches for src/skilify/auto-pull.ts). + * 70% branches for src/skillify/auto-pull.ts). */ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; @@ -28,13 +28,13 @@ vi.mock("../../src/deeplake-api.js", () => ({ }, })); -import { autoPullSkills } from "../../src/skilify/auto-pull.js"; -import type { QueryFn } from "../../src/skilify/pull.js"; +import { autoPullSkills } from "../../src/skillify/auto-pull.js"; +import type { QueryFn } from "../../src/skillify/pull.js"; import type { Config } from "../../src/config.js"; // ─── Test harness ────────────────────────────────────────────────────────────── // We pin HOME to a per-test temp dir so any file writes land in the sandbox -// instead of the user's real ~/.deeplake. Same trick the rest of the skilify +// instead of the user's real ~/.deeplake. 
Same trick the rest of the skillify // suite uses. let tmpHome: string; @@ -138,7 +138,7 @@ describe("autoPullSkills — runs every call", () => { expect(calls).toHaveLength(1); expect(calls[0]).toContain(`FROM "skills"`); expect(calls[0]).not.toMatch(/WHERE/); // users=[] → no author filter - // Pulled skills land at /--/SKILL.md (see pull.ts:330). + // Pulled skills land at /--/SKILL.md (see src/skillify/pull.ts:330). expect(existsSync(join(tmpHome, ".claude/skills/shared-skill--alice/SKILL.md"))).toBe(true); }); diff --git a/claude-code/tests/skilify-bundle-scan.test.ts b/claude-code/tests/skillify-bundle-scan.test.ts similarity index 54% rename from claude-code/tests/skilify-bundle-scan.test.ts rename to claude-code/tests/skillify-bundle-scan.test.ts index 2d832ec9..ff959a38 100644 --- a/claude-code/tests/skilify-bundle-scan.test.ts +++ b/claude-code/tests/skillify-bundle-scan.test.ts @@ -3,7 +3,7 @@ import { existsSync, readFileSync } from "node:fs"; import { join } from "node:path"; /** - * Bundle-level guard: assert the skilify worker is actually shipped in + * Bundle-level guard: assert the skillify worker is actually shipped in * every agent's bundle and that each agent's hook bundle contains the * trigger wiring. Source-level tests prove the modules are correct; * these tests prove `npm run build` didn't drop them. 
@@ -16,14 +16,14 @@ function bundlePath(agent: string, file: string): string { return join(ROOT, agent, "bundle", file); } -describe("skilify-worker bundle is shipped per agent", () => { +describe("skillify-worker bundle is shipped per agent", () => { for (const agent of AGENTS) { - it(`${agent}/bundle/skilify-worker.js exists and contains the worker entry`, () => { - const path = bundlePath(agent, "skilify-worker.js"); + it(`${agent}/bundle/skillify-worker.js exists and contains the worker entry`, () => { + const path = bundlePath(agent, "skillify-worker.js"); expect(existsSync(path), `${path} missing`).toBe(true); const text = readFileSync(path, "utf-8"); - // Sanity: bundle should have the skilify log channel and the gate prompt. - expect(text).toContain("skilify-worker("); + // Sanity: bundle should have the skillify log channel and the gate prompt. + expect(text).toContain("skillify-worker("); expect(text).toContain("EXISTING PROJECT SKILLS"); // Watermark advance is the SKIP hot path. expect(text).toContain("advancing watermark"); @@ -85,8 +85,50 @@ describe("each agent records the correct agent name", () => { describe("known anti-patterns are absent from bundled worker", () => { it("does not UPDATE the skills table — append-only by design (CLAUDE.md UPDATE-coalescing quirk)", () => { for (const agent of AGENTS) { - const text = readFileSync(bundlePath(agent, "skilify-worker.js"), "utf-8"); - expect(text, `${agent}: skilify-worker.js contains UPDATE on skills table`).not.toMatch(/UPDATE\s+"?skills"?\s+SET/i); + const text = readFileSync(bundlePath(agent, "skillify-worker.js"), "utf-8"); + expect(text, `${agent}: skillify-worker.js contains UPDATE on skills table`).not.toMatch(/UPDATE\s+"?skills"?\s+SET/i); } }); }); + +describe("legacy state-dir migration is shipped in every agent's bundle", () => { + // The migration call wires into the four read/write entry points so a + // post-rename worker / SessionStart sees the migrated state. 
If any of + // these regressions ship, users with a populated ~/.deeplake/state/skilify/ + // would silently start fresh on ~/.deeplake/state/skillify/. + // + // claude-code/codex/cursor/hermes ship the shared TS module compiled into + // skillify-worker.js + the SessionStart hooks. pi ships skillify-worker.js + // too (no SessionStart hook). openclaw inlines an equivalent helper + // because its self-contained bundle can't import from src/skillify. + const SHARED_AGENTS = [...AGENTS, "pi"] as const; + + for (const agent of SHARED_AGENTS) { + it(`${agent}/bundle/skillify-worker.js: migration helper present and called from readState`, () => { + const text = readFileSync(bundlePath(agent, "skillify-worker.js"), "utf-8"); + expect(text, `${agent}: migrateLegacyStateDir helper missing`).toContain("function migrateLegacyStateDir"); + // readState is the first state file the worker touches; if migration + // isn't called here the worker re-mines already-processed sessions. + expect(text, `${agent}: readState missing migrateLegacyStateDir call`).toMatch( + /function readState\([^)]*\)\s*\{\s*migrateLegacyStateDir\(\)/, + ); + // Narrow-catch behaviour: only EXDEV/EPERM swallowed; everything else rethrows. + expect(text, `${agent}: migration swallows too broadly`).toMatch( + /code === "EXDEV" \|\| code === "EPERM"/, + ); + }); + } + + it("openclaw/dist/index.js: inlined migration present and called before fsMkdir", () => { + const text = readFileSync(join(ROOT, "openclaw", "dist", "index.js"), "utf-8"); + expect(text).toContain("function migrateOpenclawSkillifyLegacyStateDir"); + // Must be called inside tryAcquireOpenclawSkillifyLock before the fsMkdir. + // The order matters: once fsMkdir creates the new dir, the migration + // becomes a no-op and any legacy data is orphaned. + expect(text).toMatch( + /function tryAcquireOpenclawSkillifyLock[\s\S]{0,200}migrateOpenclawSkillifyLegacyStateDir\(\)[\s\S]{0,200}fsMkdir/, + ); + // Same narrow-catch as the shared helper. 
+ expect(text).toMatch(/code === "EXDEV" \|\| code === "EPERM"/); + }); +}); diff --git a/claude-code/tests/skilify-cli.test.ts b/claude-code/tests/skillify-cli.test.ts similarity index 82% rename from claude-code/tests/skilify-cli.test.ts rename to claude-code/tests/skillify-cli.test.ts index 31008a3d..39b1b031 100644 --- a/claude-code/tests/skilify-cli.test.ts +++ b/claude-code/tests/skillify-cli.test.ts @@ -24,7 +24,7 @@ vi.mock("../../src/deeplake-api.js", () => ({ }, })); -import { runSkilifyCommand } from "../../src/commands/skilify.js"; +import { runSkillifyCommand } from "../../src/commands/skillify.js"; import { loadConfig } from "../../src/config.js"; const loadConfigMock = loadConfig as unknown as ReturnType; @@ -35,7 +35,7 @@ const VALID_CONFIG = { orgName: "org", }; -const STATE_DIR = join(homedir(), ".deeplake", "state", "skilify"); +const STATE_DIR = join(homedir(), ".deeplake", "state", "skillify"); const CONFIG_PATH = join(STATE_DIR, "config.json"); let configBackup: string | null = null; let logSpy: ReturnType; @@ -79,7 +79,7 @@ function expectExit(code: number, fn: () => void): void { describe("status (default subcommand)", () => { it("prints scope, team, install when config is empty", () => { - runSkilifyCommand([]); + runSkillifyCommand([]); const out = logged.join("\n"); expect(out).toMatch(/scope:\s+me/); expect(out).toMatch(/team:\s+\(empty\)/); @@ -87,24 +87,24 @@ describe("status (default subcommand)", () => { }); it("`status` subcommand alias", () => { - runSkilifyCommand(["status"]); + runSkillifyCommand(["status"]); expect(logged.join("\n")).toMatch(/scope:/); }); it("does NOT count config.json or pulled.json as tracked projects", () => { - // Both files live in the same STATE_DIR but are skilify's own bookkeeping; + // Both files live in the same STATE_DIR but are skillify's own bookkeeping; // counting them would inflate "N project(s) tracked" and the parse loop // below would JSON.parse the wrong shape and silently swallow the error. 
- const stateHome = mkdtempSync(join(tmpdir(), "skilify-cli-status-")); + const stateHome = mkdtempSync(join(tmpdir(), "skillify-cli-status-")); const prevHome = process.env.HOME; process.env.HOME = stateHome; try { - const stateDir = join(stateHome, ".deeplake", "state", "skilify"); + const stateDir = join(stateHome, ".deeplake", "state", "skillify"); mkdirSync(stateDir, { recursive: true }); writeFileSync(join(stateDir, "config.json"), JSON.stringify({ scope: "me", team: [], install: "global" })); writeFileSync(join(stateDir, "pulled.json"), JSON.stringify({ version: 1, entries: [] })); logged = []; - runSkilifyCommand([]); + runSkillifyCommand([]); const out = logged.join("\n"); expect(out).toMatch(/state: \(no projects tracked yet\)/); expect(out).not.toMatch(/project\(s\) tracked/); @@ -120,23 +120,23 @@ describe("status (default subcommand)", () => { describe("scope", () => { it("sets scope=team", () => { - runSkilifyCommand(["scope", "team"]); + runSkillifyCommand(["scope", "team"]); expect(logged.join("\n")).toMatch(/Scope set to 'team'/); expect(JSON.parse(readFileSync(CONFIG_PATH, "utf-8")).scope).toBe("team"); }); it("warns when scope=team but team list is empty", () => { - runSkilifyCommand(["scope", "team"]); + runSkillifyCommand(["scope", "team"]); expect(logged.join("\n")).toMatch(/team list is empty/); }); it("rejects invalid scope", () => { - expectExit(1, () => runSkilifyCommand(["scope", "bogus"])); + expectExit(1, () => runSkillifyCommand(["scope", "bogus"])); expect(erred.join("\n")).toMatch(/Invalid scope 'bogus'/); }); it("rejects empty scope arg", () => { - expectExit(1, () => runSkilifyCommand(["scope", ""])); + expectExit(1, () => runSkillifyCommand(["scope", ""])); }); }); @@ -144,18 +144,18 @@ describe("scope", () => { describe("install", () => { it("sets install=global", () => { - runSkilifyCommand(["install", "global"]); + runSkillifyCommand(["install", "global"]); expect(logged.join("\n")).toMatch(/Install location set to 'global'/); 
expect(JSON.parse(readFileSync(CONFIG_PATH, "utf-8")).install).toBe("global"); }); it("sets install=project", () => { - runSkilifyCommand(["install", "project"]); + runSkillifyCommand(["install", "project"]); expect(JSON.parse(readFileSync(CONFIG_PATH, "utf-8")).install).toBe("project"); }); it("rejects invalid install location", () => { - expectExit(1, () => runSkilifyCommand(["install", "weird"])); + expectExit(1, () => runSkillifyCommand(["install", "weird"])); }); }); @@ -163,45 +163,45 @@ describe("install", () => { describe("team", () => { it("adds, lists, removes a member", () => { - runSkilifyCommand(["team", "add", "alice"]); + runSkillifyCommand(["team", "add", "alice"]); expect(logged.join("\n")).toMatch(/Added 'alice'/); logged.length = 0; - runSkilifyCommand(["team", "list"]); + runSkillifyCommand(["team", "list"]); expect(logged.join("\n")).toMatch(/^alice$/m); logged.length = 0; - runSkilifyCommand(["team", "remove", "alice"]); + runSkillifyCommand(["team", "remove", "alice"]); expect(logged.join("\n")).toMatch(/Removed 'alice'/); }); it("dedupes when adding an existing name", () => { - runSkilifyCommand(["team", "add", "alice"]); + runSkillifyCommand(["team", "add", "alice"]); logged.length = 0; - runSkilifyCommand(["team", "add", "alice"]); + runSkillifyCommand(["team", "add", "alice"]); expect(logged.join("\n")).toMatch(/already in the team list/); }); it("no-ops when removing a non-existent name", () => { - runSkilifyCommand(["team", "remove", "ghost"]); + runSkillifyCommand(["team", "remove", "ghost"]); expect(logged.join("\n")).toMatch(/not in the team list/); }); it("team list when empty prints sentinel", () => { - runSkilifyCommand(["team", "list"]); + runSkillifyCommand(["team", "list"]); expect(logged.join("\n")).toMatch(/empty/); }); it("rejects unknown team action", () => { - expectExit(1, () => runSkilifyCommand(["team", "bogus"])); + expectExit(1, () => runSkillifyCommand(["team", "bogus"])); }); it("rejects team add with no name", () 
=> { - expectExit(1, () => runSkilifyCommand(["team", "add"])); + expectExit(1, () => runSkillifyCommand(["team", "add"])); }); it("rejects team remove with no name", () => { - expectExit(1, () => runSkilifyCommand(["team", "remove"])); + expectExit(1, () => runSkillifyCommand(["team", "remove"])); }); }); @@ -209,13 +209,13 @@ describe("team", () => { describe("promote", () => { it("rejects empty skill name", () => { - expectExit(1, () => runSkilifyCommand(["promote"])); + expectExit(1, () => runSkillifyCommand(["promote"])); }); it("errors when project skill is missing", () => { - const dir = mkdtempSync(join(tmpdir(), "skilify-cli-")); + const dir = mkdtempSync(join(tmpdir(), "skillify-cli-")); process.chdir(dir); - expectExit(1, () => runSkilifyCommand(["promote", "nonexistent-skill"])); + expectExit(1, () => runSkillifyCommand(["promote", "nonexistent-skill"])); expect(erred.join("\n")).toMatch(/not found/); rmSync(dir, { recursive: true, force: true }); }); @@ -225,7 +225,7 @@ describe("promote", () => { describe("pull", () => { it("runs --dry-run and prints summary", async () => { - runSkilifyCommand(["pull", "--dry-run"]); + runSkillifyCommand(["pull", "--dry-run"]); // Async — wait for promise tail await new Promise(r => setImmediate(r)); const out = logged.join("\n"); @@ -235,40 +235,40 @@ describe("pull", () => { }); it("--to global is default destination", async () => { - runSkilifyCommand(["pull", "--dry-run"]); + runSkillifyCommand(["pull", "--dry-run"]); await new Promise(r => setImmediate(r)); expect(logged.join("\n")).toMatch(/Destination:.*\.claude\/skills/); }); it("--to project lands files in cwd/.claude/skills", async () => { - const dir = mkdtempSync(join(tmpdir(), "skilify-cli-pull-")); + const dir = mkdtempSync(join(tmpdir(), "skillify-cli-pull-")); process.chdir(dir); - runSkilifyCommand(["pull", "--to", "project", "--dry-run"]); + runSkillifyCommand(["pull", "--to", "project", "--dry-run"]); await new Promise(r => setImmediate(r)); 
expect(logged.join("\n")).toMatch(new RegExp(`Destination:\\s+${dir}/.claude/skills`)); rmSync(dir, { recursive: true, force: true }); }); it("--user X filters by single author", async () => { - runSkilifyCommand(["pull", "--user", "alice", "--dry-run"]); + runSkillifyCommand(["pull", "--user", "alice", "--dry-run"]); await new Promise(r => setImmediate(r)); expect(logged.join("\n")).toMatch(/Filter:\s+alice/); }); it("--users a,b,c filters by multiple authors", async () => { - runSkilifyCommand(["pull", "--users", "alice,bob,carol", "--dry-run"]); + runSkillifyCommand(["pull", "--users", "alice,bob,carol", "--dry-run"]); await new Promise(r => setImmediate(r)); expect(logged.join("\n")).toMatch(/Filter:\s+alice, bob, carol/); }); it("--all-users explicitly filters by no author", async () => { - runSkilifyCommand(["pull", "--all-users", "--dry-run"]); + runSkillifyCommand(["pull", "--all-users", "--dry-run"]); await new Promise(r => setImmediate(r)); expect(logged.join("\n")).toMatch(/Filter:\s+all users/); }); it("positional skill-name flows into the filter", async () => { - runSkilifyCommand(["pull", "fake-skill", "--dry-run"]); + runSkillifyCommand(["pull", "fake-skill", "--dry-run"]); await new Promise(r => setImmediate(r)); expect(logged.join("\n")).toMatch(/skill='fake-skill'/); }); @@ -288,7 +288,7 @@ describe("unpull", () => { let unpullHome: string; let originalHome: string | undefined; beforeEach(() => { - unpullHome = mkdtempSync(join(tmpdir(), "skilify-cli-unpull-home-")); + unpullHome = mkdtempSync(join(tmpdir(), "skillify-cli-unpull-home-")); originalHome = process.env.HOME; process.env.HOME = unpullHome; }); @@ -299,7 +299,7 @@ describe("unpull", () => { }); it("--dry-run on empty manifest reports zero work", () => { - runSkilifyCommand(["unpull", "--dry-run"]); + runSkillifyCommand(["unpull", "--dry-run"]); const out = logged.join("\n"); expect(out).toMatch(/Scanning:/); expect(out).toMatch(/Filter:\s+dry-run/); @@ -307,7 +307,7 @@ 
describe("unpull", () => { }); it("default filter description is 'no filter — all pulled'", () => { - runSkilifyCommand(["unpull"]); + runSkillifyCommand(["unpull"]); expect(logged.join("\n")).toMatch(/Filter:\s+\(no filter — all pulled\)/); }); @@ -315,7 +315,7 @@ describe("unpull", () => { // --all and --legacy-cleanup are mutually exclusive with --user/--users // /--not-mine (see filter+all conflict guard), so the manifest-only // path is the right surface to assert flag composition on. - runSkilifyCommand(["unpull", "--user", "alice", "--not-mine", "--dry-run"]); + runSkillifyCommand(["unpull", "--user", "alice", "--not-mine", "--dry-run"]); const out = logged.join("\n"); expect(out).toMatch(/users=alice/); expect(out).toMatch(/not-mine/); @@ -323,7 +323,7 @@ describe("unpull", () => { }); it("composes disk-walk flags into the filter description", () => { - runSkilifyCommand(["unpull", "--all", "--legacy-cleanup", "--dry-run"]); + runSkillifyCommand(["unpull", "--all", "--legacy-cleanup", "--dry-run"]); const out = logged.join("\n"); expect(out).toMatch(/all/); expect(out).toMatch(/legacy-cleanup/); @@ -331,14 +331,14 @@ describe("unpull", () => { }); it("--users a,b,c parses CSV into the filter", () => { - runSkilifyCommand(["unpull", "--users", "alice,bob,carol", "--dry-run"]); + runSkillifyCommand(["unpull", "--users", "alice,bob,carol", "--dry-run"]); expect(logged.join("\n")).toMatch(/users=alice,bob,carol/); }); it("--to project scopes the scanning root to cwd", () => { - const dir = mkdtempSync(join(tmpdir(), "skilify-cli-unpull-proj-")); + const dir = mkdtempSync(join(tmpdir(), "skillify-cli-unpull-proj-")); process.chdir(dir); - runSkilifyCommand(["unpull", "--to", "project", "--dry-run"]); + runSkillifyCommand(["unpull", "--to", "project", "--dry-run"]); expect(logged.join("\n")).toMatch(new RegExp(`Scanning:\\s+${dir}/.claude/skills`)); rmSync(dir, { recursive: true, force: true }); }); @@ -346,40 +346,40 @@ describe("unpull", () => { it("--to with 
invalid value reports error", async () => { // unpullSkills throws on bad input; the dispatcher's `.catch` logs // the message via console.error and exits 1. - runSkilifyCommand(["unpull", "--to", "weird"]); + runSkillifyCommand(["unpull", "--to", "weird"]); await new Promise(r => setImmediate(r)); expect(erred.join("\n")).toMatch(/Invalid --to/); }); it("integrates with pull: round-trip clears manifest + disk", async () => { // 1. pull populates manifest + disk - runSkilifyCommand(["pull", "--user", "alice", "--to", "global"]); + runSkillifyCommand(["pull", "--user", "alice", "--to", "global"]); await new Promise(r => setImmediate(r)); const out1 = logged.join("\n"); expect(out1).toMatch(/1 written/); logged = []; // 2. unpull clears it - runSkilifyCommand(["unpull", "--user", "alice"]); + runSkillifyCommand(["unpull", "--user", "alice"]); const out2 = logged.join("\n"); expect(out2).toMatch(/1 removed/); expect(out2).toMatch(/fake-skill--alice/); // 3. re-running unpull is idempotent (no entries, no errors) logged = []; - runSkilifyCommand(["unpull"]); + runSkillifyCommand(["unpull"]); expect(logged.join("\n")).toMatch(/Scanned 0 dir\(s\)/); }); it("emits 'manifest-pruned' tag when an entry's directory is missing on disk", async () => { // pull installs a skill, then we delete its dir out-of-band so the // manifest entry becomes an orphan - runSkilifyCommand(["pull", "--user", "alice", "--to", "global"]); + runSkillifyCommand(["pull", "--user", "alice", "--to", "global"]); await new Promise(r => setImmediate(r)); rmSync(join(unpullHome, ".claude", "skills"), { recursive: true, force: true }); logged = []; - runSkilifyCommand(["unpull"]); + runSkillifyCommand(["unpull"]); const out = logged.join("\n"); expect(out).toMatch(/pruned \(orphan\)/); expect(out).toMatch(/manifest-pruned/); @@ -392,7 +392,7 @@ describe("unpull", () => { it("default unpull works when not logged in (no Deeplake call required)", async () => { loadConfigMock.mockReturnValue(null); - 
runSkilifyCommand(["unpull", "--dry-run"]); + runSkillifyCommand(["unpull", "--dry-run"]); await new Promise(r => setImmediate(r)); expect(erred.join("\n")).not.toMatch(/login/i); expect(logged.join("\n")).toMatch(/Result: 0 removed/); @@ -400,7 +400,7 @@ describe("unpull", () => { it("--user X works when not logged in (filter is local, not a server query)", async () => { loadConfigMock.mockReturnValue(null); - runSkilifyCommand(["unpull", "--user", "alice", "--dry-run"]); + runSkillifyCommand(["unpull", "--user", "alice", "--dry-run"]); await new Promise(r => setImmediate(r)); expect(erred.join("\n")).not.toMatch(/login/i); expect(logged.join("\n")).toMatch(/users=alice/); @@ -408,7 +408,7 @@ describe("unpull", () => { it("--not-mine still requires login (needs myUsername to exclude self)", async () => { loadConfigMock.mockReturnValue(null); - runSkilifyCommand(["unpull", "--not-mine", "--dry-run"]); + runSkillifyCommand(["unpull", "--not-mine", "--dry-run"]); await new Promise(r => setImmediate(r)); expect(erred.join("\n")).toMatch(/--not-mine requires a logged-in user/); }); @@ -416,13 +416,13 @@ describe("unpull", () => { // ── filter+all conflict surfacing ───────────────────────────────────────── it("--all combined with --user surfaces a clear error message", async () => { - runSkilifyCommand(["unpull", "--all", "--user", "alice"]); + runSkillifyCommand(["unpull", "--all", "--user", "alice"]); await new Promise(r => setImmediate(r)); expect(erred.join("\n")).toMatch(/--all.*--user/); }); it("--legacy-cleanup combined with --not-mine surfaces a clear error message", async () => { - runSkilifyCommand(["unpull", "--legacy-cleanup", "--not-mine"]); + runSkillifyCommand(["unpull", "--legacy-cleanup", "--not-mine"]); await new Promise(r => setImmediate(r)); expect(erred.join("\n")).toMatch(/--legacy-cleanup.*--not-mine/); }); @@ -432,17 +432,17 @@ describe("unpull", () => { describe("usage", () => { it("--help prints usage", () => { - runSkilifyCommand(["--help"]); 
+ runSkillifyCommand(["--help"]); expect(logged.join("\n")).toMatch(/Usage:/); }); it("-h prints usage", () => { - runSkilifyCommand(["-h"]); + runSkillifyCommand(["-h"]); expect(logged.join("\n")).toMatch(/Usage:/); }); it("unknown subcommand exits 1", () => { - expectExit(1, () => runSkilifyCommand(["totally-unknown"])); - expect(erred.join("\n")).toMatch(/Unknown skilify subcommand/); + expectExit(1, () => runSkillifyCommand(["totally-unknown"])); + expect(erred.join("\n")).toMatch(/Unknown skillify subcommand/); }); }); diff --git a/claude-code/tests/skilify-extractor.test.ts b/claude-code/tests/skillify-extractor.test.ts similarity index 97% rename from claude-code/tests/skilify-extractor.test.ts rename to claude-code/tests/skillify-extractor.test.ts index aadde8e2..0a88c679 100644 --- a/claude-code/tests/skilify-extractor.test.ts +++ b/claude-code/tests/skillify-extractor.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect } from "vitest"; -import { extractPairs, type SessionRow } from "../../src/skilify/extractors/index.js"; +import { extractPairs, type SessionRow } from "../../src/skillify/extractors/index.js"; describe("extractPairs", () => { it("pairs a single user prompt with the immediately following assistant message", () => { diff --git a/claude-code/tests/skilify-gate-parser.test.ts b/claude-code/tests/skillify-gate-parser.test.ts similarity index 96% rename from claude-code/tests/skilify-gate-parser.test.ts rename to claude-code/tests/skillify-gate-parser.test.ts index 66bc4c24..2d240b24 100644 --- a/claude-code/tests/skilify-gate-parser.test.ts +++ b/claude-code/tests/skillify-gate-parser.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "vitest"; -import { extractJsonBlock, parseVerdict } from "../../src/skilify/gate-parser.js"; +import { extractJsonBlock, parseVerdict } from "../../src/skillify/gate-parser.js"; describe("extractJsonBlock", () => { it("returns null for empty / whitespace input", () => { diff --git 
a/claude-code/tests/skilify-gate-runner.test.ts b/claude-code/tests/skillify-gate-runner.test.ts similarity index 99% rename from claude-code/tests/skilify-gate-runner.test.ts rename to claude-code/tests/skillify-gate-runner.test.ts index 6fae8d48..ce342b0b 100644 --- a/claude-code/tests/skilify-gate-runner.test.ts +++ b/claude-code/tests/skillify-gate-runner.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "vitest"; -import { runGate, findAgentBin, type Agent } from "../../src/skilify/gate-runner.js"; +import { runGate, findAgentBin, type Agent } from "../../src/skillify/gate-runner.js"; describe("findAgentBin", () => { it("returns a path for each known agent (PATH lookup or fallback)", () => { diff --git a/claude-code/tests/skillify-legacy-migration.test.ts b/claude-code/tests/skillify-legacy-migration.test.ts new file mode 100644 index 00000000..22df0a16 --- /dev/null +++ b/claude-code/tests/skillify-legacy-migration.test.ts @@ -0,0 +1,193 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import { existsSync, mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync, chmodSync } from "node:fs"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; + +/** + * Tests for src/skillify/legacy-migration.ts. + * + * Each `it` runs in a fresh `process.env.HOME = mkdtempSync(...)` so the + * helper's `homedir()`-based path resolution targets a sandbox dir, never + * the real user state. The module-level `attempted` flag is reset by + * re-importing via `vi.resetModules()` between tests — otherwise the second + * call short-circuits and we'd be testing the cache rather than the logic. + */ + +let sandboxHome: string; +let prevHome: string | undefined; +// Windows `os.homedir()` resolves from USERPROFILE / HOMEDRIVE+HOMEPATH, not +// HOME. CI is ubuntu-only today but sandboxing all three keeps the test from +// touching real user state if anyone runs it on Windows locally. 
+let prevUserProfile: string | undefined; +let prevHomeDrive: string | undefined; +let prevHomePath: string | undefined; + +const legacyOf = (h: string) => join(h, ".deeplake", "state", "skilify"); +const currentOf = (h: string) => join(h, ".deeplake", "state", "skillify"); + +async function freshMigrate() { + vi.resetModules(); + const mod = await import("../../src/skillify/legacy-migration.js"); + return mod.migrateLegacyStateDir; +} + +beforeEach(() => { + sandboxHome = mkdtempSync(join(tmpdir(), "skillify-migration-")); + prevHome = process.env.HOME; + prevUserProfile = process.env.USERPROFILE; + prevHomeDrive = process.env.HOMEDRIVE; + prevHomePath = process.env.HOMEPATH; + process.env.HOME = sandboxHome; + process.env.USERPROFILE = sandboxHome; + delete process.env.HOMEDRIVE; + delete process.env.HOMEPATH; +}); + +afterEach(() => { + if (prevHome === undefined) delete process.env.HOME; + else process.env.HOME = prevHome; + if (prevUserProfile === undefined) delete process.env.USERPROFILE; + else process.env.USERPROFILE = prevUserProfile; + if (prevHomeDrive === undefined) delete process.env.HOMEDRIVE; + else process.env.HOMEDRIVE = prevHomeDrive; + if (prevHomePath === undefined) delete process.env.HOMEPATH; + else process.env.HOMEPATH = prevHomePath; + // chmodSync the sandbox readable in case a test removed perms; otherwise + // rmSync hits EACCES on cleanup and leaks the temp dir across runs. 
+ try { chmodSync(sandboxHome, 0o755); } catch { /* nothing */ } + rmSync(sandboxHome, { recursive: true, force: true }); +}); + +describe("migrateLegacyStateDir", () => { + it("no-op when legacy dir does not exist", async () => { + const migrate = await freshMigrate(); + migrate(); + expect(existsSync(legacyOf(sandboxHome))).toBe(false); + expect(existsSync(currentOf(sandboxHome))).toBe(false); + }); + + it("no-op when current dir already exists (legacy preserved untouched)", async () => { + const legacy = legacyOf(sandboxHome); + const current = currentOf(sandboxHome); + mkdirSync(legacy, { recursive: true }); + writeFileSync(join(legacy, "config.json"), '{"scope":"team"}'); + mkdirSync(current, { recursive: true }); + writeFileSync(join(current, "config.json"), '{"scope":"me"}'); + + const migrate = await freshMigrate(); + migrate(); + + // Both still exist; current's contents NOT clobbered with legacy's. + expect(existsSync(legacy)).toBe(true); + expect(existsSync(current)).toBe(true); + expect(readFileSync(join(current, "config.json"), "utf-8")).toBe('{"scope":"me"}'); + expect(readFileSync(join(legacy, "config.json"), "utf-8")).toBe('{"scope":"team"}'); + }); + + it("renames legacy → current when only legacy exists, preserving contents", async () => { + const legacy = legacyOf(sandboxHome); + const current = currentOf(sandboxHome); + mkdirSync(legacy, { recursive: true }); + writeFileSync(join(legacy, "config.json"), '{"scope":"team","team":["alice"],"install":"global"}'); + writeFileSync(join(legacy, "pulled.json"), '{"version":1,"entries":[]}'); + writeFileSync(join(legacy, "abc123.json"), '{"counter":3}'); + + const migrate = await freshMigrate(); + migrate(); + + expect(existsSync(legacy)).toBe(false); + expect(existsSync(current)).toBe(true); + expect(readFileSync(join(current, "config.json"), "utf-8")) + .toBe('{"scope":"team","team":["alice"],"install":"global"}'); + expect(readFileSync(join(current, "pulled.json"), "utf-8")) + 
.toBe('{"version":1,"entries":[]}'); + expect(readFileSync(join(current, "abc123.json"), "utf-8")).toBe('{"counter":3}'); + }); + + it("idempotent: second call is a no-op even if legacy reappears", async () => { + const legacy = legacyOf(sandboxHome); + const current = currentOf(sandboxHome); + mkdirSync(legacy, { recursive: true }); + writeFileSync(join(legacy, "config.json"), '{"scope":"team"}'); + + const migrate = await freshMigrate(); + migrate(); + expect(existsSync(current)).toBe(true); + expect(existsSync(legacy)).toBe(false); + + // Recreate legacy with conflicting content; second call must NOT touch it. + mkdirSync(legacy, { recursive: true }); + writeFileSync(join(legacy, "config.json"), '{"scope":"org"}'); + rmSync(current, { recursive: true, force: true }); + + migrate(); + + // attempted=true → migrate did not run; current was NOT recreated from + // the new legacy. Confirms the `attempted` short-circuit holds. + expect(existsSync(current)).toBe(false); + expect(existsSync(legacy)).toBe(true); + expect(readFileSync(join(legacy, "config.json"), "utf-8")).toBe('{"scope":"org"}'); + }); + + it.each([ + ["EXDEV", "cross-device link not permitted"], + ["EPERM", "operation not permitted"], + ])("swallows %s renameSync failure and leaves legacy in place", async (code, message) => { + // We can't realistically force a true EXDEV inside a single tmpfs in + // CI, so we mock fs at the module level. Re-import after the mock so + // the helper picks up the stubbed renameSync. 
+ const legacy = legacyOf(sandboxHome); + mkdirSync(legacy, { recursive: true }); + writeFileSync(join(legacy, "config.json"), '{"scope":"me"}'); + + vi.resetModules(); + vi.doMock("node:fs", async () => { + const real = await vi.importActual("node:fs"); + return { + ...real, + renameSync: () => { + const err = new Error(`${code}: ${message}`) as NodeJS.ErrnoException; + err.code = code; + throw err; + }, + }; + }); + const { migrateLegacyStateDir } = await import("../../src/skillify/legacy-migration.js"); + + expect(() => migrateLegacyStateDir()).not.toThrow(); + expect(existsSync(legacy)).toBe(true); + expect(readFileSync(join(legacy, "config.json"), "utf-8")).toBe('{"scope":"me"}'); + + vi.doUnmock("node:fs"); + }); + + it("re-throws unexpected renameSync failures (EIO, ENOSPC, etc.)", async () => { + // EIO/ENOSPC/anything else is NOT in the documented fallback set. + // Swallowing it would leave the user on a fresh skillify state dir + // with their legacy state silently orphaned. The helper must propagate + // so the caller (or the user) sees the real I/O failure. + const legacy = legacyOf(sandboxHome); + mkdirSync(legacy, { recursive: true }); + writeFileSync(join(legacy, "config.json"), '{"scope":"me"}'); + + vi.resetModules(); + vi.doMock("node:fs", async () => { + const real = await vi.importActual("node:fs"); + return { + ...real, + renameSync: () => { + const err = new Error("EIO: i/o error") as NodeJS.ErrnoException; + err.code = "EIO"; + throw err; + }, + }; + }); + const { migrateLegacyStateDir } = await import("../../src/skillify/legacy-migration.js"); + + expect(() => migrateLegacyStateDir()).toThrow(/EIO/); + // Legacy still in place — the caller can decide what to do. 
+ expect(existsSync(legacy)).toBe(true); + + vi.doUnmock("node:fs"); + }); +}); diff --git a/claude-code/tests/skilify-manifest.test.ts b/claude-code/tests/skillify-manifest.test.ts similarity index 98% rename from claude-code/tests/skilify-manifest.test.ts rename to claude-code/tests/skillify-manifest.test.ts index 0f744295..96abd89c 100644 --- a/claude-code/tests/skilify-manifest.test.ts +++ b/claude-code/tests/skillify-manifest.test.ts @@ -15,13 +15,13 @@ import { pruneOrphanedEntries, unlinkSymlinks, type PulledEntry, -} from "../../src/skilify/manifest.js"; +} from "../../src/skillify/manifest.js"; let fakeHome: string; let originalHome: string | undefined; beforeEach(() => { - fakeHome = mkdtempSync(join(tmpdir(), "skilify-manifest-")); + fakeHome = mkdtempSync(join(tmpdir(), "skillify-manifest-")); originalHome = process.env.HOME; process.env.HOME = fakeHome; }); @@ -46,8 +46,8 @@ const sampleEntry = (over: Partial = {}): PulledEntry => ({ }); describe("manifestPath", () => { - it("resolves to ~/.deeplake/state/skilify/pulled.json under HOME", () => { - expect(manifestPath()).toBe(join(fakeHome, ".deeplake", "state", "skilify", "pulled.json")); + it("resolves to ~/.deeplake/state/skillify/pulled.json under HOME", () => { + expect(manifestPath()).toBe(join(fakeHome, ".deeplake", "state", "skillify", "pulled.json")); }); }); @@ -134,7 +134,7 @@ describe("saveManifest", () => { it("creates parent directories on first write", () => { expect(existsSync(join(fakeHome, ".deeplake"))).toBe(false); saveManifest({ version: 1, entries: [] }); - expect(existsSync(join(fakeHome, ".deeplake", "state", "skilify"))).toBe(true); + expect(existsSync(join(fakeHome, ".deeplake", "state", "skillify"))).toBe(true); }); }); diff --git a/claude-code/tests/skilify-pull.test.ts b/claude-code/tests/skillify-pull.test.ts similarity index 99% rename from claude-code/tests/skilify-pull.test.ts rename to claude-code/tests/skillify-pull.test.ts index 90520895..d7f5ff91 100644 --- 
a/claude-code/tests/skilify-pull.test.ts +++ b/claude-code/tests/skillify-pull.test.ts @@ -14,9 +14,9 @@ import { isMissingTableError, assertValidAuthor, type QueryFn, -} from "../../src/skilify/pull.js"; +} from "../../src/skillify/pull.js"; import { lstatSync, readlinkSync, symlinkSync } from "node:fs"; -import { loadManifest } from "../../src/skilify/manifest.js"; +import { loadManifest } from "../../src/skillify/manifest.js"; let projectRoot: string; let projectSkillsRoot: string; @@ -24,11 +24,11 @@ let fakeHome: string; let originalHome: string | undefined; beforeEach(() => { - projectRoot = mkdtempSync(join(tmpdir(), "skilify-pull-")); + projectRoot = mkdtempSync(join(tmpdir(), "skillify-pull-")); projectSkillsRoot = join(projectRoot, ".claude", "skills"); // Isolate HOME so the manifest written by recordPull lands in a temp // directory instead of polluting the developer's real ~/.deeplake state. - fakeHome = mkdtempSync(join(tmpdir(), "skilify-pull-home-")); + fakeHome = mkdtempSync(join(tmpdir(), "skillify-pull-home-")); originalHome = process.env.HOME; process.env.HOME = fakeHome; }); diff --git a/claude-code/tests/skilify-scope-config.test.ts b/claude-code/tests/skillify-scope-config.test.ts similarity index 97% rename from claude-code/tests/skilify-scope-config.test.ts rename to claude-code/tests/skillify-scope-config.test.ts index 0cd30010..71b5e77a 100644 --- a/claude-code/tests/skilify-scope-config.test.ts +++ b/claude-code/tests/skillify-scope-config.test.ts @@ -2,9 +2,9 @@ import { afterEach, beforeEach, describe, expect, it } from "vitest"; import { existsSync, mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs"; import { homedir } from "node:os"; import { join } from "node:path"; -import { loadScopeConfig, saveScopeConfig } from "../../src/skilify/scope-config.js"; +import { loadScopeConfig, saveScopeConfig } from "../../src/skillify/scope-config.js"; -const STATE_DIR = join(homedir(), ".deeplake", "state", "skilify"); +const 
STATE_DIR = join(homedir(), ".deeplake", "state", "skillify"); const CONFIG_PATH = join(STATE_DIR, "config.json"); let backup: string | null = null; diff --git a/claude-code/tests/skilify-session-start-injection.test.ts b/claude-code/tests/skillify-session-start-injection.test.ts similarity index 70% rename from claude-code/tests/skilify-session-start-injection.test.ts rename to claude-code/tests/skillify-session-start-injection.test.ts index 671bbbf6..a5c8eba6 100644 --- a/claude-code/tests/skilify-session-start-injection.test.ts +++ b/claude-code/tests/skillify-session-start-injection.test.ts @@ -4,10 +4,10 @@ import { resolve } from "node:path"; /** * Bundle-level guard: every agent's session-start.js bundle must inject the - * SKILLS (skilify) section into the agent's developer context. Skilify + * SKILLS (skillify) section into the agent's developer context. Skillify * commands are part of the same hivemind family as the auth-login subcommands * — without this injection, agents have no way to discover that - * `hivemind skilify pull --user X`, `--to global`, `--dry-run`, etc. exist. + * `hivemind skillify pull --user X`, `--to global`, `--dry-run`, etc. exist. * * Each session-start.ts source file embeds the SKILLS section as a literal * string and resolves the HIVEMIND_CLI placeholder to the absolute path of @@ -29,13 +29,13 @@ const SESSION_START_BUNDLES: Array<[string, string]> = [ // - OpenClaw exposes its surface via openclaw/skills/SKILL.md (loaded by // the openclaw runtime's skill index, not bundled JS) // Both are still part of the discoverability matrix and must advertise the -// skilify family alongside the four hook-driven agents. +// skillify family alongside the four hook-driven agents. 
const NON_BUNDLE_SURFACES: Array<[string, string]> = [ ["pi-extension-source", resolve(BUNDLE_ROOT, "pi", "extension-source", "hivemind.ts")], ["openclaw-skill", resolve(BUNDLE_ROOT, "openclaw", "skills", "SKILL.md")], ]; -describe("skilify SessionStart injection (per-agent bundles)", () => { +describe("skillify SessionStart injection (per-agent bundles)", () => { it.each(SESSION_START_BUNDLES)("%s bundle exists", (_label, p) => { expect(existsSync(p)).toBe(true); }); @@ -45,25 +45,25 @@ describe("skilify SessionStart injection (per-agent bundles)", () => { (_label, p) => { const text = readFileSync(p, "utf-8"); // Claude Code uses the long header "Skill management"; the others use - // the short "SKILLS (skilify)" header. Either is acceptable. + // the short "SKILLS (skillify)" header. Either is acceptable. const hasHeader = - text.includes("Skill management") || text.includes("SKILLS (skilify)"); + text.includes("Skill management") || text.includes("SKILLS (skillify)"); expect(hasHeader).toBe(true); } ); it.each(SESSION_START_BUNDLES)( - "%s bundle advertises the high-value skilify pull invocations", + "%s bundle advertises the high-value skillify pull invocations", (_label, p) => { const text = readFileSync(p, "utf-8"); // The exact subcommands every agent must surface to be useful. 
- expect(text).toMatch(/skilify pull/); - expect(text).toMatch(/skilify pull --user/); - expect(text).toMatch(/skilify pull --users/); - expect(text).toMatch(/skilify pull --all-users/); - expect(text).toMatch(/skilify pull --dry-run/); - expect(text).toMatch(/skilify scope/); - expect(text).toMatch(/skilify team/); + expect(text).toMatch(/skillify pull/); + expect(text).toMatch(/skillify pull --user/); + expect(text).toMatch(/skillify pull --users/); + expect(text).toMatch(/skillify pull --all-users/); + expect(text).toMatch(/skillify pull --dry-run/); + expect(text).toMatch(/skillify scope/); + expect(text).toMatch(/skillify team/); } ); @@ -71,7 +71,7 @@ describe("skilify SessionStart injection (per-agent bundles)", () => { "%s bundle uses bare `hivemind ` form (no HIVEMIND_CLI placeholder leak)", (_label, p) => { const text = readFileSync(p, "utf-8"); - // After the npm-bin unification: the inject text uses bare `hivemind skilify`, + // After the npm-bin unification: the inject text uses bare `hivemind skillify`, // `hivemind login`, etc. There must be NO HIVEMIND_CLI const, NO placeholder // substitution, and NO literal "HIVEMIND_CLI" string in the inject anywhere. expect(text).not.toMatch(/HIVEMIND_CLI/); @@ -80,7 +80,7 @@ describe("skilify SessionStart injection (per-agent bundles)", () => { // unification. expect(text).not.toMatch(/replace\(\s*\/HIVEMIND_CLI/); // Inject must contain the bare hivemind invocations the agent should suggest. 
- expect(text).toMatch(/hivemind skilify\b/); + expect(text).toMatch(/hivemind skillify\b/); } ); @@ -101,19 +101,19 @@ describe("skilify SessionStart injection (per-agent bundles)", () => { ); }); -describe("skilify discoverability on non-bundle agent surfaces (Pi + OpenClaw)", () => { +describe("skillify discoverability on non-bundle agent surfaces (Pi + OpenClaw)", () => { it.each(NON_BUNDLE_SURFACES)("%s file exists", (_label, p) => { expect(existsSync(p)).toBe(true); }); it.each(NON_BUNDLE_SURFACES)( - "%s advertises the skilify family (SKILLS / Skill Management section)", + "%s advertises the skillify family (SKILLS / Skill Management section)", (_label, p) => { const text = readFileSync(p, "utf-8"); - // Pi uses "SKILLS (skilify)" inline in its CONTEXT_PREAMBLE; OpenClaw's + // Pi uses "SKILLS (skillify)" inline in its CONTEXT_PREAMBLE; OpenClaw's // SKILL.md uses a markdown "## Skill Management" header. Either is fine. const hasHeader = - text.includes("SKILLS (skilify)") || + text.includes("SKILLS (skillify)") || text.includes("Skill Management") || text.includes("Skill management"); expect(hasHeader).toBe(true); @@ -121,16 +121,16 @@ describe("skilify discoverability on non-bundle agent surfaces (Pi + OpenClaw)", ); it.each(NON_BUNDLE_SURFACES)( - "%s lists the high-value skilify pull invocations", + "%s lists the high-value skillify pull invocations", (_label, p) => { const text = readFileSync(p, "utf-8"); - expect(text).toMatch(/skilify pull/); - expect(text).toMatch(/skilify pull --user/); - expect(text).toMatch(/skilify pull --users/); - expect(text).toMatch(/skilify pull --all-users/); - expect(text).toMatch(/skilify pull --dry-run/); - expect(text).toMatch(/skilify scope/); - expect(text).toMatch(/skilify team/); + expect(text).toMatch(/skillify pull/); + expect(text).toMatch(/skillify pull --user/); + expect(text).toMatch(/skillify pull --users/); + expect(text).toMatch(/skillify pull --all-users/); + expect(text).toMatch(/skillify pull 
--dry-run/); + expect(text).toMatch(/skillify scope/); + expect(text).toMatch(/skillify team/); } ); @@ -147,78 +147,78 @@ describe("skilify discoverability on non-bundle agent surfaces (Pi + OpenClaw)", }); }); -describe("Pi skilify worker (mining) wiring", () => { +describe("Pi skillify worker (mining) wiring", () => { // Pi mines via a separate bundled worker spawned from session_shutdown, // installed alongside wiki-worker.js by `hivemind pi install`. These // assertions catch any regression that drops the bundle entry, removes // the install copy, or unwires the spawn call. - it("ships pi/bundle/skilify-worker.js after build", () => { - const p = resolve(BUNDLE_ROOT, "pi", "bundle", "skilify-worker.js"); + it("ships pi/bundle/skillify-worker.js after build", () => { + const p = resolve(BUNDLE_ROOT, "pi", "bundle", "skillify-worker.js"); expect(existsSync(p)).toBe(true); }); - it("esbuild config registers the pi skilify-worker entry", () => { + it("esbuild config registers the pi skillify-worker entry", () => { const cfg = readFileSync(resolve(BUNDLE_ROOT, "esbuild.config.mjs"), "utf-8"); - // Inside the piWorker array we must list the skilify-worker entry. - expect(cfg).toMatch(/dist\/src\/skilify\/skilify-worker\.js[^"]*"\s*,\s*out:\s*"skilify-worker"/); + // Inside the piWorker array we must list the skillify-worker entry. 
+ expect(cfg).toMatch(/dist\/src\/skillify\/skillify-worker\.js[^"]*"\s*,\s*out:\s*"skillify-worker"/); }); - it("install-pi.ts copies pi/bundle/skilify-worker.js to ~/.pi/agent/hivemind/", () => { + it("install-pi.ts copies pi/bundle/skillify-worker.js to ~/.pi/agent/hivemind/", () => { const src = readFileSync(resolve(BUNDLE_ROOT, "src", "cli", "install-pi.ts"), "utf-8"); - expect(src).toMatch(/SKILIFY_WORKER_PATH\s*=/); - // join(pkgRoot(), "pi", "bundle", "skilify-worker.js") — the source path - expect(src).toMatch(/"pi",\s*"bundle",\s*"skilify-worker\.js"/); - // copyFileSync(srcSkilifyWorker, SKILIFY_WORKER_PATH) — the install step - expect(src).toMatch(/copyFileSync\(srcSkilifyWorker,\s*SKILIFY_WORKER_PATH\)/); + expect(src).toMatch(/SKILLIFY_WORKER_PATH\s*=/); + // join(pkgRoot(), "pi", "bundle", "skillify-worker.js") — the source path + expect(src).toMatch(/"pi",\s*"bundle",\s*"skillify-worker\.js"/); + // copyFileSync(srcSkillifyWorker, SKILLIFY_WORKER_PATH) — the install step + expect(src).toMatch(/copyFileSync\(srcSkillifyWorker,\s*SKILLIFY_WORKER_PATH\)/); }); - it("pi extension defines spawnPiSkilifyWorker and wires it into session_shutdown", () => { + it("pi extension defines spawnPiSkillifyWorker and wires it into session_shutdown", () => { const ext = readFileSync(resolve(BUNDLE_ROOT, "pi", "extension-source", "hivemind.ts"), "utf-8"); // Function exists - expect(ext).toMatch(/function spawnPiSkilifyWorker\b/); + expect(ext).toMatch(/function spawnPiSkillifyWorker\b/); // Path const points at the right install location - expect(ext).toMatch(/PI_SKILIFY_WORKER_PATH\s*=\s*join\(homedir\(\),\s*"\.pi",\s*"agent",\s*"hivemind",\s*"skilify-worker\.js"\)/); - // Spawned with HIVEMIND_SKILIFY_WORKER=1 + HIVEMIND_CAPTURE=false (recursion guard + no echo) - expect(ext).toMatch(/HIVEMIND_SKILIFY_WORKER:\s*"1"/); + expect(ext).toMatch(/PI_SKILLIFY_WORKER_PATH\s*=\s*join\(homedir\(\),\s*"\.pi",\s*"agent",\s*"hivemind",\s*"skillify-worker\.js"\)/); + // Spawned 
with HIVEMIND_SKILLIFY_WORKER=1 + HIVEMIND_CAPTURE=false (recursion guard + no echo) + expect(ext).toMatch(/HIVEMIND_SKILLIFY_WORKER:\s*"1"/); // session_shutdown handler invokes it after spawnWikiWorker - expect(ext).toMatch(/session_shutdown[\s\S]{0,2000}spawnWikiWorker[\s\S]{0,500}spawnPiSkilifyWorker/); + expect(ext).toMatch(/session_shutdown[\s\S]{0,2000}spawnWikiWorker[\s\S]{0,500}spawnPiSkillifyWorker/); }); - it("pi skilify worker bundle embeds the same worker code as the other agents", () => { + it("pi skillify worker bundle embeds the same worker code as the other agents", () => { // Same shared module — guard against an accidental empty bundle by // checking the canonical entry-point + module markers are present. - const text = readFileSync(resolve(BUNDLE_ROOT, "pi", "bundle", "skilify-worker.js"), "utf-8"); + const text = readFileSync(resolve(BUNDLE_ROOT, "pi", "bundle", "skillify-worker.js"), "utf-8"); // The worker reads its config from process.argv[2] expect(text).toMatch(/process\.argv\[2\]/); // The worker writes to the skills table via INSERT (append-only design) expect(text).toMatch(/INSERT INTO/); - // The worker pulls in the skilify gate-runner module (per-agent CLI dispatch) + // The worker pulls in the skillify gate-runner module (per-agent CLI dispatch) expect(text).toMatch(/gate-runner|runGate/); // Worker-specific helper that doesn't appear in unrelated bundles - expect(text).toMatch(/skilifyLog/); + expect(text).toMatch(/skillifyLog/); }); }); -describe("OpenClaw skilify worker (mining) wiring", () => { +describe("OpenClaw skillify worker (mining) wiring", () => { // OpenClaw mines via a separate bundled worker spawned from the agent_end // hook. The worker bundle is built as a second openclaw esbuild entry - // landing at openclaw/dist/skilify-worker.js (sibling of index.js). + // landing at openclaw/dist/skillify-worker.js (sibling of index.js). 
// install-openclaw.ts already copies the entire dist/ recursively, so // no install step change is required. - it("ships openclaw/dist/skilify-worker.js after build", () => { - const p = resolve(BUNDLE_ROOT, "openclaw", "dist", "skilify-worker.js"); + it("ships openclaw/dist/skillify-worker.js after build", () => { + const p = resolve(BUNDLE_ROOT, "openclaw", "dist", "skillify-worker.js"); expect(existsSync(p)).toBe(true); }); - it("esbuild config registers the openclaw skilify-worker entry", () => { + it("esbuild config registers the openclaw skillify-worker entry", () => { const cfg = readFileSync(resolve(BUNDLE_ROOT, "esbuild.config.mjs"), "utf-8"); - // The openclaw skilify-worker is a SEPARATE build call (so the main + // The openclaw skillify-worker is a SEPARATE build call (so the main // openclaw bundle's child_process stub doesn't apply, and so the chunk // graph stays isolated from the gateway's split chunks). - expect(cfg).toMatch(/"skilify-worker":\s*"dist\/src\/skilify\/skilify-worker\.js"/); - expect(cfg).toMatch(/outdir:\s*"openclaw\/dist"[\s\S]{0,200}skilify-worker/); + expect(cfg).toMatch(/"skillify-worker":\s*"dist\/src\/skillify\/skillify-worker\.js"/); + expect(cfg).toMatch(/outdir:\s*"openclaw\/dist"[\s\S]{0,200}skillify-worker/); }); it("openclaw/src/index.ts bypasses the child_process stub via createRequire", () => { @@ -230,15 +230,15 @@ describe("OpenClaw skilify worker (mining) wiring", () => { expect(src).toMatch(/requireFromOpenclaw\("node:child_process"\)/); }); - it("openclaw/src/index.ts defines spawnOpenclawSkilifyWorker and wires it into agent_end", () => { + it("openclaw/src/index.ts defines spawnOpenclawSkillifyWorker and wires it into agent_end", () => { const src = readFileSync(resolve(BUNDLE_ROOT, "openclaw", "src", "index.ts"), "utf-8"); - expect(src).toMatch(/function spawnOpenclawSkilifyWorker\b/); - // OPENCLAW_SKILIFY_WORKER_PATH must be a sibling of import.meta.url - 
expect(src).toMatch(/OPENCLAW_SKILIFY_WORKER_PATH\s*=\s*joinPath\(__openclaw_dirname,\s*"skilify-worker\.js"\)/); - // HIVEMIND_SKILIFY_WORKER=1 recursion guard set on spawn env - expect(src).toMatch(/HIVEMIND_SKILIFY_WORKER:\s*"1"/); + expect(src).toMatch(/function spawnOpenclawSkillifyWorker\b/); + // OPENCLAW_SKILLIFY_WORKER_PATH must be a sibling of import.meta.url + expect(src).toMatch(/OPENCLAW_SKILLIFY_WORKER_PATH\s*=\s*joinPath\(__openclaw_dirname,\s*"skillify-worker\.js"\)/); + // HIVEMIND_SKILLIFY_WORKER=1 recursion guard set on spawn env + expect(src).toMatch(/HIVEMIND_SKILLIFY_WORKER:\s*"1"/); // agent_end hook calls it after the capture loop - expect(src).toMatch(/agent_end[\s\S]{0,3500}Auto-captured[\s\S]{0,500}spawnOpenclawSkilifyWorker/); + expect(src).toMatch(/agent_end[\s\S]{0,3500}Auto-captured[\s\S]{0,500}spawnOpenclawSkillifyWorker/); // install: "global" — no per-project cwd, skills land under ~/.claude/skills/ expect(src).toMatch(/install:\s*"global"/); }); @@ -252,8 +252,8 @@ describe("OpenClaw skilify worker (mining) wiring", () => { // The destructure may also pull other primitives (e.g. execFileSync) for the // gate-agent detection path; allow extra destructured fields. expect(text).toMatch(/var\s*\{\s*spawn:\s*realSpawn[\s\S]{0,200}\}\s*=\s*requireFromOpenclaw/); - // spawnOpenclawSkilifyWorker function present in bundle - expect(text).toMatch(/spawnOpenclawSkilifyWorker/); + // spawnOpenclawSkillifyWorker function present in bundle + expect(text).toMatch(/spawnOpenclawSkillifyWorker/); // realSpawn(process.execPath, [path, configPath], ...) 
— the actual spawn site expect(text).toMatch(/realSpawn\(process\.execPath/); }); @@ -285,23 +285,23 @@ describe("OpenClaw skilify worker (mining) wiring", () => { }); it("openclaw worker bundle embeds the same shared worker code as other agents", () => { - const text = readFileSync(resolve(BUNDLE_ROOT, "openclaw", "dist", "skilify-worker.js"), "utf-8"); + const text = readFileSync(resolve(BUNDLE_ROOT, "openclaw", "dist", "skillify-worker.js"), "utf-8"); expect(text).toMatch(/process\.argv\[2\]/); expect(text).toMatch(/INSERT INTO/); expect(text).toMatch(/gate-runner|runGate/); - expect(text).toMatch(/skilifyLog/); + expect(text).toMatch(/skillifyLog/); }); }); -describe("hivemind CLI USAGE help advertises skilify", () => { - // Source-of-truth scan: USAGE block in src/cli/index.ts must list skilify. +describe("hivemind CLI USAGE help advertises skillify", () => { + // Source-of-truth scan: USAGE block in src/cli/index.ts must list skillify. // Bundle scan would also work but the source is canonical for help text. 
- it("`hivemind --help` documents the skilify subcommand family", () => { + it("`hivemind --help` documents the skillify subcommand family", () => { const cli = resolve(BUNDLE_ROOT, "bundle", "cli.js"); const text = readFileSync(cli, "utf-8"); expect(text).toMatch(/Skill management/); - expect(text).toMatch(/hivemind skilify pull/); - expect(text).toMatch(/hivemind skilify scope/); - expect(text).toMatch(/hivemind skilify team/); + expect(text).toMatch(/hivemind skillify pull/); + expect(text).toMatch(/hivemind skillify scope/); + expect(text).toMatch(/hivemind skillify team/); }); }); diff --git a/claude-code/tests/skilify-skill-writer.test.ts b/claude-code/tests/skillify-skill-writer.test.ts similarity index 98% rename from claude-code/tests/skilify-skill-writer.test.ts rename to claude-code/tests/skillify-skill-writer.test.ts index 7d6a5df7..ce5d969a 100644 --- a/claude-code/tests/skilify-skill-writer.test.ts +++ b/claude-code/tests/skillify-skill-writer.test.ts @@ -9,13 +9,13 @@ import { listSkills, resolveSkillsRoot, assertValidSkillName, -} from "../../src/skilify/skill-writer.js"; +} from "../../src/skillify/skill-writer.js"; let projectRoot: string; let skillsRoot: string; beforeEach(() => { - projectRoot = mkdtempSync(join(tmpdir(), "skilify-skill-writer-")); + projectRoot = mkdtempSync(join(tmpdir(), "skillify-skill-writer-")); skillsRoot = join(projectRoot, ".claude", "skills"); }); diff --git a/claude-code/tests/skilify-skills-table.test.ts b/claude-code/tests/skillify-skills-table.test.ts similarity index 99% rename from claude-code/tests/skilify-skills-table.test.ts rename to claude-code/tests/skillify-skills-table.test.ts index 6cde2e05..ffc500f5 100644 --- a/claude-code/tests/skilify-skills-table.test.ts +++ b/claude-code/tests/skillify-skills-table.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "vitest"; -import { insertSkillRow, createSkillsTableSql } from "../../src/skilify/skills-table.js"; +import { insertSkillRow, 
createSkillsTableSql } from "../../src/skillify/skills-table.js"; type Call = { sql: string }; diff --git a/claude-code/tests/skilify-state.test.ts b/claude-code/tests/skillify-state.test.ts similarity index 95% rename from claude-code/tests/skilify-state.test.ts rename to claude-code/tests/skillify-state.test.ts index 4088b0f2..8190e639 100644 --- a/claude-code/tests/skilify-state.test.ts +++ b/claude-code/tests/skillify-state.test.ts @@ -13,17 +13,17 @@ import { tryAcquireWorkerLock, releaseWorkerLock, TRIGGER_THRESHOLD, -} from "../../src/skilify/state.js"; +} from "../../src/skillify/state.js"; -const STATE_DIR = join(homedir(), ".deeplake", "state", "skilify"); +const STATE_DIR = join(homedir(), ".deeplake", "state", "skillify"); /** * Use a unique cwd per test so the derived project key never collides with * other tests or real user state. The state files end up in the real - * ~/.deeplake/state/skilify dir but with random keys we own and clean up. + * ~/.deeplake/state/skillify dir but with random keys we own and clean up. 
*/ function freshCwd(): string { - return `/tmp/skilify-test-${randomUUID()}`; + return `/tmp/skillify-test-${randomUUID()}`; } let trackedKeys: string[] = []; @@ -81,7 +81,7 @@ describe("bumpStopCounter", () => { expect(s.counter).toBe(3); }); - it("persists state to disk under ~/.deeplake/state/skilify", () => { + it("persists state to disk under ~/.deeplake/state/skillify", () => { const cwd = freshCwd(); const s = bumpStopCounter(cwd); track(s.projectKey); diff --git a/claude-code/tests/skilify-triggers.test.ts b/claude-code/tests/skillify-triggers.test.ts similarity index 89% rename from claude-code/tests/skilify-triggers.test.ts rename to claude-code/tests/skillify-triggers.test.ts index a7fa94f4..0e4e1a92 100644 --- a/claude-code/tests/skilify-triggers.test.ts +++ b/claude-code/tests/skillify-triggers.test.ts @@ -7,17 +7,17 @@ import { randomUUID } from "node:crypto"; // Mock the spawn helper so triggers don't actually fork a worker subprocess. const spawnCalls: any[] = []; let spawnShouldThrow = false; -vi.mock("../../src/skilify/spawn-skilify-worker.js", () => ({ - spawnSkilifyWorker: (opts: any) => { +vi.mock("../../src/skillify/spawn-skillify-worker.js", () => ({ + spawnSkillifyWorker: (opts: any) => { if (spawnShouldThrow) throw new Error("synthetic spawn failure"); spawnCalls.push(opts); }, - skilifyLog: () => { /* no-op for tests */ }, + skillifyLog: () => { /* no-op for tests */ }, bundleDirFromImportMeta: (url: string) => url, })); // Import AFTER vi.mock so the mock is in place. 
-import { tryStopCounterTrigger, forceSessionEndTrigger } from "../../src/skilify/triggers.js"; +import { tryStopCounterTrigger, forceSessionEndTrigger } from "../../src/skillify/triggers.js"; import { deriveProjectKey, bumpStopCounter, @@ -25,9 +25,9 @@ import { releaseWorkerLock, readState, TRIGGER_THRESHOLD, -} from "../../src/skilify/state.js"; +} from "../../src/skillify/state.js"; -const STATE_DIR = join(homedir(), ".deeplake", "state", "skilify"); +const STATE_DIR = join(homedir(), ".deeplake", "state", "skillify"); let tracked: string[] = []; beforeEach(() => { @@ -42,10 +42,10 @@ afterEach(() => { try { rmSync(join(STATE_DIR, `${key}${ext}`)); } catch { /* nothing */ } } } - delete process.env.HIVEMIND_SKILIFY_WORKER; + delete process.env.HIVEMIND_SKILLIFY_WORKER; }); -function freshCwd(): string { return `/tmp/skilify-trig-${randomUUID()}`; } +function freshCwd(): string { return `/tmp/skillify-trig-${randomUUID()}`; } function track(key: string): string { tracked.push(key); return key; } const fakeOpts = (cwd: string, agent = "claude_code") => ({ @@ -59,8 +59,8 @@ const fakeOpts = (cwd: string, agent = "claude_code") => ({ // ── tryStopCounterTrigger ────────────────────────────────────────────────── describe("tryStopCounterTrigger", () => { - it("returns immediately when HIVEMIND_SKILIFY_WORKER=1 (recursion guard)", () => { - process.env.HIVEMIND_SKILIFY_WORKER = "1"; + it("returns immediately when HIVEMIND_SKILLIFY_WORKER=1 (recursion guard)", () => { + process.env.HIVEMIND_SKILLIFY_WORKER = "1"; tryStopCounterTrigger(fakeOpts(freshCwd())); expect(spawnCalls).toHaveLength(0); }); @@ -121,8 +121,8 @@ describe("tryStopCounterTrigger", () => { // ── forceSessionEndTrigger ───────────────────────────────────────────────── describe("forceSessionEndTrigger", () => { - it("returns immediately when HIVEMIND_SKILIFY_WORKER=1", () => { - process.env.HIVEMIND_SKILIFY_WORKER = "1"; + it("returns immediately when HIVEMIND_SKILLIFY_WORKER=1", () => { + 
process.env.HIVEMIND_SKILLIFY_WORKER = "1"; forceSessionEndTrigger(fakeOpts(freshCwd())); expect(spawnCalls).toHaveLength(0); }); diff --git a/claude-code/tests/skilify-unpull.test.ts b/claude-code/tests/skillify-unpull.test.ts similarity index 97% rename from claude-code/tests/skilify-unpull.test.ts rename to claude-code/tests/skillify-unpull.test.ts index 2ecc587c..717353b8 100644 --- a/claude-code/tests/skilify-unpull.test.ts +++ b/claude-code/tests/skillify-unpull.test.ts @@ -5,8 +5,8 @@ import { } from "node:fs"; import { tmpdir } from "node:os"; import { join } from "node:path"; -import { recordPull, loadManifest } from "../../src/skilify/manifest.js"; -import { runUnpull } from "../../src/skilify/unpull.js"; +import { recordPull, loadManifest } from "../../src/skillify/manifest.js"; +import { runUnpull } from "../../src/skillify/unpull.js"; let projectRoot: string; let projectSkillsRoot: string; @@ -14,10 +14,10 @@ let fakeHome: string; let originalHome: string | undefined; beforeEach(() => { - projectRoot = mkdtempSync(join(tmpdir(), "skilify-unpull-proj-")); + projectRoot = mkdtempSync(join(tmpdir(), "skillify-unpull-proj-")); projectSkillsRoot = join(projectRoot, ".claude", "skills"); mkdirSync(projectSkillsRoot, { recursive: true }); - fakeHome = mkdtempSync(join(tmpdir(), "skilify-unpull-home-")); + fakeHome = mkdtempSync(join(tmpdir(), "skillify-unpull-home-")); originalHome = process.env.HOME; process.env.HOME = fakeHome; }); @@ -162,7 +162,7 @@ describe("runUnpull --all and --legacy-cleanup", () => { expect(r.removed).toBe(1); }); - it("--legacy-cleanup removes 16-hex-char project_key dirs from older skilify versions", () => { + it("--legacy-cleanup removes 16-hex-char project_key dirs from older skillify versions", () => { const legacy = join(projectSkillsRoot, "abcd1234abcd1234"); mkdirSync(join(legacy, "old-skill"), { recursive: true }); writeFileSync(join(legacy, "old-skill", "SKILL.md"), "---\nname: old-skill\n---"); diff --git 
a/codex/bundle/session-start.js b/codex/bundle/session-start.js index 4717882c..dc26d414 100755 --- a/codex/bundle/session-start.js +++ b/codex/bundle/session-start.js @@ -55,7 +55,7 @@ var init_index_marker_store = __esm({ // dist/src/hooks/codex/session-start.js import { spawn } from "node:child_process"; import { fileURLToPath } from "node:url"; -import { dirname as dirname4, join as join10 } from "node:path"; +import { dirname as dirname4, join as join11 } from "node:path"; // dist/src/commands/auth.js import { execSync } from "node:child_process"; @@ -619,12 +619,12 @@ var DeeplakeApi = class { } }; -// dist/src/skilify/pull.js -import { existsSync as existsSync6, readFileSync as readFileSync7, writeFileSync as writeFileSync5, mkdirSync as mkdirSync5, renameSync as renameSync2, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync3 } from "node:fs"; -import { homedir as homedir7 } from "node:os"; -import { dirname as dirname3, join as join9 } from "node:path"; +// dist/src/skillify/pull.js +import { existsSync as existsSync7, readFileSync as readFileSync7, writeFileSync as writeFileSync5, mkdirSync as mkdirSync5, renameSync as renameSync3, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync3 } from "node:fs"; +import { homedir as homedir8 } from "node:os"; +import { dirname as dirname3, join as join10 } from "node:path"; -// dist/src/skilify/skill-writer.js +// dist/src/skillify/skill-writer.js import { existsSync as existsSync3, mkdirSync as mkdirSync3, readFileSync as readFileSync5, readdirSync, statSync, writeFileSync as writeFileSync3 } from "node:fs"; import { homedir as homedir4 } from "node:os"; import { join as join6 } from "node:path"; @@ -685,18 +685,51 @@ function parseFrontmatter(text) { return { fm, body }; } -// dist/src/skilify/manifest.js -import { existsSync as existsSync4, lstatSync, mkdirSync as mkdirSync4, readFileSync as readFileSync6, renameSync, unlinkSync as unlinkSync2, writeFileSync as 
writeFileSync4 } from "node:fs"; +// dist/src/skillify/manifest.js +import { existsSync as existsSync5, lstatSync, mkdirSync as mkdirSync4, readFileSync as readFileSync6, renameSync as renameSync2, unlinkSync as unlinkSync2, writeFileSync as writeFileSync4 } from "node:fs"; +import { homedir as homedir6 } from "node:os"; +import { dirname as dirname2, join as join8 } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync4, renameSync } from "node:fs"; import { homedir as homedir5 } from "node:os"; -import { dirname as dirname2, join as join7 } from "node:path"; +import { join as join7 } from "node:path"; +var dlog = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join7(homedir5(), ".deeplake", "state"); + const legacy = join7(root, "skilify"); + const current = join7(root, "skillify"); + if (!existsSync4(legacy)) + return; + if (existsSync4(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/manifest.js function emptyManifest() { return { version: 1, entries: [] }; } function manifestPath() { - return join7(homedir5(), ".deeplake", "state", "skilify", "pulled.json"); + return join8(homedir6(), ".deeplake", "state", "skillify", "pulled.json"); } function loadManifest(path = manifestPath()) { - if (!existsSync4(path)) + migrateLegacyStateDir(); + if (!existsSync5(path)) return emptyManifest(); let raw; try { @@ -746,10 +779,11 @@ function loadManifest(path = manifestPath()) { } } function saveManifest(m, path = manifestPath()) { + migrateLegacyStateDir(); mkdirSync4(dirname2(path), { recursive: true }); const tmp = `${path}.tmp`; writeFileSync4(tmp, 
JSON.stringify(m, null, 2) + "\n", { mode: 384 }); - renameSync(tmp, path); + renameSync2(tmp, path); } function recordPull(entry, path = manifestPath()) { const m = loadManifest(path); @@ -784,7 +818,7 @@ function pruneOrphanedEntries(path = manifestPath()) { const live = []; let pruned = 0; for (const e of m.entries) { - if (existsSync4(join7(e.installRoot, e.dirName))) { + if (existsSync5(join8(e.installRoot, e.dirName))) { live.push(e); continue; } @@ -796,31 +830,31 @@ function pruneOrphanedEntries(path = manifestPath()) { return pruned; } -// dist/src/skilify/agent-roots.js -import { existsSync as existsSync5 } from "node:fs"; -import { homedir as homedir6 } from "node:os"; -import { join as join8 } from "node:path"; +// dist/src/skillify/agent-roots.js +import { existsSync as existsSync6 } from "node:fs"; +import { homedir as homedir7 } from "node:os"; +import { join as join9 } from "node:path"; function resolveDetected(home) { const out = []; - const codexInstalled = existsSync5(join8(home, ".codex")); - const piInstalled = existsSync5(join8(home, ".pi", "agent")); - const hermesInstalled = existsSync5(join8(home, ".hermes")); + const codexInstalled = existsSync6(join9(home, ".codex")); + const piInstalled = existsSync6(join9(home, ".pi", "agent")); + const hermesInstalled = existsSync6(join9(home, ".hermes")); if (codexInstalled || piInstalled) { - out.push(join8(home, ".agents", "skills")); + out.push(join9(home, ".agents", "skills")); } if (hermesInstalled) { - out.push(join8(home, ".hermes", "skills")); + out.push(join9(home, ".hermes", "skills")); } if (piInstalled) { - out.push(join8(home, ".pi", "agent", "skills")); + out.push(join9(home, ".pi", "agent", "skills")); } return out; } -function detectAgentSkillsRoots(canonicalRoot, home = homedir6()) { +function detectAgentSkillsRoots(canonicalRoot, home = homedir7()) { return resolveDetected(home).filter((p) => p !== canonicalRoot); } -// dist/src/skilify/pull.js +// dist/src/skillify/pull.js function 
assertValidAuthor(author) { if (!author) throw new Error("author is empty"); @@ -852,15 +886,15 @@ function isMissingTableError(message) { } function resolvePullDestination(install, cwd) { if (install === "global") - return join9(homedir7(), ".claude", "skills"); + return join10(homedir8(), ".claude", "skills"); if (!cwd) throw new Error("install=project requires a cwd"); - return join9(cwd, ".claude", "skills"); + return join10(cwd, ".claude", "skills"); } function fanOutSymlinks(canonicalDir, dirName, agentRoots) { const out = []; for (const root of agentRoots) { - const link = join9(root, dirName); + const link = join10(root, dirName); let existing; try { existing = lstatSync2(link); @@ -903,8 +937,8 @@ function backfillSymlinks(installRoot) { return; const detected = detectAgentSkillsRoots(installRoot); for (const entry of entries) { - const canonical = join9(entry.installRoot, entry.dirName); - if (!existsSync6(canonical)) + const canonical = join10(entry.installRoot, entry.dirName); + if (!existsSync7(canonical)) continue; const fresh = fanOutSymlinks(canonical, entry.dirName, detected); if (sameSorted(fresh, entry.symlinks)) @@ -989,7 +1023,7 @@ function renderFrontmatter(fm) { return lines.join("\n"); } function readLocalVersion(path) { - if (!existsSync6(path)) + if (!existsSync7(path)) return null; try { const text = readFileSync7(path, "utf-8"); @@ -1078,8 +1112,8 @@ async function runPull(opts) { summary.skipped++; continue; } - const skillDir = join9(root, dirName); - const skillFile = join9(skillDir, "SKILL.md"); + const skillDir = join10(root, dirName); + const skillFile = join10(skillDir, "SKILL.md"); const remoteVersion = Number(row.version ?? 
1); const localVersion = readLocalVersion(skillFile); const action = decideAction({ @@ -1091,9 +1125,9 @@ async function runPull(opts) { let manifestError; if (action === "wrote") { mkdirSync5(skillDir, { recursive: true }); - if (existsSync6(skillFile)) { + if (existsSync7(skillFile)) { try { - renameSync2(skillFile, `${skillFile}.bak`); + renameSync3(skillFile, `${skillFile}.bak`); } catch { } } @@ -1138,8 +1172,8 @@ async function runPull(opts) { return summary; } -// dist/src/skilify/auto-pull.js -var log3 = (msg) => log("skilify-autopull", msg); +// dist/src/skillify/auto-pull.js +var log3 = (msg) => log("skillify-autopull", msg); var DEFAULT_TIMEOUT_MS = 5e3; function withTimeout(p, ms) { let timer = null; @@ -1223,23 +1257,23 @@ Organization management \u2014 each argument is SEPARATE (do NOT quote subcomman - hivemind members \u2014 list members - hivemind remove \u2014 remove member -SKILLS (skilify) \u2014 mine + share reusable skills across the org: -- hivemind skilify \u2014 show scope/team/install + per-project state -- hivemind skilify pull \u2014 sync project skills from the org table -- hivemind skilify pull --user \u2014 only that author's skills -- hivemind skilify pull --users a,b,c \u2014 multiple authors (CSV) -- hivemind skilify pull --all-users \u2014 explicit "no author filter" -- hivemind skilify pull --to project|global \u2014 install location -- hivemind skilify pull --dry-run \u2014 preview only -- hivemind skilify pull --force \u2014 overwrite local (creates .bak) -- hivemind skilify pull \u2014 pull only that skill (combines with --user) -- hivemind skilify unpull \u2014 remove every skill previously installed by pull -- hivemind skilify unpull --user \u2014 remove only that author's pulls -- hivemind skilify unpull --not-mine \u2014 remove all pulls except your own -- hivemind skilify unpull --dry-run \u2014 preview without touching disk -- hivemind skilify scope \u2014 sharing scope for new skills -- hivemind skilify install \u2014 
default install location -- hivemind skilify team add|remove|list \u2014 manage team list`; +SKILLS (skillify) \u2014 mine + share reusable skills across the org: +- hivemind skillify \u2014 show scope/team/install + per-project state +- hivemind skillify pull \u2014 sync project skills from the org table +- hivemind skillify pull --user \u2014 only that author's skills +- hivemind skillify pull --users a,b,c \u2014 multiple authors (CSV) +- hivemind skillify pull --all-users \u2014 explicit "no author filter" +- hivemind skillify pull --to project|global \u2014 install location +- hivemind skillify pull --dry-run \u2014 preview only +- hivemind skillify pull --force \u2014 overwrite local (creates .bak) +- hivemind skillify pull \u2014 pull only that skill (combines with --user) +- hivemind skillify unpull \u2014 remove every skill previously installed by pull +- hivemind skillify unpull --user \u2014 remove only that author's pulls +- hivemind skillify unpull --not-mine \u2014 remove all pulls except your own +- hivemind skillify unpull --dry-run \u2014 preview without touching disk +- hivemind skillify scope \u2014 sharing scope for new skills +- hivemind skillify install \u2014 default install location +- hivemind skillify team add|remove|list \u2014 manage team list`; async function main() { if (process.env.HIVEMIND_WIKI_WORKER === "1") return; @@ -1251,7 +1285,7 @@ async function main() { log4(`credentials loaded: org=${creds.orgName ?? 
creds.orgId}`); } if (creds?.token) { - const setupScript = join10(__bundleDir, "session-start-setup.js"); + const setupScript = join11(__bundleDir, "session-start-setup.js"); const child = spawn("node", [setupScript], { detached: true, stdio: ["pipe", "ignore", "ignore"], diff --git a/hermes/bundle/skilify-worker.js b/codex/bundle/skillify-worker.js similarity index 92% rename from hermes/bundle/skilify-worker.js rename to codex/bundle/skillify-worker.js index 8eb534fd..987a16ba 100755 --- a/hermes/bundle/skilify-worker.js +++ b/codex/bundle/skillify-worker.js @@ -1,8 +1,8 @@ #!/usr/bin/env node -// dist/src/skilify/skilify-worker.js -import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, existsSync as existsSync4, appendFileSync as appendFileSync2, rmSync } from "node:fs"; -import { join as join5 } from "node:path"; +// dist/src/skillify/skillify-worker.js +import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, existsSync as existsSync5, appendFileSync as appendFileSync2, rmSync } from "node:fs"; +import { join as join6 } from "node:path"; // dist/src/utils/debug.js import { appendFileSync } from "node:fs"; @@ -29,7 +29,7 @@ function deeplakeClientHeader() { return { [DEEPLAKE_CLIENT_HEADER]: deeplakeClientValue() }; } -// dist/src/skilify/extractors/index.js +// dist/src/skillify/extractors/index.js function extractPairs(rows) { const pairs = []; let pendingPrompt = null; @@ -60,7 +60,7 @@ function extractPairs(rows) { return pairs; } -// dist/src/skilify/skill-writer.js +// dist/src/skillify/skill-writer.js import { existsSync, mkdirSync, readFileSync, readdirSync, statSync, writeFileSync } from "node:fs"; import { homedir as homedir2 } from "node:os"; import { join as join2 } from "node:path"; @@ -216,7 +216,7 @@ function resolveSkillsRoot(install, cwd) { return join2(cwd, ".claude", "skills"); } -// dist/src/skilify/skills-table.js +// dist/src/skillify/skills-table.js import { randomUUID } from "node:crypto"; // 
dist/src/utils/sql.js @@ -227,7 +227,7 @@ function sqlIdent(name) { return name; } -// dist/src/skilify/skills-table.js +// dist/src/skillify/skills-table.js function createSkillsTableSql(tableName) { const safe = sqlIdent(tableName); return `CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`; @@ -256,7 +256,7 @@ async function insertSkillRow(args) { } } -// dist/src/skilify/gate-parser.js +// dist/src/skillify/gate-parser.js function extractJsonBlock(s) { const trimmed = s.trim(); if (!trimmed) @@ -294,7 +294,7 @@ function parseVerdict(raw) { } } -// dist/src/skilify/gate-runner.js +// dist/src/skillify/gate-runner.js import { execFileSync } from "node:child_process"; import { existsSync as existsSync2 } from "node:fs"; import { homedir as homedir3 } from "node:os"; @@ -403,28 +403,61 @@ function runGate(opts) { } } -// dist/src/skilify/state.js -import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync3, unlinkSync, openSync, closeSync } from "node:fs"; +// dist/src/skillify/state.js +import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync as renameSync2, existsSync as existsSync4, unlinkSync, openSync, closeSync } from "node:fs"; import { execSync } from "node:child_process"; -import { homedir as homedir4 } from "node:os"; +import { homedir as 
homedir5 } from "node:os"; import { createHash } from "node:crypto"; -import { join as join4, basename } from "node:path"; -var dlog = (msg) => log("skilify-state", msg); -var STATE_DIR = join4(homedir4(), ".deeplake", "state", "skilify"); +import { join as join5, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync3, renameSync } from "node:fs"; +import { homedir as homedir4 } from "node:os"; +import { join as join4 } from "node:path"; +var dlog = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join4(homedir4(), ".deeplake", "state"); + const legacy = join4(root, "skilify"); + const current = join4(root, "skillify"); + if (!existsSync3(legacy)) + return; + if (existsSync3(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog2 = (msg) => log("skillify-state", msg); +var STATE_DIR = join5(homedir5(), ".deeplake", "state", "skillify"); var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); var TRIGGER_THRESHOLD = (() => { - const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? 
n : 20; })(); function statePath(projectKey) { - return join4(STATE_DIR, `${projectKey}.json`); + return join5(STATE_DIR, `${projectKey}.json`); } function lockPath(projectKey) { - return join4(STATE_DIR, `${projectKey}.lock`); + return join5(STATE_DIR, `${projectKey}.lock`); } function readState(projectKey) { + migrateLegacyStateDir(); const p = statePath(projectKey); - if (!existsSync3(p)) + if (!existsSync4(p)) return null; try { return JSON.parse(readFileSync2(p, "utf-8")); @@ -433,13 +466,15 @@ function readState(projectKey) { } } function writeState(projectKey, state) { + migrateLegacyStateDir(); mkdirSync2(STATE_DIR, { recursive: true }); const p = statePath(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; writeFileSync2(tmp, JSON.stringify(state, null, 2)); - renameSync(tmp, p); + renameSync2(tmp, p); } function withRmwLock(projectKey, fn) { + migrateLegacyStateDir(); mkdirSync2(STATE_DIR, { recursive: true }); const rmw = lockPath(projectKey) + ".rmw"; const deadline = Date.now() + 2e3; @@ -451,11 +486,11 @@ function withRmwLock(projectKey, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { - dlog(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); try { unlinkSync(rmw); } catch (unlinkErr) { - dlog(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); } continue; } @@ -469,7 +504,7 @@ function withRmwLock(projectKey, fn) { try { unlinkSync(rmw); } catch (unlinkErr) { - dlog(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); + dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); } } } @@ -509,18 +544,18 @@ function releaseWorkerLock(projectKey) { } } -// dist/src/skilify/skilify-worker.js +// dist/src/skillify/skillify-worker.js var cfg = JSON.parse(readFileSync3(process.argv[2], 
"utf-8")); var tmpDir = cfg.tmpDir; -var verdictPath = join5(tmpDir, "verdict.json"); -var promptPath = join5(tmpDir, "prompt.txt"); +var verdictPath = join6(tmpDir, "verdict.json"); +var promptPath = join6(tmpDir, "prompt.txt"); var SESSIONS_TO_MINE = 10; var PAIR_CHAR_CAP = 2e3; var TOTAL_PAIRS_CHAR_CAP = 4e4; var EXISTING_SKILLS_CHAR_CAP = 3e4; function wlog(msg) { try { - appendFileSync2(cfg.skilifyLog, `[${utcTimestamp()}] skilify-worker(${cfg.projectKey}): ${msg} + appendFileSync2(cfg.skillifyLog, `[${utcTimestamp()}] skillify-worker(${cfg.projectKey}): ${msg} `); } catch { } @@ -715,7 +750,7 @@ function buildPrompt(pairs) { ].join("\n"); } function readVerdict(stdout) { - if (existsSync4(verdictPath)) { + if (existsSync5(verdictPath)) { try { const text = readFileSync3(verdictPath, "utf-8"); const v2 = parseVerdict(text); @@ -784,9 +819,9 @@ async function main() { timeoutMs: 12e4 }); try { - writeFileSync3(join5(tmpDir, "gate-stdout.txt"), gate.stdout); + writeFileSync3(join6(tmpDir, "gate-stdout.txt"), gate.stdout); if (gate.stderr) - writeFileSync3(join5(tmpDir, "gate-stderr.txt"), gate.stderr); + writeFileSync3(join6(tmpDir, "gate-stderr.txt"), gate.stderr); } catch { } if (gate.errored) { diff --git a/codex/bundle/stop.js b/codex/bundle/stop.js index 4323ea55..e9492308 100755 --- a/codex/bundle/stop.js +++ b/codex/bundle/stop.js @@ -53,9 +53,9 @@ var init_index_marker_store = __esm({ }); // dist/src/hooks/codex/stop.js -import { readFileSync as readFileSync7, existsSync as existsSync8 } from "node:fs"; +import { readFileSync as readFileSync7, existsSync as existsSync9 } from "node:fs"; import { fileURLToPath as fileURLToPath3 } from "node:url"; -import { dirname as dirname3, join as join13 } from "node:path"; +import { dirname as dirname3, join as join14 } from "node:path"; // dist/src/utils/stdin.js function readStdin() { @@ -679,14 +679,14 @@ function bundleDirFromImportMeta(importMetaUrl) { return dirname(fileURLToPath(importMetaUrl)); } -// 
dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js import { spawn as spawn2 } from "node:child_process"; import { fileURLToPath as fileURLToPath2 } from "node:url"; import { dirname as dirname2, join as join7 } from "node:path"; import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync4, appendFileSync as appendFileSync3, chmodSync } from "node:fs"; import { homedir as homedir5, tmpdir as tmpdir3 } from "node:os"; -// dist/src/skilify/gate-runner.js +// dist/src/skillify/gate-runner.js import { execFileSync } from "node:child_process"; import { existsSync as existsSync3 } from "node:fs"; import { homedir as homedir4 } from "node:os"; @@ -717,20 +717,20 @@ function findAgentBin(agent) { } } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js var HOME2 = homedir5(); -var SKILIFY_LOG = join7(HOME2, ".claude", "hooks", "skilify.log"); -function skilifyLog(msg) { +var SKILLIFY_LOG = join7(HOME2, ".claude", "hooks", "skillify.log"); +function skillifyLog(msg) { try { - mkdirSync4(dirname2(SKILIFY_LOG), { recursive: true }); - appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg} + mkdirSync4(dirname2(SKILLIFY_LOG), { recursive: true }); + appendFileSync3(SKILLIFY_LOG, `[${utcTimestamp()}] ${msg} `); } catch { } } -function spawnSkilifyWorker(opts) { +function spawnSkillifyWorker(opts) { const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts; - const tmpDir = join7(tmpdir3(), `deeplake-skilify-${projectKey}-${Date.now()}`); + const tmpDir = join7(tmpdir3(), `deeplake-skillify-${projectKey}-${Date.now()}`); mkdirSync4(tmpDir, { recursive: true, mode: 448 }); const gateBin = findAgentBin(agent); const configFile = join7(tmpDir, "config.json"); @@ -756,40 +756,72 @@ function spawnSkilifyWorker(opts) { hermesModel: process.env.HIVEMIND_HERMES_MODEL, piProvider: process.env.HIVEMIND_PI_PROVIDER, piModel: process.env.HIVEMIND_PI_MODEL, - 
skilifyLog: SKILIFY_LOG, + skillifyLog: SKILLIFY_LOG, currentSessionId }), { mode: 384 }); try { chmodSync(configFile, 384); } catch { } - skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`); - const workerPath = join7(bundleDir, "skilify-worker.js"); + skillifyLog(`${reason}: spawning skillify worker for project=${project} key=${projectKey}`); + const workerPath = join7(bundleDir, "skillify-worker.js"); spawn2("nohup", ["node", workerPath, configFile], { detached: true, stdio: ["ignore", "ignore", "ignore"] }).unref(); - skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`); + skillifyLog(`${reason}: spawned skillify worker for ${projectKey}`); } -// dist/src/skilify/state.js -import { readFileSync as readFileSync3, writeFileSync as writeFileSync4, writeSync, mkdirSync as mkdirSync5, renameSync, existsSync as existsSync4, unlinkSync, openSync, closeSync } from "node:fs"; +// dist/src/skillify/state.js +import { readFileSync as readFileSync3, writeFileSync as writeFileSync4, writeSync, mkdirSync as mkdirSync5, renameSync as renameSync2, existsSync as existsSync5, unlinkSync, openSync, closeSync } from "node:fs"; import { execSync as execSync2 } from "node:child_process"; -import { homedir as homedir6 } from "node:os"; +import { homedir as homedir7 } from "node:os"; import { createHash } from "node:crypto"; -import { join as join8, basename } from "node:path"; -var dlog = (msg) => log("skilify-state", msg); -var STATE_DIR = join8(homedir6(), ".deeplake", "state", "skilify"); +import { join as join9, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync4, renameSync } from "node:fs"; +import { homedir as homedir6 } from "node:os"; +import { join as join8 } from "node:path"; +var dlog = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join8(homedir6(), 
".deeplake", "state"); + const legacy = join8(root, "skilify"); + const current = join8(root, "skillify"); + if (!existsSync4(legacy)) + return; + if (existsSync4(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog2 = (msg) => log("skillify-state", msg); +var STATE_DIR = join9(homedir7(), ".deeplake", "state", "skillify"); var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); var TRIGGER_THRESHOLD = (() => { - const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? n : 20; })(); function statePath(projectKey) { - return join8(STATE_DIR, `${projectKey}.json`); + return join9(STATE_DIR, `${projectKey}.json`); } function lockPath(projectKey) { - return join8(STATE_DIR, `${projectKey}.lock`); + return join9(STATE_DIR, `${projectKey}.lock`); } function deriveProjectKey(cwd) { const project = basename(cwd) || "unknown"; @@ -807,8 +839,9 @@ function deriveProjectKey(cwd) { return { key, project }; } function readState(projectKey) { + migrateLegacyStateDir(); const p = statePath(projectKey); - if (!existsSync4(p)) + if (!existsSync5(p)) return null; try { return JSON.parse(readFileSync3(p, "utf-8")); @@ -817,13 +850,15 @@ function readState(projectKey) { } } function writeState(projectKey, state) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR, { recursive: true }); const p = statePath(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; writeFileSync4(tmp, JSON.stringify(state, null, 2)); - renameSync(tmp, p); + renameSync2(tmp, p); } function withRmwLock(projectKey, fn) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR, { recursive: true }); const rmw = 
lockPath(projectKey) + ".rmw"; const deadline = Date.now() + 2e3; @@ -835,11 +870,11 @@ function withRmwLock(projectKey, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { - dlog(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); try { unlinkSync(rmw); } catch (unlinkErr) { - dlog(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); } continue; } @@ -853,7 +888,7 @@ function withRmwLock(projectKey, fn) { try { unlinkSync(rmw); } catch (unlinkErr) { - dlog(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); + dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); } } } @@ -866,20 +901,21 @@ function resetCounter(projectKey) { }); } function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR, { recursive: true }); const p = lockPath(projectKey); - if (existsSync4(p)) { + if (existsSync5(p)) { try { const ageMs = Date.now() - parseInt(readFileSync3(p, "utf-8"), 10); if (Number.isFinite(ageMs) && ageMs < maxAgeMs) return false; } catch (readErr) { - dlog(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); + dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); } try { unlinkSync(p); } catch (unlinkErr) { - dlog(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); + dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); return false; } } @@ -903,15 +939,16 @@ function releaseWorkerLock(projectKey) { } } -// dist/src/skilify/scope-config.js -import { existsSync as existsSync5, mkdirSync as mkdirSync6, readFileSync as readFileSync4, writeFileSync as writeFileSync5 } from "node:fs"; -import { homedir as homedir7 } from "node:os"; -import { join 
as join9 } from "node:path"; -var STATE_DIR2 = join9(homedir7(), ".deeplake", "state", "skilify"); -var CONFIG_PATH = join9(STATE_DIR2, "config.json"); +// dist/src/skillify/scope-config.js +import { existsSync as existsSync6, mkdirSync as mkdirSync6, readFileSync as readFileSync4, writeFileSync as writeFileSync5 } from "node:fs"; +import { homedir as homedir8 } from "node:os"; +import { join as join10 } from "node:path"; +var STATE_DIR2 = join10(homedir8(), ".deeplake", "state", "skillify"); +var CONFIG_PATH = join10(STATE_DIR2, "config.json"); var DEFAULT = { scope: "me", team: [], install: "project" }; function loadScopeConfig() { - if (!existsSync5(CONFIG_PATH)) + migrateLegacyStateDir(); + if (!existsSync6(CONFIG_PATH)) return DEFAULT; try { const raw = JSON.parse(readFileSync4(CONFIG_PATH, "utf-8")); @@ -924,24 +961,24 @@ function loadScopeConfig() { } } -// dist/src/skilify/triggers.js +// dist/src/skillify/triggers.js function forceSessionEndTrigger(opts) { - if (process.env.HIVEMIND_SKILIFY_WORKER === "1") + if (process.env.HIVEMIND_SKILLIFY_WORKER === "1") return; if (!opts.cwd) return; try { const { key: projectKey, project } = deriveProjectKey(opts.cwd); if (!tryAcquireWorkerLock(projectKey)) { - skilifyLog(`SessionEnd: skilify worker already running for ${projectKey}, skipping`); + skillifyLog(`SessionEnd: skillify worker already running for ${projectKey}, skipping`); return; } if (readState(projectKey)) { resetCounter(projectKey); } - skilifyLog(`SessionEnd: spawning skilify worker for project=${project} agent=${opts.agent}`); + skillifyLog(`SessionEnd: spawning skillify worker for project=${project} agent=${opts.agent}`); try { - spawnSkilifyWorker({ + spawnSkillifyWorker({ config: opts.config, cwd: opts.cwd, projectKey, @@ -953,42 +990,42 @@ function forceSessionEndTrigger(opts) { reason: "SessionEnd" }); } catch (e) { - skilifyLog(`SessionEnd spawn failed: ${e?.message ?? e}`); + skillifyLog(`SessionEnd spawn failed: ${e?.message ?? 
e}`); try { releaseWorkerLock(projectKey); } catch { } } } catch (e) { - skilifyLog(`SessionEnd trigger error: ${e?.message ?? e}`); + skillifyLog(`SessionEnd trigger error: ${e?.message ?? e}`); } } // dist/src/hooks/summary-state.js -import { readFileSync as readFileSync5, writeFileSync as writeFileSync6, writeSync as writeSync2, mkdirSync as mkdirSync7, renameSync as renameSync2, existsSync as existsSync6, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; -import { homedir as homedir8 } from "node:os"; -import { join as join10 } from "node:path"; -var dlog2 = (msg) => log("summary-state", msg); -var STATE_DIR3 = join10(homedir8(), ".claude", "hooks", "summary-state"); +import { readFileSync as readFileSync5, writeFileSync as writeFileSync6, writeSync as writeSync2, mkdirSync as mkdirSync7, renameSync as renameSync3, existsSync as existsSync7, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; +import { homedir as homedir9 } from "node:os"; +import { join as join11 } from "node:path"; +var dlog3 = (msg) => log("summary-state", msg); +var STATE_DIR3 = join11(homedir9(), ".claude", "hooks", "summary-state"); var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4)); function lockPath2(sessionId) { - return join10(STATE_DIR3, `${sessionId}.lock`); + return join11(STATE_DIR3, `${sessionId}.lock`); } function tryAcquireLock(sessionId, maxAgeMs = 10 * 60 * 1e3) { mkdirSync7(STATE_DIR3, { recursive: true }); const p = lockPath2(sessionId); - if (existsSync6(p)) { + if (existsSync7(p)) { try { const ageMs = Date.now() - parseInt(readFileSync5(p, "utf-8"), 10); if (Number.isFinite(ageMs) && ageMs < maxAgeMs) return false; } catch (readErr) { - dlog2(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`); + dlog3(`lock file unreadable for ${sessionId}, treating as stale: ${readErr.message}`); } try { unlinkSync2(p); } catch (unlinkErr) { - dlog2(`could not unlink 
stale lock for ${sessionId}: ${unlinkErr.message}`); + dlog3(`could not unlink stale lock for ${sessionId}: ${unlinkErr.message}`); return false; } } @@ -1011,7 +1048,7 @@ function releaseLock(sessionId) { unlinkSync2(lockPath2(sessionId)); } catch (e) { if (e?.code !== "ENOENT") { - dlog2(`releaseLock unlink failed for ${sessionId}: ${e.message}`); + dlog3(`releaseLock unlink failed for ${sessionId}: ${e.message}`); } } } @@ -1025,9 +1062,9 @@ function buildSessionPath(config, sessionId) { // dist/src/embeddings/client.js import { connect } from "node:net"; import { spawn as spawn3 } from "node:child_process"; -import { openSync as openSync3, closeSync as closeSync3, writeSync as writeSync3, unlinkSync as unlinkSync3, existsSync as existsSync7, readFileSync as readFileSync6 } from "node:fs"; -import { homedir as homedir9 } from "node:os"; -import { join as join11 } from "node:path"; +import { openSync as openSync3, closeSync as closeSync3, writeSync as writeSync3, unlinkSync as unlinkSync3, existsSync as existsSync8, readFileSync as readFileSync6 } from "node:fs"; +import { homedir as homedir10 } from "node:os"; +import { join as join12 } from "node:path"; // dist/src/embeddings/protocol.js var DEFAULT_SOCKET_DIR = "/tmp"; @@ -1041,7 +1078,7 @@ function pidPathFor(uid, dir = DEFAULT_SOCKET_DIR) { } // dist/src/embeddings/client.js -var SHARED_DAEMON_PATH = join11(homedir9(), ".hivemind", "embed-deps", "embed-daemon.js"); +var SHARED_DAEMON_PATH = join12(homedir10(), ".hivemind", "embed-deps", "embed-daemon.js"); var log3 = (m) => log("embed-client", m); function getUid() { const uid = typeof process.getuid === "function" ? process.getuid() : void 0; @@ -1061,7 +1098,7 @@ var EmbedClient = class { this.socketPath = socketPathFor(uid, dir); this.pidPath = pidPathFor(uid, dir); this.timeoutMs = opts.timeoutMs ?? DEFAULT_CLIENT_TIMEOUT_MS; - this.daemonEntry = opts.daemonEntry ?? process.env.HIVEMIND_EMBED_DAEMON ?? (existsSync7(SHARED_DAEMON_PATH) ? 
SHARED_DAEMON_PATH : void 0); + this.daemonEntry = opts.daemonEntry ?? process.env.HIVEMIND_EMBED_DAEMON ?? (existsSync8(SHARED_DAEMON_PATH) ? SHARED_DAEMON_PATH : void 0); this.autoSpawn = opts.autoSpawn ?? true; this.spawnWaitMs = opts.spawnWaitMs ?? 5e3; } @@ -1161,7 +1198,7 @@ var EmbedClient = class { return; } } - if (!this.daemonEntry || !existsSync7(this.daemonEntry)) { + if (!this.daemonEntry || !existsSync8(this.daemonEntry)) { log3(`daemonEntry not configured or missing: ${this.daemonEntry}`); try { closeSync3(fd); @@ -1204,7 +1241,7 @@ var EmbedClient = class { while (Date.now() < deadline) { await sleep2(delay); delay = Math.min(delay * 1.5, 300); - if (!existsSync7(this.socketPath)) + if (!existsSync8(this.socketPath)) continue; try { return await this.connectOnce(); @@ -1265,8 +1302,8 @@ function embeddingSqlLiteral(vec) { // dist/src/embeddings/disable.js import { createRequire } from "node:module"; -import { homedir as homedir10 } from "node:os"; -import { join as join12 } from "node:path"; +import { homedir as homedir11 } from "node:os"; +import { join as join13 } from "node:path"; import { pathToFileURL } from "node:url"; var cachedStatus = null; function defaultResolveTransformers() { @@ -1275,7 +1312,7 @@ function defaultResolveTransformers() { return; } catch { } - const sharedDir = join12(homedir10(), ".hivemind", "embed-deps"); + const sharedDir = join13(homedir11(), ".hivemind", "embed-deps"); createRequire(pathToFileURL(`${sharedDir}/`).href).resolve("@huggingface/transformers"); } var _resolve = defaultResolveTransformers; @@ -1302,7 +1339,7 @@ function embeddingsDisabled() { // dist/src/hooks/codex/stop.js var log4 = (msg) => log("codex-stop", msg); function resolveEmbedDaemonPath() { - return join13(dirname3(fileURLToPath3(import.meta.url)), "embeddings", "embed-daemon.js"); + return join14(dirname3(fileURLToPath3(import.meta.url)), "embeddings", "embed-daemon.js"); } var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false"; async 
function main() { @@ -1326,7 +1363,7 @@ async function main() { if (input.transcript_path) { try { const transcriptPath = input.transcript_path; - if (existsSync8(transcriptPath)) { + if (existsSync9(transcriptPath)) { const transcript = readFileSync7(transcriptPath, "utf-8"); const lines = transcript.trim().split("\n").reverse(); for (const line2 of lines) { diff --git a/cursor/bundle/capture.js b/cursor/bundle/capture.js index a86f1ff1..bc284d65 100755 --- a/cursor/bundle/capture.js +++ b/cursor/bundle/capture.js @@ -839,7 +839,7 @@ function embeddingsDisabled() { // dist/src/hooks/cursor/capture.js import { fileURLToPath as fileURLToPath3 } from "node:url"; -import { dirname as dirname3, join as join13 } from "node:path"; +import { dirname as dirname3, join as join14 } from "node:path"; // dist/src/hooks/summary-state.js import { readFileSync as readFileSync4, writeFileSync as writeFileSync2, writeSync as writeSync2, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync4, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; @@ -1096,14 +1096,14 @@ function bundleDirFromImportMeta(importMetaUrl) { return dirname(fileURLToPath(importMetaUrl)); } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js import { spawn as spawn3 } from "node:child_process"; import { fileURLToPath as fileURLToPath2 } from "node:url"; import { dirname as dirname2, join as join10 } from "node:path"; import { writeFileSync as writeFileSync4, mkdirSync as mkdirSync5, appendFileSync as appendFileSync3, chmodSync } from "node:fs"; import { homedir as homedir8, tmpdir as tmpdir3 } from "node:os"; -// dist/src/skilify/gate-runner.js +// dist/src/skillify/gate-runner.js import { execFileSync } from "node:child_process"; import { existsSync as existsSync5 } from "node:fs"; import { homedir as homedir7 } from "node:os"; @@ -1134,20 +1134,20 @@ function findAgentBin(agent) { } } -// 
dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js var HOME2 = homedir8(); -var SKILIFY_LOG = join10(HOME2, ".claude", "hooks", "skilify.log"); -function skilifyLog(msg) { +var SKILLIFY_LOG = join10(HOME2, ".claude", "hooks", "skillify.log"); +function skillifyLog(msg) { try { - mkdirSync5(dirname2(SKILIFY_LOG), { recursive: true }); - appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg} + mkdirSync5(dirname2(SKILLIFY_LOG), { recursive: true }); + appendFileSync3(SKILLIFY_LOG, `[${utcTimestamp()}] ${msg} `); } catch { } } -function spawnSkilifyWorker(opts) { +function spawnSkillifyWorker(opts) { const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts; - const tmpDir = join10(tmpdir3(), `deeplake-skilify-${projectKey}-${Date.now()}`); + const tmpDir = join10(tmpdir3(), `deeplake-skillify-${projectKey}-${Date.now()}`); mkdirSync5(tmpDir, { recursive: true, mode: 448 }); const gateBin = findAgentBin(agent); const configFile = join10(tmpDir, "config.json"); @@ -1173,40 +1173,72 @@ function spawnSkilifyWorker(opts) { hermesModel: process.env.HIVEMIND_HERMES_MODEL, piProvider: process.env.HIVEMIND_PI_PROVIDER, piModel: process.env.HIVEMIND_PI_MODEL, - skilifyLog: SKILIFY_LOG, + skillifyLog: SKILLIFY_LOG, currentSessionId }), { mode: 384 }); try { chmodSync(configFile, 384); } catch { } - skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`); - const workerPath = join10(bundleDir, "skilify-worker.js"); + skillifyLog(`${reason}: spawning skillify worker for project=${project} key=${projectKey}`); + const workerPath = join10(bundleDir, "skillify-worker.js"); spawn3("nohup", ["node", workerPath, configFile], { detached: true, stdio: ["ignore", "ignore", "ignore"] }).unref(); - skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`); + skillifyLog(`${reason}: spawned skillify worker for ${projectKey}`); } -// dist/src/skilify/state.js 
-import { readFileSync as readFileSync5, writeFileSync as writeFileSync5, writeSync as writeSync3, mkdirSync as mkdirSync6, renameSync as renameSync2, existsSync as existsSync6, unlinkSync as unlinkSync3, openSync as openSync3, closeSync as closeSync3 } from "node:fs"; +// dist/src/skillify/state.js +import { readFileSync as readFileSync5, writeFileSync as writeFileSync5, writeSync as writeSync3, mkdirSync as mkdirSync6, renameSync as renameSync3, existsSync as existsSync7, unlinkSync as unlinkSync3, openSync as openSync3, closeSync as closeSync3 } from "node:fs"; import { execSync as execSync2 } from "node:child_process"; -import { homedir as homedir9 } from "node:os"; +import { homedir as homedir10 } from "node:os"; import { createHash } from "node:crypto"; -import { join as join11, basename } from "node:path"; -var dlog2 = (msg) => log("skilify-state", msg); -var STATE_DIR2 = join11(homedir9(), ".deeplake", "state", "skilify"); +import { join as join12, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync6, renameSync as renameSync2 } from "node:fs"; +import { homedir as homedir9 } from "node:os"; +import { join as join11 } from "node:path"; +var dlog2 = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join11(homedir9(), ".deeplake", "state"); + const legacy = join11(root, "skilify"); + const current = join11(root, "skillify"); + if (!existsSync6(legacy)) + return; + if (existsSync6(current)) + return; + try { + renameSync2(legacy, current); + dlog2(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog2(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog3 = (msg) => log("skillify-state", msg); +var STATE_DIR2 = join12(homedir10(), 
".deeplake", "state", "skillify"); var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4)); var TRIGGER_THRESHOLD = (() => { - const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? n : 20; })(); function statePath2(projectKey) { - return join11(STATE_DIR2, `${projectKey}.json`); + return join12(STATE_DIR2, `${projectKey}.json`); } function lockPath2(projectKey) { - return join11(STATE_DIR2, `${projectKey}.lock`); + return join12(STATE_DIR2, `${projectKey}.lock`); } function deriveProjectKey(cwd) { const project = basename(cwd) || "unknown"; @@ -1224,8 +1256,9 @@ function deriveProjectKey(cwd) { return { key, project }; } function readState2(projectKey) { + migrateLegacyStateDir(); const p = statePath2(projectKey); - if (!existsSync6(p)) + if (!existsSync7(p)) return null; try { return JSON.parse(readFileSync5(p, "utf-8")); @@ -1234,13 +1267,15 @@ function readState2(projectKey) { } } function writeState2(projectKey, state) { + migrateLegacyStateDir(); mkdirSync6(STATE_DIR2, { recursive: true }); const p = statePath2(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; writeFileSync5(tmp, JSON.stringify(state, null, 2)); - renameSync2(tmp, p); + renameSync3(tmp, p); } function withRmwLock2(projectKey, fn) { + migrateLegacyStateDir(); mkdirSync6(STATE_DIR2, { recursive: true }); const rmw = lockPath2(projectKey) + ".rmw"; const deadline = Date.now() + 2e3; @@ -1252,11 +1287,11 @@ function withRmwLock2(projectKey, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { - dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + dlog3(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); try { unlinkSync3(rmw); } catch (unlinkErr) { - dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`stale rmw lock unlink failed for ${projectKey}: 
${unlinkErr.message}`); } continue; } @@ -1270,7 +1305,7 @@ function withRmwLock2(projectKey, fn) { try { unlinkSync3(rmw); } catch (unlinkErr) { - dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); } } } @@ -1300,20 +1335,21 @@ function resetCounter(projectKey) { }); } function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) { + migrateLegacyStateDir(); mkdirSync6(STATE_DIR2, { recursive: true }); const p = lockPath2(projectKey); - if (existsSync6(p)) { + if (existsSync7(p)) { try { const ageMs = Date.now() - parseInt(readFileSync5(p, "utf-8"), 10); if (Number.isFinite(ageMs) && ageMs < maxAgeMs) return false; } catch (readErr) { - dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); + dlog3(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); } try { unlinkSync3(p); } catch (unlinkErr) { - dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); + dlog3(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); return false; } } @@ -1337,15 +1373,16 @@ function releaseWorkerLock(projectKey) { } } -// dist/src/skilify/scope-config.js -import { existsSync as existsSync7, mkdirSync as mkdirSync7, readFileSync as readFileSync6, writeFileSync as writeFileSync6 } from "node:fs"; -import { homedir as homedir10 } from "node:os"; -import { join as join12 } from "node:path"; -var STATE_DIR3 = join12(homedir10(), ".deeplake", "state", "skilify"); -var CONFIG_PATH = join12(STATE_DIR3, "config.json"); +// dist/src/skillify/scope-config.js +import { existsSync as existsSync8, mkdirSync as mkdirSync7, readFileSync as readFileSync6, writeFileSync as writeFileSync6 } from "node:fs"; +import { homedir as homedir11 } from "node:os"; +import { join as join13 } from "node:path"; +var STATE_DIR3 = join13(homedir11(), ".deeplake", "state", "skillify"); +var 
CONFIG_PATH = join13(STATE_DIR3, "config.json"); var DEFAULT = { scope: "me", team: [], install: "project" }; function loadScopeConfig() { - if (!existsSync7(CONFIG_PATH)) + migrateLegacyStateDir(); + if (!existsSync8(CONFIG_PATH)) return DEFAULT; try { const raw = JSON.parse(readFileSync6(CONFIG_PATH, "utf-8")); @@ -1358,9 +1395,9 @@ function loadScopeConfig() { } } -// dist/src/skilify/triggers.js +// dist/src/skillify/triggers.js function tryStopCounterTrigger(opts) { - if (process.env.HIVEMIND_SKILIFY_WORKER === "1") + if (process.env.HIVEMIND_SKILLIFY_WORKER === "1") return; if (!opts.cwd) return; @@ -1369,13 +1406,13 @@ function tryStopCounterTrigger(opts) { if (state.counter < TRIGGER_THRESHOLD) return; if (!tryAcquireWorkerLock(state.projectKey)) { - skilifyLog(`Stop: trigger suppressed (worker lock held) project=${state.project}`); + skillifyLog(`Stop: trigger suppressed (worker lock held) project=${state.project}`); return; } - skilifyLog(`Stop: threshold hit (counter=${state.counter}, N=${TRIGGER_THRESHOLD}) project=${state.project} agent=${opts.agent}`); + skillifyLog(`Stop: threshold hit (counter=${state.counter}, N=${TRIGGER_THRESHOLD}) project=${state.project} agent=${opts.agent}`); resetCounter(state.projectKey); try { - spawnSkilifyWorker({ + spawnSkillifyWorker({ config: opts.config, cwd: opts.cwd, projectKey: state.projectKey, @@ -1387,21 +1424,21 @@ function tryStopCounterTrigger(opts) { reason: "Stop" }); } catch (e) { - skilifyLog(`Stop spawn failed: ${e?.message ?? e}`); + skillifyLog(`Stop spawn failed: ${e?.message ?? e}`); try { releaseWorkerLock(state.projectKey); } catch { } } } catch (e) { - skilifyLog(`Stop trigger error: ${e?.message ?? e}`); + skillifyLog(`Stop trigger error: ${e?.message ?? 
e}`); } } // dist/src/hooks/cursor/capture.js var log4 = (msg) => log("cursor-capture", msg); function resolveEmbedDaemonPath() { - return join13(dirname3(fileURLToPath3(import.meta.url)), "embeddings", "embed-daemon.js"); + return join14(dirname3(fileURLToPath3(import.meta.url)), "embeddings", "embed-daemon.js"); } var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false"; function resolveCwd(input) { @@ -1489,7 +1526,7 @@ async function main() { } log4("capture ok \u2192 cloud"); maybeTriggerPeriodicSummary(sessionId, cwd, config); - if (event === "afterAgentResponse" && process.env.HIVEMIND_WIKI_WORKER !== "1" && process.env.HIVEMIND_SKILIFY_WORKER !== "1") { + if (event === "afterAgentResponse" && process.env.HIVEMIND_WIKI_WORKER !== "1" && process.env.HIVEMIND_SKILLIFY_WORKER !== "1") { tryStopCounterTrigger({ config, cwd, diff --git a/cursor/bundle/session-end.js b/cursor/bundle/session-end.js index 13faa928..bb469aa7 100755 --- a/cursor/bundle/session-end.js +++ b/cursor/bundle/session-end.js @@ -230,14 +230,14 @@ function bundleDirFromImportMeta(importMetaUrl) { return dirname(fileURLToPath(importMetaUrl)); } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js import { spawn as spawn2 } from "node:child_process"; import { fileURLToPath as fileURLToPath2 } from "node:url"; import { dirname as dirname2, join as join7 } from "node:path"; import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync4, appendFileSync as appendFileSync3, chmodSync } from "node:fs"; import { homedir as homedir6, tmpdir as tmpdir2 } from "node:os"; -// dist/src/skilify/gate-runner.js +// dist/src/skillify/gate-runner.js import { execFileSync } from "node:child_process"; import { existsSync as existsSync3 } from "node:fs"; import { homedir as homedir5 } from "node:os"; @@ -268,20 +268,20 @@ function findAgentBin(agent) { } } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js var HOME2 = homedir6(); -var 
SKILIFY_LOG = join7(HOME2, ".claude", "hooks", "skilify.log"); -function skilifyLog(msg) { +var SKILLIFY_LOG = join7(HOME2, ".claude", "hooks", "skillify.log"); +function skillifyLog(msg) { try { - mkdirSync4(dirname2(SKILIFY_LOG), { recursive: true }); - appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg} + mkdirSync4(dirname2(SKILLIFY_LOG), { recursive: true }); + appendFileSync3(SKILLIFY_LOG, `[${utcTimestamp()}] ${msg} `); } catch { } } -function spawnSkilifyWorker(opts) { +function spawnSkillifyWorker(opts) { const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts; - const tmpDir = join7(tmpdir2(), `deeplake-skilify-${projectKey}-${Date.now()}`); + const tmpDir = join7(tmpdir2(), `deeplake-skillify-${projectKey}-${Date.now()}`); mkdirSync4(tmpDir, { recursive: true, mode: 448 }); const gateBin = findAgentBin(agent); const configFile = join7(tmpDir, "config.json"); @@ -307,40 +307,72 @@ function spawnSkilifyWorker(opts) { hermesModel: process.env.HIVEMIND_HERMES_MODEL, piProvider: process.env.HIVEMIND_PI_PROVIDER, piModel: process.env.HIVEMIND_PI_MODEL, - skilifyLog: SKILIFY_LOG, + skillifyLog: SKILLIFY_LOG, currentSessionId }), { mode: 384 }); try { chmodSync(configFile, 384); } catch { } - skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`); - const workerPath = join7(bundleDir, "skilify-worker.js"); + skillifyLog(`${reason}: spawning skillify worker for project=${project} key=${projectKey}`); + const workerPath = join7(bundleDir, "skillify-worker.js"); spawn2("nohup", ["node", workerPath, configFile], { detached: true, stdio: ["ignore", "ignore", "ignore"] }).unref(); - skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`); + skillifyLog(`${reason}: spawned skillify worker for ${projectKey}`); } -// dist/src/skilify/state.js -import { readFileSync as readFileSync3, writeFileSync as writeFileSync4, writeSync as writeSync2, mkdirSync as mkdirSync5, 
renameSync as renameSync2, existsSync as existsSync4, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; +// dist/src/skillify/state.js +import { readFileSync as readFileSync3, writeFileSync as writeFileSync4, writeSync as writeSync2, mkdirSync as mkdirSync5, renameSync as renameSync3, existsSync as existsSync5, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; import { execSync as execSync2 } from "node:child_process"; -import { homedir as homedir7 } from "node:os"; +import { homedir as homedir8 } from "node:os"; import { createHash } from "node:crypto"; -import { join as join8, basename } from "node:path"; -var dlog2 = (msg) => log("skilify-state", msg); -var STATE_DIR2 = join8(homedir7(), ".deeplake", "state", "skilify"); +import { join as join9, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync4, renameSync as renameSync2 } from "node:fs"; +import { homedir as homedir7 } from "node:os"; +import { join as join8 } from "node:path"; +var dlog2 = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join8(homedir7(), ".deeplake", "state"); + const legacy = join8(root, "skilify"); + const current = join8(root, "skillify"); + if (!existsSync4(legacy)) + return; + if (existsSync4(current)) + return; + try { + renameSync2(legacy, current); + dlog2(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog2(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog3 = (msg) => log("skillify-state", msg); +var STATE_DIR2 = join9(homedir8(), ".deeplake", "state", "skillify"); var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4)); var TRIGGER_THRESHOLD = (() => { - const n = 
Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? n : 20; })(); function statePath(projectKey) { - return join8(STATE_DIR2, `${projectKey}.json`); + return join9(STATE_DIR2, `${projectKey}.json`); } function lockPath2(projectKey) { - return join8(STATE_DIR2, `${projectKey}.lock`); + return join9(STATE_DIR2, `${projectKey}.lock`); } function deriveProjectKey(cwd) { const project = basename(cwd) || "unknown"; @@ -358,8 +390,9 @@ function deriveProjectKey(cwd) { return { key, project }; } function readState(projectKey) { + migrateLegacyStateDir(); const p = statePath(projectKey); - if (!existsSync4(p)) + if (!existsSync5(p)) return null; try { return JSON.parse(readFileSync3(p, "utf-8")); @@ -368,13 +401,15 @@ function readState(projectKey) { } } function writeState(projectKey, state) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR2, { recursive: true }); const p = statePath(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; writeFileSync4(tmp, JSON.stringify(state, null, 2)); - renameSync2(tmp, p); + renameSync3(tmp, p); } function withRmwLock(projectKey, fn) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR2, { recursive: true }); const rmw = lockPath2(projectKey) + ".rmw"; const deadline = Date.now() + 2e3; @@ -386,11 +421,11 @@ function withRmwLock(projectKey, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { - dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + dlog3(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); try { unlinkSync2(rmw); } catch (unlinkErr) { - dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); } continue; } @@ -404,7 +439,7 @@ function withRmwLock(projectKey, fn) { try { unlinkSync2(rmw); } catch (unlinkErr) { - dlog2(`rmw lock cleanup 
failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); } } } @@ -417,20 +452,21 @@ function resetCounter(projectKey) { }); } function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR2, { recursive: true }); const p = lockPath2(projectKey); - if (existsSync4(p)) { + if (existsSync5(p)) { try { const ageMs = Date.now() - parseInt(readFileSync3(p, "utf-8"), 10); if (Number.isFinite(ageMs) && ageMs < maxAgeMs) return false; } catch (readErr) { - dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); + dlog3(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); } try { unlinkSync2(p); } catch (unlinkErr) { - dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); + dlog3(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); return false; } } @@ -454,15 +490,16 @@ function releaseWorkerLock(projectKey) { } } -// dist/src/skilify/scope-config.js -import { existsSync as existsSync5, mkdirSync as mkdirSync6, readFileSync as readFileSync4, writeFileSync as writeFileSync5 } from "node:fs"; -import { homedir as homedir8 } from "node:os"; -import { join as join9 } from "node:path"; -var STATE_DIR3 = join9(homedir8(), ".deeplake", "state", "skilify"); -var CONFIG_PATH = join9(STATE_DIR3, "config.json"); +// dist/src/skillify/scope-config.js +import { existsSync as existsSync6, mkdirSync as mkdirSync6, readFileSync as readFileSync4, writeFileSync as writeFileSync5 } from "node:fs"; +import { homedir as homedir9 } from "node:os"; +import { join as join10 } from "node:path"; +var STATE_DIR3 = join10(homedir9(), ".deeplake", "state", "skillify"); +var CONFIG_PATH = join10(STATE_DIR3, "config.json"); var DEFAULT = { scope: "me", team: [], install: "project" }; function loadScopeConfig() { - if (!existsSync5(CONFIG_PATH)) + 
migrateLegacyStateDir(); + if (!existsSync6(CONFIG_PATH)) return DEFAULT; try { const raw = JSON.parse(readFileSync4(CONFIG_PATH, "utf-8")); @@ -475,24 +512,24 @@ function loadScopeConfig() { } } -// dist/src/skilify/triggers.js +// dist/src/skillify/triggers.js function forceSessionEndTrigger(opts) { - if (process.env.HIVEMIND_SKILIFY_WORKER === "1") + if (process.env.HIVEMIND_SKILLIFY_WORKER === "1") return; if (!opts.cwd) return; try { const { key: projectKey, project } = deriveProjectKey(opts.cwd); if (!tryAcquireWorkerLock(projectKey)) { - skilifyLog(`SessionEnd: skilify worker already running for ${projectKey}, skipping`); + skillifyLog(`SessionEnd: skillify worker already running for ${projectKey}, skipping`); return; } if (readState(projectKey)) { resetCounter(projectKey); } - skilifyLog(`SessionEnd: spawning skilify worker for project=${project} agent=${opts.agent}`); + skillifyLog(`SessionEnd: spawning skillify worker for project=${project} agent=${opts.agent}`); try { - spawnSkilifyWorker({ + spawnSkillifyWorker({ config: opts.config, cwd: opts.cwd, projectKey, @@ -504,14 +541,14 @@ function forceSessionEndTrigger(opts) { reason: "SessionEnd" }); } catch (e) { - skilifyLog(`SessionEnd spawn failed: ${e?.message ?? e}`); + skillifyLog(`SessionEnd spawn failed: ${e?.message ?? e}`); try { releaseWorkerLock(projectKey); } catch { } } } catch (e) { - skilifyLog(`SessionEnd trigger error: ${e?.message ?? e}`); + skillifyLog(`SessionEnd trigger error: ${e?.message ?? e}`); } } @@ -554,7 +591,7 @@ async function main() { sessionId }); } catch (e) { - wikiLog(`SessionEnd: skilify trigger failed: ${e?.message ?? e}`); + wikiLog(`SessionEnd: skillify trigger failed: ${e?.message ?? 
e}`); } } main().catch((e) => { diff --git a/cursor/bundle/session-start.js b/cursor/bundle/session-start.js index aa464280..b012211d 100755 --- a/cursor/bundle/session-start.js +++ b/cursor/bundle/session-start.js @@ -671,12 +671,12 @@ async function autoUpdate(creds, opts) { log3(`agent=${opts.agent} dispatched (pid=${pid ?? "?"}) (${Date.now() - t0}ms total)`); } -// dist/src/skilify/pull.js -import { existsSync as existsSync7, readFileSync as readFileSync7, writeFileSync as writeFileSync5, mkdirSync as mkdirSync5, renameSync as renameSync2, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync3 } from "node:fs"; -import { homedir as homedir7 } from "node:os"; -import { dirname as dirname3, join as join10 } from "node:path"; +// dist/src/skillify/pull.js +import { existsSync as existsSync8, readFileSync as readFileSync7, writeFileSync as writeFileSync5, mkdirSync as mkdirSync5, renameSync as renameSync3, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync3 } from "node:fs"; +import { homedir as homedir8 } from "node:os"; +import { dirname as dirname3, join as join11 } from "node:path"; -// dist/src/skilify/skill-writer.js +// dist/src/skillify/skill-writer.js import { existsSync as existsSync4, mkdirSync as mkdirSync3, readFileSync as readFileSync5, readdirSync, statSync, writeFileSync as writeFileSync3 } from "node:fs"; import { homedir as homedir4 } from "node:os"; import { join as join7 } from "node:path"; @@ -737,18 +737,51 @@ function parseFrontmatter(text) { return { fm, body }; } -// dist/src/skilify/manifest.js -import { existsSync as existsSync5, lstatSync, mkdirSync as mkdirSync4, readFileSync as readFileSync6, renameSync, unlinkSync as unlinkSync2, writeFileSync as writeFileSync4 } from "node:fs"; +// dist/src/skillify/manifest.js +import { existsSync as existsSync6, lstatSync, mkdirSync as mkdirSync4, readFileSync as readFileSync6, renameSync as renameSync2, unlinkSync as unlinkSync2, writeFileSync as 
writeFileSync4 } from "node:fs"; +import { homedir as homedir6 } from "node:os"; +import { dirname as dirname2, join as join9 } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync5, renameSync } from "node:fs"; import { homedir as homedir5 } from "node:os"; -import { dirname as dirname2, join as join8 } from "node:path"; +import { join as join8 } from "node:path"; +var dlog = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join8(homedir5(), ".deeplake", "state"); + const legacy = join8(root, "skilify"); + const current = join8(root, "skillify"); + if (!existsSync5(legacy)) + return; + if (existsSync5(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/manifest.js function emptyManifest() { return { version: 1, entries: [] }; } function manifestPath() { - return join8(homedir5(), ".deeplake", "state", "skilify", "pulled.json"); + return join9(homedir6(), ".deeplake", "state", "skillify", "pulled.json"); } function loadManifest(path = manifestPath()) { - if (!existsSync5(path)) + migrateLegacyStateDir(); + if (!existsSync6(path)) return emptyManifest(); let raw; try { @@ -798,10 +831,11 @@ function loadManifest(path = manifestPath()) { } } function saveManifest(m, path = manifestPath()) { + migrateLegacyStateDir(); mkdirSync4(dirname2(path), { recursive: true }); const tmp = `${path}.tmp`; writeFileSync4(tmp, JSON.stringify(m, null, 2) + "\n", { mode: 384 }); - renameSync(tmp, path); + renameSync2(tmp, path); } function recordPull(entry, path = manifestPath()) { const m = loadManifest(path); @@ -836,7 +870,7 @@ function pruneOrphanedEntries(path = 
manifestPath()) { const live = []; let pruned = 0; for (const e of m.entries) { - if (existsSync5(join8(e.installRoot, e.dirName))) { + if (existsSync6(join9(e.installRoot, e.dirName))) { live.push(e); continue; } @@ -848,31 +882,31 @@ function pruneOrphanedEntries(path = manifestPath()) { return pruned; } -// dist/src/skilify/agent-roots.js -import { existsSync as existsSync6 } from "node:fs"; -import { homedir as homedir6 } from "node:os"; -import { join as join9 } from "node:path"; +// dist/src/skillify/agent-roots.js +import { existsSync as existsSync7 } from "node:fs"; +import { homedir as homedir7 } from "node:os"; +import { join as join10 } from "node:path"; function resolveDetected(home) { const out = []; - const codexInstalled = existsSync6(join9(home, ".codex")); - const piInstalled = existsSync6(join9(home, ".pi", "agent")); - const hermesInstalled = existsSync6(join9(home, ".hermes")); + const codexInstalled = existsSync7(join10(home, ".codex")); + const piInstalled = existsSync7(join10(home, ".pi", "agent")); + const hermesInstalled = existsSync7(join10(home, ".hermes")); if (codexInstalled || piInstalled) { - out.push(join9(home, ".agents", "skills")); + out.push(join10(home, ".agents", "skills")); } if (hermesInstalled) { - out.push(join9(home, ".hermes", "skills")); + out.push(join10(home, ".hermes", "skills")); } if (piInstalled) { - out.push(join9(home, ".pi", "agent", "skills")); + out.push(join10(home, ".pi", "agent", "skills")); } return out; } -function detectAgentSkillsRoots(canonicalRoot, home = homedir6()) { +function detectAgentSkillsRoots(canonicalRoot, home = homedir7()) { return resolveDetected(home).filter((p) => p !== canonicalRoot); } -// dist/src/skilify/pull.js +// dist/src/skillify/pull.js function assertValidAuthor(author) { if (!author) throw new Error("author is empty"); @@ -904,15 +938,15 @@ function isMissingTableError(message) { } function resolvePullDestination(install, cwd) { if (install === "global") - return 
join10(homedir7(), ".claude", "skills"); + return join11(homedir8(), ".claude", "skills"); if (!cwd) throw new Error("install=project requires a cwd"); - return join10(cwd, ".claude", "skills"); + return join11(cwd, ".claude", "skills"); } function fanOutSymlinks(canonicalDir, dirName, agentRoots) { const out = []; for (const root of agentRoots) { - const link = join10(root, dirName); + const link = join11(root, dirName); let existing; try { existing = lstatSync2(link); @@ -955,8 +989,8 @@ function backfillSymlinks(installRoot) { return; const detected = detectAgentSkillsRoots(installRoot); for (const entry of entries) { - const canonical = join10(entry.installRoot, entry.dirName); - if (!existsSync7(canonical)) + const canonical = join11(entry.installRoot, entry.dirName); + if (!existsSync8(canonical)) continue; const fresh = fanOutSymlinks(canonical, entry.dirName, detected); if (sameSorted(fresh, entry.symlinks)) @@ -1041,7 +1075,7 @@ function renderFrontmatter(fm) { return lines.join("\n"); } function readLocalVersion(path) { - if (!existsSync7(path)) + if (!existsSync8(path)) return null; try { const text = readFileSync7(path, "utf-8"); @@ -1130,8 +1164,8 @@ async function runPull(opts) { summary.skipped++; continue; } - const skillDir = join10(root, dirName); - const skillFile = join10(skillDir, "SKILL.md"); + const skillDir = join11(root, dirName); + const skillFile = join11(skillDir, "SKILL.md"); const remoteVersion = Number(row.version ?? 
1); const localVersion = readLocalVersion(skillFile); const action = decideAction({ @@ -1143,9 +1177,9 @@ async function runPull(opts) { let manifestError; if (action === "wrote") { mkdirSync5(skillDir, { recursive: true }); - if (existsSync7(skillFile)) { + if (existsSync8(skillFile)) { try { - renameSync2(skillFile, `${skillFile}.bak`); + renameSync3(skillFile, `${skillFile}.bak`); } catch { } } @@ -1190,8 +1224,8 @@ async function runPull(opts) { return summary; } -// dist/src/skilify/auto-pull.js -var log4 = (msg) => log("skilify-autopull", msg); +// dist/src/skillify/auto-pull.js +var log4 = (msg) => log("skillify-autopull", msg); var DEFAULT_TIMEOUT_MS = 5e3; function withTimeout(p, ms) { let timer = null; @@ -1264,23 +1298,23 @@ Organization management \u2014 each argument is SEPARATE (do NOT quote subcomman - hivemind members \u2014 list members - hivemind remove \u2014 remove member -SKILLS (skilify) \u2014 mine + share reusable skills across the org: -- hivemind skilify \u2014 show scope/team/install + per-project state -- hivemind skilify pull \u2014 sync project skills from the org table -- hivemind skilify pull --user \u2014 only that author's skills -- hivemind skilify pull --users a,b,c \u2014 multiple authors (CSV) -- hivemind skilify pull --all-users \u2014 explicit "no author filter" -- hivemind skilify pull --to project|global \u2014 install location -- hivemind skilify pull --dry-run \u2014 preview only -- hivemind skilify pull --force \u2014 overwrite local (creates .bak) -- hivemind skilify pull \u2014 pull only that skill (combines with --user) -- hivemind skilify unpull \u2014 remove every skill previously installed by pull -- hivemind skilify unpull --user \u2014 remove only that author's pulls -- hivemind skilify unpull --not-mine \u2014 remove all pulls except your own -- hivemind skilify unpull --dry-run \u2014 preview without touching disk -- hivemind skilify scope \u2014 sharing scope for new skills -- hivemind skilify install \u2014 
default install location -- hivemind skilify team add|remove|list \u2014 manage team list`; +SKILLS (skillify) \u2014 mine + share reusable skills across the org: +- hivemind skillify \u2014 show scope/team/install + per-project state +- hivemind skillify pull \u2014 sync project skills from the org table +- hivemind skillify pull --user \u2014 only that author's skills +- hivemind skillify pull --users a,b,c \u2014 multiple authors (CSV) +- hivemind skillify pull --all-users \u2014 explicit "no author filter" +- hivemind skillify pull --to project|global \u2014 install location +- hivemind skillify pull --dry-run \u2014 preview only +- hivemind skillify pull --force \u2014 overwrite local (creates .bak) +- hivemind skillify pull \u2014 pull only that skill (combines with --user) +- hivemind skillify unpull \u2014 remove every skill previously installed by pull +- hivemind skillify unpull --user \u2014 remove only that author's pulls +- hivemind skillify unpull --not-mine \u2014 remove all pulls except your own +- hivemind skillify unpull --dry-run \u2014 preview without touching disk +- hivemind skillify scope \u2014 sharing scope for new skills +- hivemind skillify install \u2014 default install location +- hivemind skillify team add|remove|list \u2014 manage team list`; function resolveSessionId(input) { return input.session_id ?? input.conversation_id ?? 
`cursor-${Date.now()}`; } diff --git a/cursor/bundle/skilify-worker.js b/cursor/bundle/skillify-worker.js similarity index 92% rename from cursor/bundle/skilify-worker.js rename to cursor/bundle/skillify-worker.js index 8eb534fd..987a16ba 100755 --- a/cursor/bundle/skilify-worker.js +++ b/cursor/bundle/skillify-worker.js @@ -1,8 +1,8 @@ #!/usr/bin/env node -// dist/src/skilify/skilify-worker.js -import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, existsSync as existsSync4, appendFileSync as appendFileSync2, rmSync } from "node:fs"; -import { join as join5 } from "node:path"; +// dist/src/skillify/skillify-worker.js +import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, existsSync as existsSync5, appendFileSync as appendFileSync2, rmSync } from "node:fs"; +import { join as join6 } from "node:path"; // dist/src/utils/debug.js import { appendFileSync } from "node:fs"; @@ -29,7 +29,7 @@ function deeplakeClientHeader() { return { [DEEPLAKE_CLIENT_HEADER]: deeplakeClientValue() }; } -// dist/src/skilify/extractors/index.js +// dist/src/skillify/extractors/index.js function extractPairs(rows) { const pairs = []; let pendingPrompt = null; @@ -60,7 +60,7 @@ function extractPairs(rows) { return pairs; } -// dist/src/skilify/skill-writer.js +// dist/src/skillify/skill-writer.js import { existsSync, mkdirSync, readFileSync, readdirSync, statSync, writeFileSync } from "node:fs"; import { homedir as homedir2 } from "node:os"; import { join as join2 } from "node:path"; @@ -216,7 +216,7 @@ function resolveSkillsRoot(install, cwd) { return join2(cwd, ".claude", "skills"); } -// dist/src/skilify/skills-table.js +// dist/src/skillify/skills-table.js import { randomUUID } from "node:crypto"; // dist/src/utils/sql.js @@ -227,7 +227,7 @@ function sqlIdent(name) { return name; } -// dist/src/skilify/skills-table.js +// dist/src/skillify/skills-table.js function createSkillsTableSql(tableName) { const safe = sqlIdent(tableName); return `CREATE TABLE 
IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`; @@ -256,7 +256,7 @@ async function insertSkillRow(args) { } } -// dist/src/skilify/gate-parser.js +// dist/src/skillify/gate-parser.js function extractJsonBlock(s) { const trimmed = s.trim(); if (!trimmed) @@ -294,7 +294,7 @@ function parseVerdict(raw) { } } -// dist/src/skilify/gate-runner.js +// dist/src/skillify/gate-runner.js import { execFileSync } from "node:child_process"; import { existsSync as existsSync2 } from "node:fs"; import { homedir as homedir3 } from "node:os"; @@ -403,28 +403,61 @@ function runGate(opts) { } } -// dist/src/skilify/state.js -import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync3, unlinkSync, openSync, closeSync } from "node:fs"; +// dist/src/skillify/state.js +import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync as renameSync2, existsSync as existsSync4, unlinkSync, openSync, closeSync } from "node:fs"; import { execSync } from "node:child_process"; -import { homedir as homedir4 } from "node:os"; +import { homedir as homedir5 } from "node:os"; import { createHash } from "node:crypto"; -import { join as join4, basename } from "node:path"; -var dlog = (msg) => log("skilify-state", msg); -var STATE_DIR = join4(homedir4(), ".deeplake", "state", "skilify"); +import { join 
as join5, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync3, renameSync } from "node:fs"; +import { homedir as homedir4 } from "node:os"; +import { join as join4 } from "node:path"; +var dlog = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join4(homedir4(), ".deeplake", "state"); + const legacy = join4(root, "skilify"); + const current = join4(root, "skillify"); + if (!existsSync3(legacy)) + return; + if (existsSync3(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog2 = (msg) => log("skillify-state", msg); +var STATE_DIR = join5(homedir5(), ".deeplake", "state", "skillify"); var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); var TRIGGER_THRESHOLD = (() => { - const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? 
n : 20; })(); function statePath(projectKey) { - return join4(STATE_DIR, `${projectKey}.json`); + return join5(STATE_DIR, `${projectKey}.json`); } function lockPath(projectKey) { - return join4(STATE_DIR, `${projectKey}.lock`); + return join5(STATE_DIR, `${projectKey}.lock`); } function readState(projectKey) { + migrateLegacyStateDir(); const p = statePath(projectKey); - if (!existsSync3(p)) + if (!existsSync4(p)) return null; try { return JSON.parse(readFileSync2(p, "utf-8")); @@ -433,13 +466,15 @@ function readState(projectKey) { } } function writeState(projectKey, state) { + migrateLegacyStateDir(); mkdirSync2(STATE_DIR, { recursive: true }); const p = statePath(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; writeFileSync2(tmp, JSON.stringify(state, null, 2)); - renameSync(tmp, p); + renameSync2(tmp, p); } function withRmwLock(projectKey, fn) { + migrateLegacyStateDir(); mkdirSync2(STATE_DIR, { recursive: true }); const rmw = lockPath(projectKey) + ".rmw"; const deadline = Date.now() + 2e3; @@ -451,11 +486,11 @@ function withRmwLock(projectKey, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { - dlog(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); try { unlinkSync(rmw); } catch (unlinkErr) { - dlog(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); } continue; } @@ -469,7 +504,7 @@ function withRmwLock(projectKey, fn) { try { unlinkSync(rmw); } catch (unlinkErr) { - dlog(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); + dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); } } } @@ -509,18 +544,18 @@ function releaseWorkerLock(projectKey) { } } -// dist/src/skilify/skilify-worker.js +// dist/src/skillify/skillify-worker.js var cfg = JSON.parse(readFileSync3(process.argv[2], 
"utf-8")); var tmpDir = cfg.tmpDir; -var verdictPath = join5(tmpDir, "verdict.json"); -var promptPath = join5(tmpDir, "prompt.txt"); +var verdictPath = join6(tmpDir, "verdict.json"); +var promptPath = join6(tmpDir, "prompt.txt"); var SESSIONS_TO_MINE = 10; var PAIR_CHAR_CAP = 2e3; var TOTAL_PAIRS_CHAR_CAP = 4e4; var EXISTING_SKILLS_CHAR_CAP = 3e4; function wlog(msg) { try { - appendFileSync2(cfg.skilifyLog, `[${utcTimestamp()}] skilify-worker(${cfg.projectKey}): ${msg} + appendFileSync2(cfg.skillifyLog, `[${utcTimestamp()}] skillify-worker(${cfg.projectKey}): ${msg} `); } catch { } @@ -715,7 +750,7 @@ function buildPrompt(pairs) { ].join("\n"); } function readVerdict(stdout) { - if (existsSync4(verdictPath)) { + if (existsSync5(verdictPath)) { try { const text = readFileSync3(verdictPath, "utf-8"); const v2 = parseVerdict(text); @@ -784,9 +819,9 @@ async function main() { timeoutMs: 12e4 }); try { - writeFileSync3(join5(tmpDir, "gate-stdout.txt"), gate.stdout); + writeFileSync3(join6(tmpDir, "gate-stdout.txt"), gate.stdout); if (gate.stderr) - writeFileSync3(join5(tmpDir, "gate-stderr.txt"), gate.stderr); + writeFileSync3(join6(tmpDir, "gate-stderr.txt"), gate.stderr); } catch { } if (gate.errored) { diff --git a/esbuild.config.mjs b/esbuild.config.mjs index 6c107535..6ad3ea34 100644 --- a/esbuild.config.mjs +++ b/esbuild.config.mjs @@ -16,7 +16,7 @@ const ccHooks = [ { entry: "dist/src/hooks/session-end.js", out: "session-end" }, { entry: "dist/src/hooks/plugin-cache-gc.js", out: "plugin-cache-gc" }, { entry: "dist/src/hooks/wiki-worker.js", out: "wiki-worker" }, - { entry: "dist/src/skilify/skilify-worker.js", out: "skilify-worker" }, + { entry: "dist/src/skillify/skillify-worker.js", out: "skillify-worker" }, ]; const ccShell = [ @@ -66,7 +66,7 @@ const codexHooks = [ { entry: "dist/src/hooks/codex/pre-tool-use.js", out: "pre-tool-use" }, { entry: "dist/src/hooks/codex/stop.js", out: "stop" }, { entry: "dist/src/hooks/codex/wiki-worker.js", out: "wiki-worker" }, 
- { entry: "dist/src/skilify/skilify-worker.js", out: "skilify-worker" }, + { entry: "dist/src/skillify/skillify-worker.js", out: "skillify-worker" }, ]; const codexShell = [ @@ -115,7 +115,7 @@ const cursorHooks = [ { entry: "dist/src/hooks/cursor/session-end.js", out: "session-end" }, { entry: "dist/src/hooks/cursor/pre-tool-use.js", out: "pre-tool-use" }, { entry: "dist/src/hooks/cursor/wiki-worker.js", out: "wiki-worker" }, - { entry: "dist/src/skilify/skilify-worker.js", out: "skilify-worker" }, + { entry: "dist/src/skillify/skillify-worker.js", out: "skillify-worker" }, ]; // Hermes Agent shell-hook bundles (matches Claude Code's wire protocol; see @@ -126,7 +126,7 @@ const hermesHooks = [ { entry: "dist/src/hooks/hermes/session-end.js", out: "session-end" }, { entry: "dist/src/hooks/hermes/pre-tool-use.js", out: "pre-tool-use" }, { entry: "dist/src/hooks/hermes/wiki-worker.js", out: "wiki-worker" }, - { entry: "dist/src/skilify/skilify-worker.js", out: "skilify-worker" }, + { entry: "dist/src/skillify/skillify-worker.js", out: "skillify-worker" }, ]; const cursorShell = [ @@ -205,20 +205,20 @@ for (const h of hermesAll) { chmodSync(`hermes/bundle/${h.out}.js`, 0o755); } -// Pi (badlogic/pi-mono) — ships a wiki-worker bundle, a skilify-worker +// Pi (badlogic/pi-mono) — ships a wiki-worker bundle, a skillify-worker // bundle, and an autopull-worker bundle. The pi extension itself is raw .ts // at pi/extension-source/hivemind.ts; we don't bundle it because pi's // runtime compiles + loads the .ts file directly. Embed daemon reuses the // canonical ~/.hivemind/embed-deps/embed-daemon.js — no per-pi embed -// bundle needed. Skilify worker is the same shared module used by +// bundle needed. Skillify worker is the same shared module used by // CC/Codex/Cursor/Hermes; pi spawns it from session_shutdown. 
// Autopull worker is the same maybeAutoPull() the other agents call // directly; pi can't import it (raw .ts, zero deps) so it spawns this // bundle synchronously from session_start. const piWorker = [ { entry: "dist/src/hooks/pi/wiki-worker.js", out: "wiki-worker" }, - { entry: "dist/src/skilify/skilify-worker.js", out: "skilify-worker" }, - { entry: "dist/src/skilify/autopull-worker.js", out: "autopull-worker" }, + { entry: "dist/src/skillify/skillify-worker.js", out: "skillify-worker" }, + { entry: "dist/src/skillify/autopull-worker.js", out: "autopull-worker" }, ]; await build({ entryPoints: Object.fromEntries(piWorker.map(h => [h.out, h.entry])), @@ -303,7 +303,7 @@ await build({ }); writeFileSync("openclaw/dist/package.json", esmPackageJson); -// OpenClaw skilify-worker bundle. Same shared module CC/Codex/Cursor/Hermes/Pi +// OpenClaw skillify-worker bundle. Same shared module CC/Codex/Cursor/Hermes/Pi // use; openclaw spawns it from its agent_end hook to mine reusable skills out // of just-captured sessions. Built as a SEPARATE entry (not added to the main // openclaw build above) because: @@ -312,11 +312,11 @@ writeFileSync("openclaw/dist/package.json", esmPackageJson); // with no stubs. // 2. The main bundle uses code splitting (chunks/), and we don't want the // worker's modules entangled with the gateway's chunk graph. -// Lands at openclaw/dist/skilify-worker.js — install-openclaw.ts already +// Lands at openclaw/dist/skillify-worker.js — install-openclaw.ts already // copies the entire dist/ recursively, so it ships to -// ~/.openclaw/extensions/hivemind/dist/skilify-worker.js with no other change. +// ~/.openclaw/extensions/hivemind/dist/skillify-worker.js with no other change. 
await build({ - entryPoints: { "skilify-worker": "dist/src/skilify/skilify-worker.js" }, + entryPoints: { "skillify-worker": "dist/src/skillify/skillify-worker.js" }, bundle: true, platform: "node", format: "esm", @@ -326,7 +326,7 @@ await build({ __HIVEMIND_VERSION__: JSON.stringify(hivemindVersion), }, }); -chmodSync("openclaw/dist/skilify-worker.js", 0o755); +chmodSync("openclaw/dist/skillify-worker.js", 0o755); // Hivemind MCP server (stdio). Reused by Cline / Roo / Kilo / any MCP-aware // agent. Lives at ~/.hivemind/mcp/server.js after install. diff --git a/hermes/bundle/capture.js b/hermes/bundle/capture.js index 859da309..648b11d9 100755 --- a/hermes/bundle/capture.js +++ b/hermes/bundle/capture.js @@ -838,7 +838,7 @@ function embeddingsDisabled() { // dist/src/hooks/hermes/capture.js import { fileURLToPath as fileURLToPath3 } from "node:url"; -import { dirname as dirname3, join as join13 } from "node:path"; +import { dirname as dirname3, join as join14 } from "node:path"; // dist/src/hooks/summary-state.js import { readFileSync as readFileSync4, writeFileSync as writeFileSync2, writeSync as writeSync2, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync4, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; @@ -1096,14 +1096,14 @@ function bundleDirFromImportMeta(importMetaUrl) { return dirname(fileURLToPath(importMetaUrl)); } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js import { spawn as spawn3 } from "node:child_process"; import { fileURLToPath as fileURLToPath2 } from "node:url"; import { dirname as dirname2, join as join10 } from "node:path"; import { writeFileSync as writeFileSync4, mkdirSync as mkdirSync5, appendFileSync as appendFileSync3, chmodSync } from "node:fs"; import { homedir as homedir8, tmpdir as tmpdir3 } from "node:os"; -// dist/src/skilify/gate-runner.js +// dist/src/skillify/gate-runner.js import { execFileSync } from "node:child_process"; 
import { existsSync as existsSync5 } from "node:fs"; import { homedir as homedir7 } from "node:os"; @@ -1134,20 +1134,20 @@ function findAgentBin(agent) { } } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js var HOME2 = homedir8(); -var SKILIFY_LOG = join10(HOME2, ".claude", "hooks", "skilify.log"); -function skilifyLog(msg) { +var SKILLIFY_LOG = join10(HOME2, ".claude", "hooks", "skillify.log"); +function skillifyLog(msg) { try { - mkdirSync5(dirname2(SKILIFY_LOG), { recursive: true }); - appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg} + mkdirSync5(dirname2(SKILLIFY_LOG), { recursive: true }); + appendFileSync3(SKILLIFY_LOG, `[${utcTimestamp()}] ${msg} `); } catch { } } -function spawnSkilifyWorker(opts) { +function spawnSkillifyWorker(opts) { const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts; - const tmpDir = join10(tmpdir3(), `deeplake-skilify-${projectKey}-${Date.now()}`); + const tmpDir = join10(tmpdir3(), `deeplake-skillify-${projectKey}-${Date.now()}`); mkdirSync5(tmpDir, { recursive: true, mode: 448 }); const gateBin = findAgentBin(agent); const configFile = join10(tmpDir, "config.json"); @@ -1173,40 +1173,72 @@ function spawnSkilifyWorker(opts) { hermesModel: process.env.HIVEMIND_HERMES_MODEL, piProvider: process.env.HIVEMIND_PI_PROVIDER, piModel: process.env.HIVEMIND_PI_MODEL, - skilifyLog: SKILIFY_LOG, + skillifyLog: SKILLIFY_LOG, currentSessionId }), { mode: 384 }); try { chmodSync(configFile, 384); } catch { } - skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`); - const workerPath = join10(bundleDir, "skilify-worker.js"); + skillifyLog(`${reason}: spawning skillify worker for project=${project} key=${projectKey}`); + const workerPath = join10(bundleDir, "skillify-worker.js"); spawn3("nohup", ["node", workerPath, configFile], { detached: true, stdio: ["ignore", "ignore", "ignore"] }).unref(); - 
skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`); + skillifyLog(`${reason}: spawned skillify worker for ${projectKey}`); } -// dist/src/skilify/state.js -import { readFileSync as readFileSync5, writeFileSync as writeFileSync5, writeSync as writeSync3, mkdirSync as mkdirSync6, renameSync as renameSync2, existsSync as existsSync6, unlinkSync as unlinkSync3, openSync as openSync3, closeSync as closeSync3 } from "node:fs"; +// dist/src/skillify/state.js +import { readFileSync as readFileSync5, writeFileSync as writeFileSync5, writeSync as writeSync3, mkdirSync as mkdirSync6, renameSync as renameSync3, existsSync as existsSync7, unlinkSync as unlinkSync3, openSync as openSync3, closeSync as closeSync3 } from "node:fs"; import { execSync as execSync2 } from "node:child_process"; -import { homedir as homedir9 } from "node:os"; +import { homedir as homedir10 } from "node:os"; import { createHash } from "node:crypto"; -import { join as join11, basename } from "node:path"; -var dlog2 = (msg) => log("skilify-state", msg); -var STATE_DIR2 = join11(homedir9(), ".deeplake", "state", "skilify"); +import { join as join12, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync6, renameSync as renameSync2 } from "node:fs"; +import { homedir as homedir9 } from "node:os"; +import { join as join11 } from "node:path"; +var dlog2 = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join11(homedir9(), ".deeplake", "state"); + const legacy = join11(root, "skilify"); + const current = join11(root, "skillify"); + if (!existsSync6(legacy)) + return; + if (existsSync6(current)) + return; + try { + renameSync2(legacy, current); + dlog2(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog2(`migration failed (${code}); leaving legacy dir in 
place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog3 = (msg) => log("skillify-state", msg); +var STATE_DIR2 = join12(homedir10(), ".deeplake", "state", "skillify"); var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4)); var TRIGGER_THRESHOLD = (() => { - const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? n : 20; })(); function statePath2(projectKey) { - return join11(STATE_DIR2, `${projectKey}.json`); + return join12(STATE_DIR2, `${projectKey}.json`); } function lockPath2(projectKey) { - return join11(STATE_DIR2, `${projectKey}.lock`); + return join12(STATE_DIR2, `${projectKey}.lock`); } function deriveProjectKey(cwd) { const project = basename(cwd) || "unknown"; @@ -1224,8 +1256,9 @@ function deriveProjectKey(cwd) { return { key, project }; } function readState2(projectKey) { + migrateLegacyStateDir(); const p = statePath2(projectKey); - if (!existsSync6(p)) + if (!existsSync7(p)) return null; try { return JSON.parse(readFileSync5(p, "utf-8")); @@ -1234,13 +1267,15 @@ function readState2(projectKey) { } } function writeState2(projectKey, state) { + migrateLegacyStateDir(); mkdirSync6(STATE_DIR2, { recursive: true }); const p = statePath2(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; writeFileSync5(tmp, JSON.stringify(state, null, 2)); - renameSync2(tmp, p); + renameSync3(tmp, p); } function withRmwLock2(projectKey, fn) { + migrateLegacyStateDir(); mkdirSync6(STATE_DIR2, { recursive: true }); const rmw = lockPath2(projectKey) + ".rmw"; const deadline = Date.now() + 2e3; @@ -1252,11 +1287,11 @@ function withRmwLock2(projectKey, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { - dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + dlog3(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); try { unlinkSync3(rmw); } catch 
(unlinkErr) { - dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); } continue; } @@ -1270,7 +1305,7 @@ function withRmwLock2(projectKey, fn) { try { unlinkSync3(rmw); } catch (unlinkErr) { - dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); } } } @@ -1300,20 +1335,21 @@ function resetCounter(projectKey) { }); } function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) { + migrateLegacyStateDir(); mkdirSync6(STATE_DIR2, { recursive: true }); const p = lockPath2(projectKey); - if (existsSync6(p)) { + if (existsSync7(p)) { try { const ageMs = Date.now() - parseInt(readFileSync5(p, "utf-8"), 10); if (Number.isFinite(ageMs) && ageMs < maxAgeMs) return false; } catch (readErr) { - dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); + dlog3(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); } try { unlinkSync3(p); } catch (unlinkErr) { - dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); + dlog3(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); return false; } } @@ -1337,15 +1373,16 @@ function releaseWorkerLock(projectKey) { } } -// dist/src/skilify/scope-config.js -import { existsSync as existsSync7, mkdirSync as mkdirSync7, readFileSync as readFileSync6, writeFileSync as writeFileSync6 } from "node:fs"; -import { homedir as homedir10 } from "node:os"; -import { join as join12 } from "node:path"; -var STATE_DIR3 = join12(homedir10(), ".deeplake", "state", "skilify"); -var CONFIG_PATH = join12(STATE_DIR3, "config.json"); +// dist/src/skillify/scope-config.js +import { existsSync as existsSync8, mkdirSync as mkdirSync7, readFileSync as readFileSync6, writeFileSync as writeFileSync6 } from "node:fs"; +import { homedir as 
homedir11 } from "node:os"; +import { join as join13 } from "node:path"; +var STATE_DIR3 = join13(homedir11(), ".deeplake", "state", "skillify"); +var CONFIG_PATH = join13(STATE_DIR3, "config.json"); var DEFAULT = { scope: "me", team: [], install: "project" }; function loadScopeConfig() { - if (!existsSync7(CONFIG_PATH)) + migrateLegacyStateDir(); + if (!existsSync8(CONFIG_PATH)) return DEFAULT; try { const raw = JSON.parse(readFileSync6(CONFIG_PATH, "utf-8")); @@ -1358,9 +1395,9 @@ function loadScopeConfig() { } } -// dist/src/skilify/triggers.js +// dist/src/skillify/triggers.js function tryStopCounterTrigger(opts) { - if (process.env.HIVEMIND_SKILIFY_WORKER === "1") + if (process.env.HIVEMIND_SKILLIFY_WORKER === "1") return; if (!opts.cwd) return; @@ -1369,13 +1406,13 @@ function tryStopCounterTrigger(opts) { if (state.counter < TRIGGER_THRESHOLD) return; if (!tryAcquireWorkerLock(state.projectKey)) { - skilifyLog(`Stop: trigger suppressed (worker lock held) project=${state.project}`); + skillifyLog(`Stop: trigger suppressed (worker lock held) project=${state.project}`); return; } - skilifyLog(`Stop: threshold hit (counter=${state.counter}, N=${TRIGGER_THRESHOLD}) project=${state.project} agent=${opts.agent}`); + skillifyLog(`Stop: threshold hit (counter=${state.counter}, N=${TRIGGER_THRESHOLD}) project=${state.project} agent=${opts.agent}`); resetCounter(state.projectKey); try { - spawnSkilifyWorker({ + spawnSkillifyWorker({ config: opts.config, cwd: opts.cwd, projectKey: state.projectKey, @@ -1387,21 +1424,21 @@ function tryStopCounterTrigger(opts) { reason: "Stop" }); } catch (e) { - skilifyLog(`Stop spawn failed: ${e?.message ?? e}`); + skillifyLog(`Stop spawn failed: ${e?.message ?? e}`); try { releaseWorkerLock(state.projectKey); } catch { } } } catch (e) { - skilifyLog(`Stop trigger error: ${e?.message ?? e}`); + skillifyLog(`Stop trigger error: ${e?.message ?? 
e}`); } } // dist/src/hooks/hermes/capture.js var log4 = (msg) => log("hermes-capture", msg); function resolveEmbedDaemonPath() { - return join13(dirname3(fileURLToPath3(import.meta.url)), "embeddings", "embed-daemon.js"); + return join14(dirname3(fileURLToPath3(import.meta.url)), "embeddings", "embed-daemon.js"); } var CAPTURE = process.env.HIVEMIND_CAPTURE !== "false"; function pickString(...candidates) { @@ -1487,7 +1524,7 @@ async function main() { } log4("capture ok \u2192 cloud"); maybeTriggerPeriodicSummary(sessionId, cwd, config); - if (event === "post_llm_call" && process.env.HIVEMIND_WIKI_WORKER !== "1" && process.env.HIVEMIND_SKILIFY_WORKER !== "1") { + if (event === "post_llm_call" && process.env.HIVEMIND_WIKI_WORKER !== "1" && process.env.HIVEMIND_SKILLIFY_WORKER !== "1") { tryStopCounterTrigger({ config, cwd, diff --git a/hermes/bundle/session-end.js b/hermes/bundle/session-end.js index feeefdc2..5203dc19 100755 --- a/hermes/bundle/session-end.js +++ b/hermes/bundle/session-end.js @@ -229,14 +229,14 @@ function bundleDirFromImportMeta(importMetaUrl) { return dirname(fileURLToPath(importMetaUrl)); } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js import { spawn as spawn2 } from "node:child_process"; import { fileURLToPath as fileURLToPath2 } from "node:url"; import { dirname as dirname2, join as join7 } from "node:path"; import { writeFileSync as writeFileSync3, mkdirSync as mkdirSync4, appendFileSync as appendFileSync3, chmodSync } from "node:fs"; import { homedir as homedir6, tmpdir as tmpdir2 } from "node:os"; -// dist/src/skilify/gate-runner.js +// dist/src/skillify/gate-runner.js import { execFileSync } from "node:child_process"; import { existsSync as existsSync3 } from "node:fs"; import { homedir as homedir5 } from "node:os"; @@ -267,20 +267,20 @@ function findAgentBin(agent) { } } -// dist/src/skilify/spawn-skilify-worker.js +// dist/src/skillify/spawn-skillify-worker.js var HOME2 = homedir6(); -var 
SKILIFY_LOG = join7(HOME2, ".claude", "hooks", "skilify.log"); -function skilifyLog(msg) { +var SKILLIFY_LOG = join7(HOME2, ".claude", "hooks", "skillify.log"); +function skillifyLog(msg) { try { - mkdirSync4(dirname2(SKILIFY_LOG), { recursive: true }); - appendFileSync3(SKILIFY_LOG, `[${utcTimestamp()}] ${msg} + mkdirSync4(dirname2(SKILLIFY_LOG), { recursive: true }); + appendFileSync3(SKILLIFY_LOG, `[${utcTimestamp()}] ${msg} `); } catch { } } -function spawnSkilifyWorker(opts) { +function spawnSkillifyWorker(opts) { const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts; - const tmpDir = join7(tmpdir2(), `deeplake-skilify-${projectKey}-${Date.now()}`); + const tmpDir = join7(tmpdir2(), `deeplake-skillify-${projectKey}-${Date.now()}`); mkdirSync4(tmpDir, { recursive: true, mode: 448 }); const gateBin = findAgentBin(agent); const configFile = join7(tmpDir, "config.json"); @@ -306,40 +306,72 @@ function spawnSkilifyWorker(opts) { hermesModel: process.env.HIVEMIND_HERMES_MODEL, piProvider: process.env.HIVEMIND_PI_PROVIDER, piModel: process.env.HIVEMIND_PI_MODEL, - skilifyLog: SKILIFY_LOG, + skillifyLog: SKILLIFY_LOG, currentSessionId }), { mode: 384 }); try { chmodSync(configFile, 384); } catch { } - skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`); - const workerPath = join7(bundleDir, "skilify-worker.js"); + skillifyLog(`${reason}: spawning skillify worker for project=${project} key=${projectKey}`); + const workerPath = join7(bundleDir, "skillify-worker.js"); spawn2("nohup", ["node", workerPath, configFile], { detached: true, stdio: ["ignore", "ignore", "ignore"] }).unref(); - skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`); + skillifyLog(`${reason}: spawned skillify worker for ${projectKey}`); } -// dist/src/skilify/state.js -import { readFileSync as readFileSync3, writeFileSync as writeFileSync4, writeSync as writeSync2, mkdirSync as mkdirSync5, 
renameSync as renameSync2, existsSync as existsSync4, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; +// dist/src/skillify/state.js +import { readFileSync as readFileSync3, writeFileSync as writeFileSync4, writeSync as writeSync2, mkdirSync as mkdirSync5, renameSync as renameSync3, existsSync as existsSync5, unlinkSync as unlinkSync2, openSync as openSync2, closeSync as closeSync2 } from "node:fs"; import { execSync as execSync2 } from "node:child_process"; -import { homedir as homedir7 } from "node:os"; +import { homedir as homedir8 } from "node:os"; import { createHash } from "node:crypto"; -import { join as join8, basename } from "node:path"; -var dlog2 = (msg) => log("skilify-state", msg); -var STATE_DIR2 = join8(homedir7(), ".deeplake", "state", "skilify"); +import { join as join9, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync4, renameSync as renameSync2 } from "node:fs"; +import { homedir as homedir7 } from "node:os"; +import { join as join8 } from "node:path"; +var dlog2 = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join8(homedir7(), ".deeplake", "state"); + const legacy = join8(root, "skilify"); + const current = join8(root, "skillify"); + if (!existsSync4(legacy)) + return; + if (existsSync4(current)) + return; + try { + renameSync2(legacy, current); + dlog2(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog2(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog3 = (msg) => log("skillify-state", msg); +var STATE_DIR2 = join9(homedir8(), ".deeplake", "state", "skillify"); var YIELD_BUF2 = new Int32Array(new SharedArrayBuffer(4)); var TRIGGER_THRESHOLD = (() => { - const n = 
Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? n : 20; })(); function statePath(projectKey) { - return join8(STATE_DIR2, `${projectKey}.json`); + return join9(STATE_DIR2, `${projectKey}.json`); } function lockPath2(projectKey) { - return join8(STATE_DIR2, `${projectKey}.lock`); + return join9(STATE_DIR2, `${projectKey}.lock`); } function deriveProjectKey(cwd) { const project = basename(cwd) || "unknown"; @@ -357,8 +389,9 @@ function deriveProjectKey(cwd) { return { key, project }; } function readState(projectKey) { + migrateLegacyStateDir(); const p = statePath(projectKey); - if (!existsSync4(p)) + if (!existsSync5(p)) return null; try { return JSON.parse(readFileSync3(p, "utf-8")); @@ -367,13 +400,15 @@ function readState(projectKey) { } } function writeState(projectKey, state) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR2, { recursive: true }); const p = statePath(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; writeFileSync4(tmp, JSON.stringify(state, null, 2)); - renameSync2(tmp, p); + renameSync3(tmp, p); } function withRmwLock(projectKey, fn) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR2, { recursive: true }); const rmw = lockPath2(projectKey) + ".rmw"; const deadline = Date.now() + 2e3; @@ -385,11 +420,11 @@ function withRmwLock(projectKey, fn) { if (e.code !== "EEXIST") throw e; if (Date.now() > deadline) { - dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + dlog3(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); try { unlinkSync2(rmw); } catch (unlinkErr) { - dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); } continue; } @@ -403,7 +438,7 @@ function withRmwLock(projectKey, fn) { try { unlinkSync2(rmw); } catch (unlinkErr) { - dlog2(`rmw lock cleanup 
failed for ${projectKey}: ${unlinkErr.message}`); + dlog3(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); } } } @@ -416,20 +451,21 @@ function resetCounter(projectKey) { }); } function tryAcquireWorkerLock(projectKey, maxAgeMs = 10 * 60 * 1e3) { + migrateLegacyStateDir(); mkdirSync5(STATE_DIR2, { recursive: true }); const p = lockPath2(projectKey); - if (existsSync4(p)) { + if (existsSync5(p)) { try { const ageMs = Date.now() - parseInt(readFileSync3(p, "utf-8"), 10); if (Number.isFinite(ageMs) && ageMs < maxAgeMs) return false; } catch (readErr) { - dlog2(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); + dlog3(`worker lock unreadable for ${projectKey}, treating as stale: ${readErr.message}`); } try { unlinkSync2(p); } catch (unlinkErr) { - dlog2(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); + dlog3(`could not unlink stale worker lock for ${projectKey}: ${unlinkErr.message}`); return false; } } @@ -453,15 +489,16 @@ function releaseWorkerLock(projectKey) { } } -// dist/src/skilify/scope-config.js -import { existsSync as existsSync5, mkdirSync as mkdirSync6, readFileSync as readFileSync4, writeFileSync as writeFileSync5 } from "node:fs"; -import { homedir as homedir8 } from "node:os"; -import { join as join9 } from "node:path"; -var STATE_DIR3 = join9(homedir8(), ".deeplake", "state", "skilify"); -var CONFIG_PATH = join9(STATE_DIR3, "config.json"); +// dist/src/skillify/scope-config.js +import { existsSync as existsSync6, mkdirSync as mkdirSync6, readFileSync as readFileSync4, writeFileSync as writeFileSync5 } from "node:fs"; +import { homedir as homedir9 } from "node:os"; +import { join as join10 } from "node:path"; +var STATE_DIR3 = join10(homedir9(), ".deeplake", "state", "skillify"); +var CONFIG_PATH = join10(STATE_DIR3, "config.json"); var DEFAULT = { scope: "me", team: [], install: "project" }; function loadScopeConfig() { - if (!existsSync5(CONFIG_PATH)) + 
migrateLegacyStateDir(); + if (!existsSync6(CONFIG_PATH)) return DEFAULT; try { const raw = JSON.parse(readFileSync4(CONFIG_PATH, "utf-8")); @@ -474,24 +511,24 @@ function loadScopeConfig() { } } -// dist/src/skilify/triggers.js +// dist/src/skillify/triggers.js function forceSessionEndTrigger(opts) { - if (process.env.HIVEMIND_SKILIFY_WORKER === "1") + if (process.env.HIVEMIND_SKILLIFY_WORKER === "1") return; if (!opts.cwd) return; try { const { key: projectKey, project } = deriveProjectKey(opts.cwd); if (!tryAcquireWorkerLock(projectKey)) { - skilifyLog(`SessionEnd: skilify worker already running for ${projectKey}, skipping`); + skillifyLog(`SessionEnd: skillify worker already running for ${projectKey}, skipping`); return; } if (readState(projectKey)) { resetCounter(projectKey); } - skilifyLog(`SessionEnd: spawning skilify worker for project=${project} agent=${opts.agent}`); + skillifyLog(`SessionEnd: spawning skillify worker for project=${project} agent=${opts.agent}`); try { - spawnSkilifyWorker({ + spawnSkillifyWorker({ config: opts.config, cwd: opts.cwd, projectKey, @@ -503,14 +540,14 @@ function forceSessionEndTrigger(opts) { reason: "SessionEnd" }); } catch (e) { - skilifyLog(`SessionEnd spawn failed: ${e?.message ?? e}`); + skillifyLog(`SessionEnd spawn failed: ${e?.message ?? e}`); try { releaseWorkerLock(projectKey); } catch { } } } catch (e) { - skilifyLog(`SessionEnd trigger error: ${e?.message ?? e}`); + skillifyLog(`SessionEnd trigger error: ${e?.message ?? e}`); } } @@ -554,7 +591,7 @@ async function main() { sessionId }); } catch (e) { - wikiLog(`SessionEnd: skilify trigger failed: ${e?.message ?? e}`); + wikiLog(`SessionEnd: skillify trigger failed: ${e?.message ?? 
e}`); } } main().catch((e) => { diff --git a/hermes/bundle/session-start.js b/hermes/bundle/session-start.js index 9577ab74..3973b516 100755 --- a/hermes/bundle/session-start.js +++ b/hermes/bundle/session-start.js @@ -670,12 +670,12 @@ async function autoUpdate(creds, opts) { log3(`agent=${opts.agent} dispatched (pid=${pid ?? "?"}) (${Date.now() - t0}ms total)`); } -// dist/src/skilify/pull.js -import { existsSync as existsSync7, readFileSync as readFileSync7, writeFileSync as writeFileSync5, mkdirSync as mkdirSync5, renameSync as renameSync2, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync3 } from "node:fs"; -import { homedir as homedir7 } from "node:os"; -import { dirname as dirname3, join as join10 } from "node:path"; +// dist/src/skillify/pull.js +import { existsSync as existsSync8, readFileSync as readFileSync7, writeFileSync as writeFileSync5, mkdirSync as mkdirSync5, renameSync as renameSync3, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync3 } from "node:fs"; +import { homedir as homedir8 } from "node:os"; +import { dirname as dirname3, join as join11 } from "node:path"; -// dist/src/skilify/skill-writer.js +// dist/src/skillify/skill-writer.js import { existsSync as existsSync4, mkdirSync as mkdirSync3, readFileSync as readFileSync5, readdirSync, statSync, writeFileSync as writeFileSync3 } from "node:fs"; import { homedir as homedir4 } from "node:os"; import { join as join7 } from "node:path"; @@ -736,18 +736,51 @@ function parseFrontmatter(text) { return { fm, body }; } -// dist/src/skilify/manifest.js -import { existsSync as existsSync5, lstatSync, mkdirSync as mkdirSync4, readFileSync as readFileSync6, renameSync, unlinkSync as unlinkSync2, writeFileSync as writeFileSync4 } from "node:fs"; +// dist/src/skillify/manifest.js +import { existsSync as existsSync6, lstatSync, mkdirSync as mkdirSync4, readFileSync as readFileSync6, renameSync as renameSync2, unlinkSync as unlinkSync2, writeFileSync as 
writeFileSync4 } from "node:fs"; +import { homedir as homedir6 } from "node:os"; +import { dirname as dirname2, join as join9 } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync5, renameSync } from "node:fs"; import { homedir as homedir5 } from "node:os"; -import { dirname as dirname2, join as join8 } from "node:path"; +import { join as join8 } from "node:path"; +var dlog = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join8(homedir5(), ".deeplake", "state"); + const legacy = join8(root, "skilify"); + const current = join8(root, "skillify"); + if (!existsSync5(legacy)) + return; + if (existsSync5(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/manifest.js function emptyManifest() { return { version: 1, entries: [] }; } function manifestPath() { - return join8(homedir5(), ".deeplake", "state", "skilify", "pulled.json"); + return join9(homedir6(), ".deeplake", "state", "skillify", "pulled.json"); } function loadManifest(path = manifestPath()) { - if (!existsSync5(path)) + migrateLegacyStateDir(); + if (!existsSync6(path)) return emptyManifest(); let raw; try { @@ -797,10 +830,11 @@ function loadManifest(path = manifestPath()) { } } function saveManifest(m, path = manifestPath()) { + migrateLegacyStateDir(); mkdirSync4(dirname2(path), { recursive: true }); const tmp = `${path}.tmp`; writeFileSync4(tmp, JSON.stringify(m, null, 2) + "\n", { mode: 384 }); - renameSync(tmp, path); + renameSync2(tmp, path); } function recordPull(entry, path = manifestPath()) { const m = loadManifest(path); @@ -835,7 +869,7 @@ function pruneOrphanedEntries(path = 
manifestPath()) { const live = []; let pruned = 0; for (const e of m.entries) { - if (existsSync5(join8(e.installRoot, e.dirName))) { + if (existsSync6(join9(e.installRoot, e.dirName))) { live.push(e); continue; } @@ -847,31 +881,31 @@ function pruneOrphanedEntries(path = manifestPath()) { return pruned; } -// dist/src/skilify/agent-roots.js -import { existsSync as existsSync6 } from "node:fs"; -import { homedir as homedir6 } from "node:os"; -import { join as join9 } from "node:path"; +// dist/src/skillify/agent-roots.js +import { existsSync as existsSync7 } from "node:fs"; +import { homedir as homedir7 } from "node:os"; +import { join as join10 } from "node:path"; function resolveDetected(home) { const out = []; - const codexInstalled = existsSync6(join9(home, ".codex")); - const piInstalled = existsSync6(join9(home, ".pi", "agent")); - const hermesInstalled = existsSync6(join9(home, ".hermes")); + const codexInstalled = existsSync7(join10(home, ".codex")); + const piInstalled = existsSync7(join10(home, ".pi", "agent")); + const hermesInstalled = existsSync7(join10(home, ".hermes")); if (codexInstalled || piInstalled) { - out.push(join9(home, ".agents", "skills")); + out.push(join10(home, ".agents", "skills")); } if (hermesInstalled) { - out.push(join9(home, ".hermes", "skills")); + out.push(join10(home, ".hermes", "skills")); } if (piInstalled) { - out.push(join9(home, ".pi", "agent", "skills")); + out.push(join10(home, ".pi", "agent", "skills")); } return out; } -function detectAgentSkillsRoots(canonicalRoot, home = homedir6()) { +function detectAgentSkillsRoots(canonicalRoot, home = homedir7()) { return resolveDetected(home).filter((p) => p !== canonicalRoot); } -// dist/src/skilify/pull.js +// dist/src/skillify/pull.js function assertValidAuthor(author) { if (!author) throw new Error("author is empty"); @@ -903,15 +937,15 @@ function isMissingTableError(message) { } function resolvePullDestination(install, cwd) { if (install === "global") - return 
join10(homedir7(), ".claude", "skills"); + return join11(homedir8(), ".claude", "skills"); if (!cwd) throw new Error("install=project requires a cwd"); - return join10(cwd, ".claude", "skills"); + return join11(cwd, ".claude", "skills"); } function fanOutSymlinks(canonicalDir, dirName, agentRoots) { const out = []; for (const root of agentRoots) { - const link = join10(root, dirName); + const link = join11(root, dirName); let existing; try { existing = lstatSync2(link); @@ -954,8 +988,8 @@ function backfillSymlinks(installRoot) { return; const detected = detectAgentSkillsRoots(installRoot); for (const entry of entries) { - const canonical = join10(entry.installRoot, entry.dirName); - if (!existsSync7(canonical)) + const canonical = join11(entry.installRoot, entry.dirName); + if (!existsSync8(canonical)) continue; const fresh = fanOutSymlinks(canonical, entry.dirName, detected); if (sameSorted(fresh, entry.symlinks)) @@ -1040,7 +1074,7 @@ function renderFrontmatter(fm) { return lines.join("\n"); } function readLocalVersion(path) { - if (!existsSync7(path)) + if (!existsSync8(path)) return null; try { const text = readFileSync7(path, "utf-8"); @@ -1129,8 +1163,8 @@ async function runPull(opts) { summary.skipped++; continue; } - const skillDir = join10(root, dirName); - const skillFile = join10(skillDir, "SKILL.md"); + const skillDir = join11(root, dirName); + const skillFile = join11(skillDir, "SKILL.md"); const remoteVersion = Number(row.version ?? 
1); const localVersion = readLocalVersion(skillFile); const action = decideAction({ @@ -1142,9 +1176,9 @@ async function runPull(opts) { let manifestError; if (action === "wrote") { mkdirSync5(skillDir, { recursive: true }); - if (existsSync7(skillFile)) { + if (existsSync8(skillFile)) { try { - renameSync2(skillFile, `${skillFile}.bak`); + renameSync3(skillFile, `${skillFile}.bak`); } catch { } } @@ -1189,8 +1223,8 @@ async function runPull(opts) { return summary; } -// dist/src/skilify/auto-pull.js -var log4 = (msg) => log("skilify-autopull", msg); +// dist/src/skillify/auto-pull.js +var log4 = (msg) => log("skillify-autopull", msg); var DEFAULT_TIMEOUT_MS = 5e3; function withTimeout(p, ms) { let timer = null; @@ -1264,23 +1298,23 @@ Organization management \u2014 each argument is SEPARATE (do NOT quote subcomman - hivemind members \u2014 list members - hivemind remove \u2014 remove member -SKILLS (skilify) \u2014 mine + share reusable skills across the org: -- hivemind skilify \u2014 show scope/team/install + per-project state -- hivemind skilify pull \u2014 sync project skills from the org table -- hivemind skilify pull --user \u2014 only that author's skills -- hivemind skilify pull --users a,b,c \u2014 multiple authors (CSV) -- hivemind skilify pull --all-users \u2014 explicit "no author filter" -- hivemind skilify pull --to project|global \u2014 install location -- hivemind skilify pull --dry-run \u2014 preview only -- hivemind skilify pull --force \u2014 overwrite local (creates .bak) -- hivemind skilify pull \u2014 pull only that skill (combines with --user) -- hivemind skilify unpull \u2014 remove every skill previously installed by pull -- hivemind skilify unpull --user \u2014 remove only that author's pulls -- hivemind skilify unpull --not-mine \u2014 remove all pulls except your own -- hivemind skilify unpull --dry-run \u2014 preview without touching disk -- hivemind skilify scope \u2014 sharing scope for new skills -- hivemind skilify install \u2014 
default install location -- hivemind skilify team add|remove|list \u2014 manage team list`; +SKILLS (skillify) \u2014 mine + share reusable skills across the org: +- hivemind skillify \u2014 show scope/team/install + per-project state +- hivemind skillify pull \u2014 sync project skills from the org table +- hivemind skillify pull --user \u2014 only that author's skills +- hivemind skillify pull --users a,b,c \u2014 multiple authors (CSV) +- hivemind skillify pull --all-users \u2014 explicit "no author filter" +- hivemind skillify pull --to project|global \u2014 install location +- hivemind skillify pull --dry-run \u2014 preview only +- hivemind skillify pull --force \u2014 overwrite local (creates .bak) +- hivemind skillify pull \u2014 pull only that skill (combines with --user) +- hivemind skillify unpull \u2014 remove every skill previously installed by pull +- hivemind skillify unpull --user \u2014 remove only that author's pulls +- hivemind skillify unpull --not-mine \u2014 remove all pulls except your own +- hivemind skillify unpull --dry-run \u2014 preview without touching disk +- hivemind skillify scope \u2014 sharing scope for new skills +- hivemind skillify install \u2014 default install location +- hivemind skillify team add|remove|list \u2014 manage team list`; async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, workspaceId) { const summaryPath = `/summaries/${userName}/${sessionId}.md`; const existing = await api.query(`SELECT path FROM "${table}" WHERE path = '${sqlStr(summaryPath)}' LIMIT 1`); diff --git a/codex/bundle/skilify-worker.js b/hermes/bundle/skillify-worker.js similarity index 92% rename from codex/bundle/skilify-worker.js rename to hermes/bundle/skillify-worker.js index 8eb534fd..987a16ba 100755 --- a/codex/bundle/skilify-worker.js +++ b/hermes/bundle/skillify-worker.js @@ -1,8 +1,8 @@ #!/usr/bin/env node -// dist/src/skilify/skilify-worker.js -import { readFileSync as readFileSync3, writeFileSync as 
writeFileSync3, existsSync as existsSync4, appendFileSync as appendFileSync2, rmSync } from "node:fs"; -import { join as join5 } from "node:path"; +// dist/src/skillify/skillify-worker.js +import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, existsSync as existsSync5, appendFileSync as appendFileSync2, rmSync } from "node:fs"; +import { join as join6 } from "node:path"; // dist/src/utils/debug.js import { appendFileSync } from "node:fs"; @@ -29,7 +29,7 @@ function deeplakeClientHeader() { return { [DEEPLAKE_CLIENT_HEADER]: deeplakeClientValue() }; } -// dist/src/skilify/extractors/index.js +// dist/src/skillify/extractors/index.js function extractPairs(rows) { const pairs = []; let pendingPrompt = null; @@ -60,7 +60,7 @@ function extractPairs(rows) { return pairs; } -// dist/src/skilify/skill-writer.js +// dist/src/skillify/skill-writer.js import { existsSync, mkdirSync, readFileSync, readdirSync, statSync, writeFileSync } from "node:fs"; import { homedir as homedir2 } from "node:os"; import { join as join2 } from "node:path"; @@ -216,7 +216,7 @@ function resolveSkillsRoot(install, cwd) { return join2(cwd, ".claude", "skills"); } -// dist/src/skilify/skills-table.js +// dist/src/skillify/skills-table.js import { randomUUID } from "node:crypto"; // dist/src/utils/sql.js @@ -227,7 +227,7 @@ function sqlIdent(name) { return name; } -// dist/src/skilify/skills-table.js +// dist/src/skillify/skills-table.js function createSkillsTableSql(tableName) { const safe = sqlIdent(tableName); return `CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL 
DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`; @@ -256,7 +256,7 @@ async function insertSkillRow(args) { } } -// dist/src/skilify/gate-parser.js +// dist/src/skillify/gate-parser.js function extractJsonBlock(s) { const trimmed = s.trim(); if (!trimmed) @@ -294,7 +294,7 @@ function parseVerdict(raw) { } } -// dist/src/skilify/gate-runner.js +// dist/src/skillify/gate-runner.js import { execFileSync } from "node:child_process"; import { existsSync as existsSync2 } from "node:fs"; import { homedir as homedir3 } from "node:os"; @@ -403,28 +403,61 @@ function runGate(opts) { } } -// dist/src/skilify/state.js -import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync3, unlinkSync, openSync, closeSync } from "node:fs"; +// dist/src/skillify/state.js +import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync as renameSync2, existsSync as existsSync4, unlinkSync, openSync, closeSync } from "node:fs"; import { execSync } from "node:child_process"; -import { homedir as homedir4 } from "node:os"; +import { homedir as homedir5 } from "node:os"; import { createHash } from "node:crypto"; -import { join as join4, basename } from "node:path"; -var dlog = (msg) => log("skilify-state", msg); -var STATE_DIR = join4(homedir4(), ".deeplake", "state", "skilify"); +import { join as join5, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync3, renameSync } from "node:fs"; +import { homedir as homedir4 } from "node:os"; +import { join as join4 } from "node:path"; +var dlog = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join4(homedir4(), ".deeplake", 
"state"); + const legacy = join4(root, "skilify"); + const current = join4(root, "skillify"); + if (!existsSync3(legacy)) + return; + if (existsSync3(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog2 = (msg) => log("skillify-state", msg); +var STATE_DIR = join5(homedir5(), ".deeplake", "state", "skillify"); var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); var TRIGGER_THRESHOLD = (() => { - const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? n : 20; })(); function statePath(projectKey) { - return join4(STATE_DIR, `${projectKey}.json`); + return join5(STATE_DIR, `${projectKey}.json`); } function lockPath(projectKey) { - return join4(STATE_DIR, `${projectKey}.lock`); + return join5(STATE_DIR, `${projectKey}.lock`); } function readState(projectKey) { + migrateLegacyStateDir(); const p = statePath(projectKey); - if (!existsSync3(p)) + if (!existsSync4(p)) return null; try { return JSON.parse(readFileSync2(p, "utf-8")); @@ -433,13 +466,15 @@ function readState(projectKey) { } } function writeState(projectKey, state) { + migrateLegacyStateDir(); mkdirSync2(STATE_DIR, { recursive: true }); const p = statePath(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; writeFileSync2(tmp, JSON.stringify(state, null, 2)); - renameSync(tmp, p); + renameSync2(tmp, p); } function withRmwLock(projectKey, fn) { + migrateLegacyStateDir(); mkdirSync2(STATE_DIR, { recursive: true }); const rmw = lockPath(projectKey) + ".rmw"; const deadline = Date.now() + 2e3; @@ -451,11 +486,11 @@ function withRmwLock(projectKey, fn) { if (e.code !== "EEXIST") throw e; if 
(Date.now() > deadline) { - dlog(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); try { unlinkSync(rmw); } catch (unlinkErr) { - dlog(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); } continue; } @@ -469,7 +504,7 @@ function withRmwLock(projectKey, fn) { try { unlinkSync(rmw); } catch (unlinkErr) { - dlog(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); + dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); } } } @@ -509,18 +544,18 @@ function releaseWorkerLock(projectKey) { } } -// dist/src/skilify/skilify-worker.js +// dist/src/skillify/skillify-worker.js var cfg = JSON.parse(readFileSync3(process.argv[2], "utf-8")); var tmpDir = cfg.tmpDir; -var verdictPath = join5(tmpDir, "verdict.json"); -var promptPath = join5(tmpDir, "prompt.txt"); +var verdictPath = join6(tmpDir, "verdict.json"); +var promptPath = join6(tmpDir, "prompt.txt"); var SESSIONS_TO_MINE = 10; var PAIR_CHAR_CAP = 2e3; var TOTAL_PAIRS_CHAR_CAP = 4e4; var EXISTING_SKILLS_CHAR_CAP = 3e4; function wlog(msg) { try { - appendFileSync2(cfg.skilifyLog, `[${utcTimestamp()}] skilify-worker(${cfg.projectKey}): ${msg} + appendFileSync2(cfg.skillifyLog, `[${utcTimestamp()}] skillify-worker(${cfg.projectKey}): ${msg} `); } catch { } @@ -715,7 +750,7 @@ function buildPrompt(pairs) { ].join("\n"); } function readVerdict(stdout) { - if (existsSync4(verdictPath)) { + if (existsSync5(verdictPath)) { try { const text = readFileSync3(verdictPath, "utf-8"); const v2 = parseVerdict(text); @@ -784,9 +819,9 @@ async function main() { timeoutMs: 12e4 }); try { - writeFileSync3(join5(tmpDir, "gate-stdout.txt"), gate.stdout); + writeFileSync3(join6(tmpDir, "gate-stdout.txt"), gate.stdout); if (gate.stderr) - writeFileSync3(join5(tmpDir, "gate-stderr.txt"), gate.stderr); + 
writeFileSync3(join6(tmpDir, "gate-stderr.txt"), gate.stderr); } catch { } if (gate.errored) { diff --git a/openclaw/skills/SKILL.md b/openclaw/skills/SKILL.md index 98eaa96a..c123e471 100644 --- a/openclaw/skills/SKILL.md +++ b/openclaw/skills/SKILL.md @@ -45,28 +45,28 @@ Do NOT jump straight to reading raw JSONL files. Always start with `hivemind_ind - `/hivemind_update` — shows how to install (ask the agent, or run `hivemind update` in your terminal) - `/hivemind_autoupdate [on|off]` — toggle the agent-facing update nudge (on by default: when a newer version is available, the agent is prompted to install it via `exec` if you ask to update) -## Skill Management (skilify) +## Skill Management (skillify) Hivemind also mines reusable Claude skills from agent sessions and stores them in a per-org Deeplake table. Openclaw itself doesn't run sessions to mine, but you can pull skills others have already mined for the user. These run in the user's terminal (the openclaw plugin does not register them as `/hivemind_*` commands): -- `hivemind skilify` — show scope/team/install + per-project state -- `hivemind skilify pull` — sync skills for the current project from the org table -- `hivemind skilify pull --user ` — only that author's skills -- `hivemind skilify pull --users a,b,c` — multiple authors (CSV) -- `hivemind skilify pull --all-users` — explicit "no author filter" -- `hivemind skilify pull --to project|global` — install location (`/.claude/skills/` vs `~/.claude/skills/`) -- `hivemind skilify pull --dry-run` — preview without touching disk -- `hivemind skilify pull --force` — overwrite local (creates `.bak`) -- `hivemind skilify pull ` — pull only that one skill (combines with `--user`) -- `hivemind skilify unpull` — remove every skill previously installed by pull -- `hivemind skilify unpull --user ` — remove only that author's pulls -- `hivemind skilify unpull --not-mine` — remove all pulls except your own -- `hivemind skilify unpull --dry-run` — preview without 
touching disk -- `hivemind skilify scope ` — set sharing scope for new skills -- `hivemind skilify install ` — default install location -- `hivemind skilify team add|remove|list ` — manage team list - -If the user asks to "pull skills from X", "share skills with the team", or similar, suggest the matching `hivemind skilify` command. Run `hivemind skilify --help` for the full reference. +- `hivemind skillify` — show scope/team/install + per-project state +- `hivemind skillify pull` — sync skills for the current project from the org table +- `hivemind skillify pull --user ` — only that author's skills +- `hivemind skillify pull --users a,b,c` — multiple authors (CSV) +- `hivemind skillify pull --all-users` — explicit "no author filter" +- `hivemind skillify pull --to project|global` — install location (`/.claude/skills/` vs `~/.claude/skills/`) +- `hivemind skillify pull --dry-run` — preview without touching disk +- `hivemind skillify pull --force` — overwrite local (creates `.bak`) +- `hivemind skillify pull ` — pull only that one skill (combines with `--user`) +- `hivemind skillify unpull` — remove every skill previously installed by pull +- `hivemind skillify unpull --user ` — remove only that author's pulls +- `hivemind skillify unpull --not-mine` — remove all pulls except your own +- `hivemind skillify unpull --dry-run` — preview without touching disk +- `hivemind skillify scope ` — set sharing scope for new skills +- `hivemind skillify install ` — default install location +- `hivemind skillify team add|remove|list ` — manage team list + +If the user asks to "pull skills from X", "share skills with the team", or similar, suggest the matching `hivemind skillify` command. Run `hivemind skillify --help` for the full reference. 
## Limits diff --git a/openclaw/src/index.ts b/openclaw/src/index.ts index efc583a8..09bcf2d8 100644 --- a/openclaw/src/index.ts +++ b/openclaw/src/index.ts @@ -57,7 +57,7 @@ import { deeplakeClientHeader } from "../../src/utils/client-header.js"; // memory ∪ sessions, path filters, JSONB normalization, virtual /index.md). import { searchDeeplakeTables, buildGrepSearchOptions, compileGrepRegex, normalizeContent, type GrepMatchParams } from "../../src/shell/grep-core.js"; import { readVirtualPathContent } from "../../src/hooks/virtual-table-query.js"; -// Resolve sibling skilify-worker.js path at runtime via import.meta.url. The +// Resolve sibling skillify-worker.js path at runtime via import.meta.url. The // openclaw plugin is bundled to openclaw/dist/index.js, then installed to // ~/.openclaw/extensions/hivemind/dist/index.js by install-openclaw.ts. The // worker bundle is its sibling at the same level. @@ -67,7 +67,7 @@ import { homedir, tmpdir } from "node:os"; import { existsSync as fsExists, mkdirSync as fsMkdir, openSync as fsOpen, closeSync as fsClose, writeFileSync as fsWriteFile, constants as fsConstants, - readFileSync as fsReadFile, + readFileSync as fsReadFile, renameSync as fsRename, } from "node:fs"; import { createHash } from "node:crypto"; // node:child_process is stubbed in the main openclaw bundle (see esbuild.config.mjs @@ -299,11 +299,11 @@ let captureEnabled = true; const capturedCounts = new Map(); const fallbackSessionId = crypto.randomUUID(); -// --- Skilify worker spawn (mirror of src/skilify/spawn-skilify-worker.ts) --- +// --- Skillify worker spawn (mirror of src/skillify/spawn-skillify-worker.ts) --- // -// OpenClaw can't import the shared skilify TS modules — its bundle is +// OpenClaw can't import the shared skillify TS modules — its bundle is // stubbed for child_process and code-splits the gateway. Inline the spawn -// shape here, keyed off the bundled sibling `skilify-worker.js`. 
Mining is +// shape here, keyed off the bundled sibling `skillify-worker.js`. Mining is // fired once per agent_end with a per-projectKey lock; per the assumption // "one openclaw session at a time", subsequent agent_ends within the same // session are skipped by the lock and that's fine — the worker advances @@ -312,12 +312,37 @@ const fallbackSessionId = crypto.randomUUID(); const __openclaw_filename = fileURLToPath(import.meta.url); const __openclaw_dirname = dirnamePath(__openclaw_filename); -const OPENCLAW_SKILIFY_WORKER_PATH = joinPath(__openclaw_dirname, "skilify-worker.js"); -const OPENCLAW_SKILIFY_STATE_DIR = joinPath(homedir(), ".deeplake", "state", "skilify"); +const OPENCLAW_SKILLIFY_WORKER_PATH = joinPath(__openclaw_dirname, "skillify-worker.js"); +const OPENCLAW_SKILLIFY_STATE_DIR = joinPath(homedir(), ".deeplake", "state", "skillify"); +const OPENCLAW_SKILLIFY_LEGACY_STATE_DIR = joinPath(homedir(), ".deeplake", "state", "skilify"); + +// One-shot rename of the pre-rename state dir. Mirrors src/skillify/legacy-migration.ts; +// inlined because openclaw is a self-contained bundle that can't import from src/skillify. +// Must run BEFORE any fsMkdir on OPENCLAW_SKILLIFY_STATE_DIR — once the new dir exists, +// the migration becomes a no-op and the legacy data is orphaned. +// +// Error policy mirrors the shared helper: only EXDEV/EPERM are swallowed +// (cross-device link / sandboxed home — legacy dir left in place, new dir +// starts fresh). Every other code re-throws so the caller sees the real +// I/O error instead of silently losing user state. 
+let openclawSkillifyMigrationAttempted = false; +function migrateOpenclawSkillifyLegacyStateDir(): void { + if (openclawSkillifyMigrationAttempted) return; + openclawSkillifyMigrationAttempted = true; + if (!fsExists(OPENCLAW_SKILLIFY_LEGACY_STATE_DIR)) return; + if (fsExists(OPENCLAW_SKILLIFY_STATE_DIR)) return; + try { + fsRename(OPENCLAW_SKILLIFY_LEGACY_STATE_DIR, OPENCLAW_SKILLIFY_STATE_DIR); + } catch (err) { + const code = (err as NodeJS.ErrnoException).code; + if (code === "EXDEV" || code === "EPERM") return; + throw err; + } +} function deriveOpenclawProjectKey(channel: string): { key: string; project: string } { const project = channel || "openclaw"; - // sha1(channel) — same shape as deriveProjectKey in src/skilify/state.ts + // sha1(channel) — same shape as deriveProjectKey in src/skillify/state.ts // but anchored on the openclaw channel string instead of a filesystem cwd. // Two openclaw channels with the same name (e.g. shared workspace channel) // share a project_key, which is intentional: their skills cluster together. @@ -325,10 +350,11 @@ function deriveOpenclawProjectKey(channel: string): { key: string; project: stri return { key, project }; } -function tryAcquireOpenclawSkilifyLock(projectKey: string): boolean { +function tryAcquireOpenclawSkillifyLock(projectKey: string): boolean { try { - fsMkdir(OPENCLAW_SKILIFY_STATE_DIR, { recursive: true }); - const lockPath = joinPath(OPENCLAW_SKILIFY_STATE_DIR, `${projectKey}.worker.lock`); + migrateOpenclawSkillifyLegacyStateDir(); + fsMkdir(OPENCLAW_SKILLIFY_STATE_DIR, { recursive: true }); + const lockPath = joinPath(OPENCLAW_SKILLIFY_STATE_DIR, `${projectKey}.worker.lock`); const fd = fsOpen(lockPath, fsConstants.O_CREAT | fsConstants.O_EXCL | fsConstants.O_WRONLY); fsClose(fd); return true; @@ -347,7 +373,7 @@ interface OpenclawSpawnArgs { } /** - * Pick a delegate gate-CLI for openclaw skilify mining. + * Pick a delegate gate-CLI for openclaw skillify mining. 
* * Openclaw is a gateway, not an agent CLI — there's no `openclaw -p ` * binary the gate-runner can invoke. Mining sessions still need a gate call @@ -377,26 +403,26 @@ function detectOpenclawGateAgent(): GateAgent | null { return null; } -function spawnOpenclawSkilifyWorker(a: OpenclawSpawnArgs): void { - if (!fsExists(OPENCLAW_SKILIFY_WORKER_PATH)) { - a.loggerWarn?.(`skilify worker missing at ${OPENCLAW_SKILIFY_WORKER_PATH} — reinstall openclaw plugin`); +function spawnOpenclawSkillifyWorker(a: OpenclawSpawnArgs): void { + if (!fsExists(OPENCLAW_SKILLIFY_WORKER_PATH)) { + a.loggerWarn?.(`skillify worker missing at ${OPENCLAW_SKILLIFY_WORKER_PATH} — reinstall openclaw plugin`); return; } const gateAgent = detectOpenclawGateAgent(); if (!gateAgent) { - a.loggerWarn?.(`skilify spawn: no delegate gate CLI found on PATH (need one of: claude, codex, cursor-agent, hermes, pi). Mining skipped.`); + a.loggerWarn?.(`skillify spawn: no delegate gate CLI found on PATH (need one of: claude, codex, cursor-agent, hermes, pi). Mining skipped.`); return; } const { key: projectKey, project } = deriveOpenclawProjectKey(a.channel); - if (!tryAcquireOpenclawSkilifyLock(projectKey)) { + if (!tryAcquireOpenclawSkillifyLock(projectKey)) { // A worker is already running for this project — skip (next agent_end may // re-fire after the worker releases the lock, or the worker watermark // advance makes the re-fire a no-op). return; } - const tmpDir = joinPath(tmpdir(), `deeplake-skilify-openclaw-${projectKey}-${Date.now()}`); + const tmpDir = joinPath(tmpdir(), `deeplake-skillify-openclaw-${projectKey}-${Date.now()}`); try { fsMkdir(tmpDir, { recursive: true, mode: 0o700 }); } - catch (e: any) { a.loggerWarn?.(`skilify spawn: mkdir failed: ${e?.message ?? e}`); return; } + catch (e: any) { a.loggerWarn?.(`skillify spawn: mkdir failed: ${e?.message ?? 
e}`); return; } const configPath = joinPath(tmpDir, "config.json"); // install: "global" — openclaw has no per-project filesystem cwd, so written @@ -424,20 +450,20 @@ function spawnOpenclawSkilifyWorker(a: OpenclawSpawnArgs): void { cursorModel: undefined, hermesProvider: undefined, hermesModel: undefined, - skilifyLog: joinPath(homedir(), ".deeplake", "hivemind-openclaw-skilify.log"), + skillifyLog: joinPath(homedir(), ".deeplake", "hivemind-openclaw-skillify.log"), currentSessionId: a.sessionId, }; try { fsWriteFile(configPath, JSON.stringify(config), { mode: 0o600 }); } - catch (e: any) { a.loggerWarn?.(`skilify spawn: config write failed: ${e?.message ?? e}`); return; } + catch (e: any) { a.loggerWarn?.(`skillify spawn: config write failed: ${e?.message ?? e}`); return; } try { - realSpawn(process.execPath, [OPENCLAW_SKILIFY_WORKER_PATH, configPath], { + realSpawn(process.execPath, [OPENCLAW_SKILLIFY_WORKER_PATH, configPath], { detached: true, stdio: "ignore", - env: { ...process.env, HIVEMIND_SKILIFY_WORKER: "1", HIVEMIND_CAPTURE: "false" }, + env: { ...process.env, HIVEMIND_SKILLIFY_WORKER: "1", HIVEMIND_CAPTURE: "false" }, }).unref(); } catch (e: any) { - a.loggerWarn?.(`skilify spawn: spawn failed: ${e?.message ?? e}`); + a.loggerWarn?.(`skillify spawn: spawn failed: ${e?.message ?? e}`); } } @@ -642,17 +668,17 @@ export default definePluginEntry({ handler: async () => { const { ensureHivemindAllowlisted } = await loadSetupConfig(); const result = ensureHivemindAllowlisted(); - // Phase C: surface skilify CLI in setup output. OpenClaw users have no + // Phase C: surface skillify CLI in setup output. OpenClaw users have no // session-start banner equivalent and no Bash tool — without this hint // they can't discover that mining runs in the background or that they // can pull teammates' skills. The CLI itself runs from the user's // terminal, not from the agent. 
- const skilifyHint = `\n\nSkill mining (skilify) runs in the background after each turn — your conversations get crystallised into reusable skills automatically. From your terminal:\n hivemind skilify status — see what's been mined\n hivemind skilify pull — fetch teammates' skills`; + const skillifyHint = `\n\nSkill mining (skillify) runs in the background after each turn — your conversations get crystallised into reusable skills automatically. From your terminal:\n hivemind skillify status — see what's been mined\n hivemind skillify pull — fetch teammates' skills`; if (result.status === "already-set") { - return { text: `✅ Hivemind tools are already enabled in your allowlist.\n\nNo changes needed — memory tools are available to the agent.${skilifyHint}` }; + return { text: `✅ Hivemind tools are already enabled in your allowlist.\n\nNo changes needed — memory tools are available to the agent.${skillifyHint}` }; } if (result.status === "added") { - return { text: `✅ Added "hivemind" to your tool allowlist.\n\nOpenclaw will detect the config change and restart. On the next turn, the agent will have access to hivemind_search, hivemind_read, and hivemind_index.\n\nBackup of previous config: ${result.backupPath}${skilifyHint}` }; + return { text: `✅ Added "hivemind" to your tool allowlist.\n\nOpenclaw will detect the config change and restart. 
On the next turn, the agent will have access to hivemind_search, hivemind_read, and hivemind_index.\n\nBackup of previous config: ${result.backupPath}${skillifyHint}` }; } return { text: `⚠️ Could not update allowlist: ${result.error}\n\nManual fix: open ${result.configPath} and add "hivemind" to the "alsoAllow" array under "tools".` }; }, @@ -1119,13 +1145,13 @@ export default definePluginEntry({ logger.info?.(`Auto-captured ${newMessages.length} messages`); - // Skilify: fire the worker after capture so the just-stored messages + // Skillify: fire the worker after capture so the just-stored messages // become candidates for skill mining. Lock-protected, fire-and-forget, // never blocks the agent. Worker reads from the sessions table we // just wrote to. Non-fatal: a spawn failure here only loses one // mining attempt, never breaks capture. try { - spawnOpenclawSkilifyWorker({ + spawnOpenclawSkillifyWorker({ apiUrl: cfg.apiUrl, token: cfg.token, orgId: cfg.orgId, @@ -1133,10 +1159,10 @@ export default definePluginEntry({ userName: cfg.userName, channel: ev.channel || "openclaw", sessionId: sid, - loggerWarn: (msg) => logger.error(`Skilify spawn: ${msg}`), + loggerWarn: (msg) => logger.error(`Skillify spawn: ${msg}`), }); } catch (e: any) { - logger.error(`Skilify spawn threw: ${e?.message ?? e}`); + logger.error(`Skillify spawn threw: ${e?.message ?? e}`); } } catch (err) { logger.error(`Auto-capture failed: ${err instanceof Error ? 
err.message : String(err)}`); diff --git a/package-lock.json b/package-lock.json index 3b72472b..1c2a3bf2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1097,6 +1097,16 @@ "tslib": "^2.4.0" } }, + "node_modules/@emnapi/runtime": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.10.0.tgz", + "integrity": "sha512-ewvYlk86xUoGI0zQRNq/mC+16R1QeDlKQy21Ki3oSYXNgLb45GV1P6A0M+/s6nyCuNDqe5VpaY84BzXGwVbwFA==", + "license": "MIT", + "optional": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, "node_modules/@esbuild/aix-ppc64": { "version": "0.28.0", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.28.0.tgz", @@ -3521,7 +3531,6 @@ "integrity": "sha512-/MBdrkA8t6hbdCWFKs09dPik774xvs4Z6L4bycdCxYNLHM8oZuRyosumQMG19LUlBsB6GeVpL1q4kFFazvyKGA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@bcoe/v8-coverage": "^1.0.2", "@vitest/utils": "4.1.3", @@ -4517,7 +4526,6 @@ "dev": true, "hasInstallScript": true, "license": "MIT", - "peer": true, "bin": { "esbuild": "bin/esbuild" }, @@ -4690,7 +4698,6 @@ "resolved": "https://registry.npmjs.org/express/-/express-5.2.1.tgz", "integrity": "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==", "license": "MIT", - "peer": true, "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.1", @@ -5201,7 +5208,6 @@ "resolved": "https://registry.npmjs.org/hono/-/hono-4.12.15.tgz", "integrity": "sha512-qM0jDhFEaCBb4TxoW7f53Qrpv9RBiayUHo0S52JudprkhvpjIrGoU1mnnr29Fvd1U335ZFPZQY1wlkqgfGXyLg==", "license": "MIT", - "peer": true, "engines": { "node": ">=16.9.0" } @@ -6565,6 +6571,34 @@ "dev": true, "license": "MIT" }, + "node_modules/pg": { + "version": "8.20.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.20.0.tgz", + "integrity": "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==", + "license": "MIT", + "optional": true, + "dependencies": { + 
"pg-connection-string": "^2.12.0", + "pg-pool": "^3.13.0", + "pg-protocol": "^1.13.0", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.3.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, "node_modules/pg-cloudflare": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz", @@ -8596,7 +8630,6 @@ "integrity": "sha512-P1PbweD+2/udplnThz3btF4cf6AgPky7kk23RtHUkJIU5BIxwPprhRGmOAHs6FTI7UiGbTNrgNP6jSYD6JaRnw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "lightningcss": "^1.32.0", "picomatch": "^4.0.4", @@ -8675,7 +8708,6 @@ "integrity": "sha512-DBc4Tx0MPNsqb9isoyOq00lHftVx/KIU44QOm2q59npZyLUkENn8TMFsuzuO+4U2FUa9rgbbPt3udrP25GcjXw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@vitest/expect": "4.1.3", "@vitest/mocker": "4.1.3", @@ -8908,7 +8940,6 @@ "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", "license": "MIT", - "peer": true, "funding": { "url": "https://github.com/sponsors/colinhacks" } diff --git a/pi/bundle/autopull-worker.js b/pi/bundle/autopull-worker.js index b427117f..a80d4f51 100755 --- a/pi/bundle/autopull-worker.js +++ b/pi/bundle/autopull-worker.js @@ -533,12 +533,12 @@ var DeeplakeApi = class { } }; -// dist/src/skilify/pull.js -import { existsSync as existsSync6, readFileSync as readFileSync5, writeFileSync as writeFileSync4, mkdirSync as mkdirSync4, renameSync as renameSync2, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync2 } from "node:fs"; -import { homedir as homedir6 } from "node:os"; -import { dirname as dirname2, join as join7 } from "node:path"; +// dist/src/skillify/pull.js +import { existsSync as existsSync7, readFileSync as 
readFileSync5, writeFileSync as writeFileSync4, mkdirSync as mkdirSync4, renameSync as renameSync3, lstatSync as lstatSync2, readlinkSync, symlinkSync, unlinkSync as unlinkSync2 } from "node:fs"; +import { homedir as homedir7 } from "node:os"; +import { dirname as dirname2, join as join8 } from "node:path"; -// dist/src/skilify/skill-writer.js +// dist/src/skillify/skill-writer.js import { existsSync as existsSync3, mkdirSync as mkdirSync2, readFileSync as readFileSync3, readdirSync, statSync, writeFileSync as writeFileSync2 } from "node:fs"; import { homedir as homedir3 } from "node:os"; import { join as join4 } from "node:path"; @@ -599,18 +599,51 @@ function parseFrontmatter(text) { return { fm, body }; } -// dist/src/skilify/manifest.js -import { existsSync as existsSync4, lstatSync, mkdirSync as mkdirSync3, readFileSync as readFileSync4, renameSync, unlinkSync, writeFileSync as writeFileSync3 } from "node:fs"; +// dist/src/skillify/manifest.js +import { existsSync as existsSync5, lstatSync, mkdirSync as mkdirSync3, readFileSync as readFileSync4, renameSync as renameSync2, unlinkSync, writeFileSync as writeFileSync3 } from "node:fs"; +import { homedir as homedir5 } from "node:os"; +import { dirname, join as join6 } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync4, renameSync } from "node:fs"; import { homedir as homedir4 } from "node:os"; -import { dirname, join as join5 } from "node:path"; +import { join as join5 } from "node:path"; +var dlog = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join5(homedir4(), ".deeplake", "state"); + const legacy = join5(root, "skilify"); + const current = join5(root, "skillify"); + if (!existsSync4(legacy)) + return; + if (existsSync4(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code 
= err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/manifest.js function emptyManifest() { return { version: 1, entries: [] }; } function manifestPath() { - return join5(homedir4(), ".deeplake", "state", "skilify", "pulled.json"); + return join6(homedir5(), ".deeplake", "state", "skillify", "pulled.json"); } function loadManifest(path = manifestPath()) { - if (!existsSync4(path)) + migrateLegacyStateDir(); + if (!existsSync5(path)) return emptyManifest(); let raw; try { @@ -660,10 +693,11 @@ function loadManifest(path = manifestPath()) { } } function saveManifest(m, path = manifestPath()) { + migrateLegacyStateDir(); mkdirSync3(dirname(path), { recursive: true }); const tmp = `${path}.tmp`; writeFileSync3(tmp, JSON.stringify(m, null, 2) + "\n", { mode: 384 }); - renameSync(tmp, path); + renameSync2(tmp, path); } function recordPull(entry, path = manifestPath()) { const m = loadManifest(path); @@ -698,7 +732,7 @@ function pruneOrphanedEntries(path = manifestPath()) { const live = []; let pruned = 0; for (const e of m.entries) { - if (existsSync4(join5(e.installRoot, e.dirName))) { + if (existsSync5(join6(e.installRoot, e.dirName))) { live.push(e); continue; } @@ -710,31 +744,31 @@ function pruneOrphanedEntries(path = manifestPath()) { return pruned; } -// dist/src/skilify/agent-roots.js -import { existsSync as existsSync5 } from "node:fs"; -import { homedir as homedir5 } from "node:os"; -import { join as join6 } from "node:path"; +// dist/src/skillify/agent-roots.js +import { existsSync as existsSync6 } from "node:fs"; +import { homedir as homedir6 } from "node:os"; +import { join as join7 } from "node:path"; function resolveDetected(home) { const out = []; - const codexInstalled = existsSync5(join6(home, ".codex")); - const piInstalled = existsSync5(join6(home, ".pi", "agent")); - const hermesInstalled = existsSync5(join6(home, 
".hermes")); + const codexInstalled = existsSync6(join7(home, ".codex")); + const piInstalled = existsSync6(join7(home, ".pi", "agent")); + const hermesInstalled = existsSync6(join7(home, ".hermes")); if (codexInstalled || piInstalled) { - out.push(join6(home, ".agents", "skills")); + out.push(join7(home, ".agents", "skills")); } if (hermesInstalled) { - out.push(join6(home, ".hermes", "skills")); + out.push(join7(home, ".hermes", "skills")); } if (piInstalled) { - out.push(join6(home, ".pi", "agent", "skills")); + out.push(join7(home, ".pi", "agent", "skills")); } return out; } -function detectAgentSkillsRoots(canonicalRoot, home = homedir5()) { +function detectAgentSkillsRoots(canonicalRoot, home = homedir6()) { return resolveDetected(home).filter((p) => p !== canonicalRoot); } -// dist/src/skilify/pull.js +// dist/src/skillify/pull.js function assertValidAuthor(author) { if (!author) throw new Error("author is empty"); @@ -766,15 +800,15 @@ function isMissingTableError(message) { } function resolvePullDestination(install, cwd) { if (install === "global") - return join7(homedir6(), ".claude", "skills"); + return join8(homedir7(), ".claude", "skills"); if (!cwd) throw new Error("install=project requires a cwd"); - return join7(cwd, ".claude", "skills"); + return join8(cwd, ".claude", "skills"); } function fanOutSymlinks(canonicalDir, dirName, agentRoots) { const out = []; for (const root of agentRoots) { - const link = join7(root, dirName); + const link = join8(root, dirName); let existing; try { existing = lstatSync2(link); @@ -817,8 +851,8 @@ function backfillSymlinks(installRoot) { return; const detected = detectAgentSkillsRoots(installRoot); for (const entry of entries) { - const canonical = join7(entry.installRoot, entry.dirName); - if (!existsSync6(canonical)) + const canonical = join8(entry.installRoot, entry.dirName); + if (!existsSync7(canonical)) continue; const fresh = fanOutSymlinks(canonical, entry.dirName, detected); if (sameSorted(fresh, 
entry.symlinks)) @@ -903,7 +937,7 @@ function renderFrontmatter(fm) { return lines.join("\n"); } function readLocalVersion(path) { - if (!existsSync6(path)) + if (!existsSync7(path)) return null; try { const text = readFileSync5(path, "utf-8"); @@ -992,8 +1026,8 @@ async function runPull(opts) { summary.skipped++; continue; } - const skillDir = join7(root, dirName); - const skillFile = join7(skillDir, "SKILL.md"); + const skillDir = join8(root, dirName); + const skillFile = join8(skillDir, "SKILL.md"); const remoteVersion = Number(row.version ?? 1); const localVersion = readLocalVersion(skillFile); const action = decideAction({ @@ -1005,9 +1039,9 @@ async function runPull(opts) { let manifestError; if (action === "wrote") { mkdirSync4(skillDir, { recursive: true }); - if (existsSync6(skillFile)) { + if (existsSync7(skillFile)) { try { - renameSync2(skillFile, `${skillFile}.bak`); + renameSync3(skillFile, `${skillFile}.bak`); } catch { } } @@ -1052,8 +1086,8 @@ async function runPull(opts) { return summary; } -// dist/src/skilify/auto-pull.js -var log3 = (msg) => log("skilify-autopull", msg); +// dist/src/skillify/auto-pull.js +var log3 = (msg) => log("skillify-autopull", msg); var DEFAULT_TIMEOUT_MS = 5e3; function withTimeout(p, ms) { let timer = null; @@ -1105,7 +1139,7 @@ async function autoPullSkills(deps = {}) { } } -// dist/src/skilify/autopull-worker.js +// dist/src/skillify/autopull-worker.js void (async () => { try { await autoPullSkills(); diff --git a/pi/bundle/skilify-worker.js b/pi/bundle/skilify-worker.js deleted file mode 100755 index 8eb534fd..00000000 --- a/pi/bundle/skilify-worker.js +++ /dev/null @@ -1,907 +0,0 @@ -#!/usr/bin/env node - -// dist/src/skilify/skilify-worker.js -import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, existsSync as existsSync4, appendFileSync as appendFileSync2, rmSync } from "node:fs"; -import { join as join5 } from "node:path"; - -// dist/src/utils/debug.js -import { appendFileSync } from 
"node:fs"; -import { join } from "node:path"; -import { homedir } from "node:os"; -var DEBUG = process.env.HIVEMIND_DEBUG === "1"; -var LOG = join(homedir(), ".deeplake", "hook-debug.log"); -function utcTimestamp(d = /* @__PURE__ */ new Date()) { - return d.toISOString().replace("T", " ").slice(0, 19) + " UTC"; -} -function log(tag, msg) { - if (!DEBUG) - return; - appendFileSync(LOG, `${(/* @__PURE__ */ new Date()).toISOString()} [${tag}] ${msg} -`); -} - -// dist/src/utils/client-header.js -var DEEPLAKE_CLIENT_HEADER = "X-Deeplake-Client"; -function deeplakeClientValue() { - return "hivemind"; -} -function deeplakeClientHeader() { - return { [DEEPLAKE_CLIENT_HEADER]: deeplakeClientValue() }; -} - -// dist/src/skilify/extractors/index.js -function extractPairs(rows) { - const pairs = []; - let pendingPrompt = null; - let pendingAnswer = []; - function flush() { - if (pendingPrompt && pendingAnswer.length > 0) { - pairs.push({ - sessionId: pendingPrompt.row.session_id ?? "", - agent: pendingPrompt.row.agent ?? null, - date: pendingPrompt.row.creation_date ?? 
null, - prompt: pendingPrompt.content, - answer: pendingAnswer.join("\n\n") - }); - } - pendingPrompt = null; - pendingAnswer = []; - } - for (const r of rows) { - if (r.type === "user_message" && typeof r.content === "string") { - flush(); - pendingPrompt = { content: r.content, row: r }; - } else if (r.type === "assistant_message" && typeof r.content === "string" && pendingPrompt) { - if (r.content.trim().length > 0) - pendingAnswer.push(r.content); - } - } - flush(); - return pairs; -} - -// dist/src/skilify/skill-writer.js -import { existsSync, mkdirSync, readFileSync, readdirSync, statSync, writeFileSync } from "node:fs"; -import { homedir as homedir2 } from "node:os"; -import { join as join2 } from "node:path"; -function assertValidSkillName(name) { - if (typeof name !== "string" || name.length === 0) { - throw new Error(`invalid skill name: empty or non-string`); - } - if (name.length > 100) { - throw new Error(`invalid skill name: too long (${name.length} chars)`); - } - if (name.includes("/") || name.includes("\\") || name.includes("..")) { - throw new Error(`invalid skill name: contains path separator or '..': ${name}`); - } - if (!/^[a-z0-9]+(?:-[a-z0-9]+)*$/.test(name)) { - throw new Error(`invalid skill name: must be kebab-case (lowercase a-z, 0-9, hyphen): ${name}`); - } -} -function skillDir(skillsRoot, name) { - return join2(skillsRoot, name); -} -function skillPath(skillsRoot, name) { - return join2(skillDir(skillsRoot, name), "SKILL.md"); -} -function renderFrontmatter(fm) { - const lines = ["---"]; - lines.push(`name: ${fm.name}`); - lines.push(`description: ${JSON.stringify(fm.description)}`); - if (fm.trigger) - lines.push(`trigger: ${JSON.stringify(fm.trigger)}`); - lines.push(`source_sessions:`); - for (const s of fm.source_sessions) - lines.push(` - ${s}`); - lines.push(`version: ${fm.version}`); - lines.push(`created_by_agent: ${fm.created_by_agent}`); - lines.push(`created_at: ${fm.created_at}`); - lines.push(`updated_at: 
${fm.updated_at}`); - lines.push("---"); - return lines.join("\n"); -} -function parseFrontmatter(text) { - if (!text.startsWith("---\n") && !text.startsWith("---\r\n")) - return null; - const end = text.indexOf("\n---", 4); - if (end < 0) - return null; - const head = text.slice(4, end).trim(); - const body = text.slice(end + 4).replace(/^\r?\n/, ""); - const fm = { source_sessions: [] }; - let mode = "kv"; - for (const raw of head.split(/\r?\n/)) { - if (mode === "sources") { - const m2 = raw.match(/^\s+-\s+(.+)$/); - if (m2) { - fm.source_sessions.push(m2[1].trim()); - continue; - } - mode = "kv"; - } - if (raw.startsWith("source_sessions:")) { - mode = "sources"; - continue; - } - const m = raw.match(/^([a-zA-Z_]+):\s*(.*)$/); - if (!m) - continue; - const [, k, v] = m; - let val = v; - if (v.startsWith('"') && v.endsWith('"')) { - try { - val = JSON.parse(v); - } catch { - } - } else if (k === "version") { - const n = parseInt(v, 10); - if (Number.isFinite(n)) - val = n; - } - fm[k] = val; - } - return { fm, body }; -} -function writeNewSkill(args) { - assertValidSkillName(args.name); - const dir = skillDir(args.skillsRoot, args.name); - const path = skillPath(args.skillsRoot, args.name); - if (existsSync(path)) { - throw new Error(`skill already exists at ${path}; use mergeSkill`); - } - mkdirSync(dir, { recursive: true }); - const now = (/* @__PURE__ */ new Date()).toISOString(); - const fm = { - name: args.name, - description: args.description, - trigger: args.trigger, - source_sessions: args.sourceSessions, - version: 1, - created_by_agent: args.agent, - created_at: now, - updated_at: now - }; - const text = `${renderFrontmatter(fm)} - -${args.body.trim()} -`; - writeFileSync(path, text); - return { path, action: "created", version: 1 }; -} -function mergeSkill(args) { - assertValidSkillName(args.name); - const path = skillPath(args.skillsRoot, args.name); - if (!existsSync(path)) { - throw new Error(`skill ${args.name} does not exist at ${path}; use 
writeNewSkill`); - } - const existing = readFileSync(path, "utf-8"); - const parsed = parseFrontmatter(existing); - const prevVersion = parsed?.fm.version ?? 1; - const prevSources = parsed?.fm.source_sessions ?? []; - const merged = Array.from(/* @__PURE__ */ new Set([...prevSources, ...args.newSourceSessions])); - const now = (/* @__PURE__ */ new Date()).toISOString(); - const fm = { - name: args.name, - description: args.description ?? parsed?.fm.description ?? "", - trigger: parsed?.fm.trigger, - source_sessions: merged, - version: prevVersion + 1, - created_by_agent: parsed?.fm.created_by_agent ?? args.agent, - created_at: parsed?.fm.created_at ?? now, - updated_at: now - }; - const text = `${renderFrontmatter(fm)} - -${args.body.trim()} -`; - writeFileSync(path, text); - return { path, action: "merged", version: fm.version }; -} -function listSkills(skillsRoot) { - if (!existsSync(skillsRoot)) - return []; - const out = []; - for (const name of readdirSync(skillsRoot)) { - const skillFile = join2(skillsRoot, name, "SKILL.md"); - if (existsSync(skillFile) && statSync(skillFile).isFile()) { - out.push({ name, body: readFileSync(skillFile, "utf-8") }); - } - } - return out; -} -function resolveSkillsRoot(install, cwd) { - if (install === "global") { - return join2(homedir2(), ".claude", "skills"); - } - return join2(cwd, ".claude", "skills"); -} - -// dist/src/skilify/skills-table.js -import { randomUUID } from "node:crypto"; - -// dist/src/utils/sql.js -function sqlIdent(name) { - if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) { - throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`); - } - return name; -} - -// dist/src/skilify/skills-table.js -function createSkillsTableSql(tableName) { - const safe = sqlIdent(tableName); - return `CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT 
NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`; -} -function esc(s) { - return s.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""); -} -function isMissingTableError(message) { - if (!message) - return false; - return /Table does not exist|relation .* does not exist|no such table/i.test(message); -} -async function insertSkillRow(args) { - const id = args.id ?? randomUUID(); - const sourceSessionsJson = JSON.stringify(args.sourceSessions); - const sql = `INSERT INTO "${sqlIdent(args.tableName)}" (id, name, project, project_key, local_path, install, source_sessions, source_agent, scope, author, description, trigger_text, body, version, created_at, updated_at) VALUES ('${esc(id)}', '${esc(args.name)}', '${esc(args.project)}', '${esc(args.projectKey)}', '${esc(args.localPath)}', '${esc(args.install)}', '${esc(sourceSessionsJson)}', '${esc(args.sourceAgent)}', '${esc(args.scope)}', '${esc(args.author)}', '${esc(args.description)}', '${esc(args.trigger ?? 
"")}', '${esc(args.body)}', ${args.version}, '${esc(args.createdAt)}', '${esc(args.updatedAt)}')`; - try { - await args.query(sql); - } catch (e) { - if (isMissingTableError(e?.message)) { - await args.query(createSkillsTableSql(args.tableName)); - await args.query(sql); - return; - } - throw e; - } -} - -// dist/src/skilify/gate-parser.js -function extractJsonBlock(s) { - const trimmed = s.trim(); - if (!trimmed) - return null; - const fenced = trimmed.match(/```(?:json)?\s*\n([\s\S]*?)\n```/); - if (fenced) - return fenced[1].trim(); - const start = trimmed.indexOf("{"); - if (start < 0) - return null; - let depth = 0; - for (let i = start; i < trimmed.length; i++) { - const c = trimmed[i]; - if (c === "{") - depth++; - else if (c === "}") { - depth--; - if (depth === 0) - return trimmed.slice(start, i + 1); - } - } - return null; -} -function parseVerdict(raw) { - const block = extractJsonBlock(raw); - if (!block) - return null; - try { - const v = JSON.parse(block); - if (v.verdict !== "KEEP" && v.verdict !== "SKIP" && v.verdict !== "MERGE") - return null; - return v; - } catch { - return null; - } -} - -// dist/src/skilify/gate-runner.js -import { execFileSync } from "node:child_process"; -import { existsSync as existsSync2 } from "node:fs"; -import { homedir as homedir3 } from "node:os"; -import { join as join3 } from "node:path"; -function findAgentBin(agent) { - const which = (name) => { - try { - const out = execFileSync("which", [name], { - encoding: "utf-8", - stdio: ["ignore", "pipe", "ignore"] - }); - return out.trim() || null; - } catch { - return null; - } - }; - switch (agent) { - case "claude_code": - return which("claude") ?? join3(homedir3(), ".claude", "local", "claude"); - case "codex": - return which("codex") ?? "/usr/local/bin/codex"; - case "cursor": - return which("cursor-agent") ?? "/usr/local/bin/cursor-agent"; - case "hermes": - return which("hermes") ?? join3(homedir3(), ".local", "bin", "hermes"); - case "pi": - return which("pi") ?? 
join3(homedir3(), ".local", "bin", "pi"); - } -} -function buildArgs(agent, prompt, opts) { - switch (agent) { - case "claude_code": - return [ - "-p", - prompt, - "--no-session-persistence", - "--model", - "haiku", - "--permission-mode", - "bypassPermissions" - ]; - case "codex": - return [ - "exec", - "--dangerously-bypass-approvals-and-sandbox", - prompt - ]; - case "cursor": - return [ - "--print", - "--model", - opts.cursorModel ?? process.env.HIVEMIND_CURSOR_MODEL ?? "auto", - "--force", - "--output-format", - "text", - prompt - ]; - case "hermes": - return [ - "-z", - prompt, - "--provider", - opts.hermesProvider ?? process.env.HIVEMIND_HERMES_PROVIDER ?? "openrouter", - "-m", - opts.hermesModel ?? process.env.HIVEMIND_HERMES_MODEL ?? "anthropic/claude-haiku-4-5", - "--yolo", - "--ignore-user-config" - ]; - case "pi": - return [ - "--print", - "--provider", - opts.piProvider ?? process.env.HIVEMIND_PI_PROVIDER ?? "google", - "--model", - opts.piModel ?? process.env.HIVEMIND_PI_MODEL ?? "gemini-2.5-flash", - prompt - ]; - } -} -function runGate(opts) { - const bin = opts.bin ?? findAgentBin(opts.agent); - if (!existsSync2(bin)) { - return { - stdout: "", - stderr: "", - errored: true, - errorMessage: `agent binary not found at ${bin} (agent=${opts.agent})` - }; - } - const args = buildArgs(opts.agent, opts.prompt, opts); - try { - const result = execFileSync(bin, args, { - stdio: ["ignore", "pipe", "pipe"], - timeout: opts.timeoutMs ?? 12e4, - maxBuffer: 8 * 1024 * 1024, - env: { ...process.env, HIVEMIND_WIKI_WORKER: "1", HIVEMIND_CAPTURE: "false" } - }); - return { stdout: result.toString("utf-8"), stderr: "", errored: false }; - } catch (e) { - return { - stdout: e.stdout?.toString("utf-8") ?? "", - stderr: e.stderr?.toString("utf-8") ?? "", - errored: true, - errorMessage: `${opts.agent} CLI failed: ${e.status ?? e.code ?? 
e.message}` - }; - } -} - -// dist/src/skilify/state.js -import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync, existsSync as existsSync3, unlinkSync, openSync, closeSync } from "node:fs"; -import { execSync } from "node:child_process"; -import { homedir as homedir4 } from "node:os"; -import { createHash } from "node:crypto"; -import { join as join4, basename } from "node:path"; -var dlog = (msg) => log("skilify-state", msg); -var STATE_DIR = join4(homedir4(), ".deeplake", "state", "skilify"); -var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); -var TRIGGER_THRESHOLD = (() => { - const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); - return Number.isInteger(n) && n > 0 ? n : 20; -})(); -function statePath(projectKey) { - return join4(STATE_DIR, `${projectKey}.json`); -} -function lockPath(projectKey) { - return join4(STATE_DIR, `${projectKey}.lock`); -} -function readState(projectKey) { - const p = statePath(projectKey); - if (!existsSync3(p)) - return null; - try { - return JSON.parse(readFileSync2(p, "utf-8")); - } catch { - return null; - } -} -function writeState(projectKey, state) { - mkdirSync2(STATE_DIR, { recursive: true }); - const p = statePath(projectKey); - const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; - writeFileSync2(tmp, JSON.stringify(state, null, 2)); - renameSync(tmp, p); -} -function withRmwLock(projectKey, fn) { - mkdirSync2(STATE_DIR, { recursive: true }); - const rmw = lockPath(projectKey) + ".rmw"; - const deadline = Date.now() + 2e3; - let fd = null; - while (fd === null) { - try { - fd = openSync(rmw, "wx"); - } catch (e) { - if (e.code !== "EEXIST") - throw e; - if (Date.now() > deadline) { - dlog(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); - try { - unlinkSync(rmw); - } catch (unlinkErr) { - dlog(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); - } - continue; - } - Atomics.wait(YIELD_BUF, 0, 
0, 10); - } - } - try { - return fn(); - } finally { - closeSync(fd); - try { - unlinkSync(rmw); - } catch (unlinkErr) { - dlog(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); - } - } -} -function recordSkill(projectKey, skillName, newestSessionUuid, newestSessionDate) { - withRmwLock(projectKey, () => { - const s = readState(projectKey); - if (!s) - return; - const skills = s.skillsGenerated.includes(skillName) ? s.skillsGenerated : [...s.skillsGenerated, skillName]; - writeState(projectKey, { - ...s, - skillsGenerated: skills, - lastUuid: newestSessionUuid, - lastDate: newestSessionDate, - updatedAt: Date.now() - }); - }); -} -function advanceWatermark(projectKey, newestSessionUuid, newestSessionDate) { - withRmwLock(projectKey, () => { - const s = readState(projectKey); - if (!s) - return; - writeState(projectKey, { - ...s, - lastUuid: newestSessionUuid, - lastDate: newestSessionDate, - updatedAt: Date.now() - }); - }); -} -function releaseWorkerLock(projectKey) { - const p = lockPath(projectKey); - try { - unlinkSync(p); - } catch { - } -} - -// dist/src/skilify/skilify-worker.js -var cfg = JSON.parse(readFileSync3(process.argv[2], "utf-8")); -var tmpDir = cfg.tmpDir; -var verdictPath = join5(tmpDir, "verdict.json"); -var promptPath = join5(tmpDir, "prompt.txt"); -var SESSIONS_TO_MINE = 10; -var PAIR_CHAR_CAP = 2e3; -var TOTAL_PAIRS_CHAR_CAP = 4e4; -var EXISTING_SKILLS_CHAR_CAP = 3e4; -function wlog(msg) { - try { - appendFileSync2(cfg.skilifyLog, `[${utcTimestamp()}] skilify-worker(${cfg.projectKey}): ${msg} -`); - } catch { - } -} -function esc2(s) { - return s.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""); -} -var QUERY_TIMEOUT_MS = 3e4; -async function query(sql, retries = 4) { - for (let attempt = 0; attempt <= retries; attempt++) { - let r; - try { - r = await fetch(`${cfg.apiUrl}/workspaces/${cfg.workspaceId}/tables/query`, { - method: "POST", - headers: { - Authorization: `Bearer 
${cfg.token}`, - "Content-Type": "application/json", - "X-Activeloop-Org-Id": cfg.orgId, - ...deeplakeClientHeader() - }, - signal: AbortSignal.timeout(QUERY_TIMEOUT_MS), - body: JSON.stringify({ query: sql }) - }); - } catch (e) { - if (attempt < retries) { - const base = Math.min(3e4, 2e3 * Math.pow(2, attempt)); - const delay = base + Math.floor(Math.random() * 1e3); - wlog(`fetch failed (${e?.name ?? e?.code ?? e?.message}), retrying in ${delay}ms (attempt ${attempt + 1}/${retries})`); - await new Promise((resolve) => setTimeout(resolve, delay)); - continue; - } - throw e; - } - if (r.ok) { - const j = await r.json(); - if (!j.columns || !j.rows) - return []; - return j.rows.map((row) => Object.fromEntries(j.columns.map((col, i) => [col, row[i]]))); - } - const retryable = r.status === 401 || r.status === 403 || r.status === 429 || r.status === 500 || r.status === 502 || r.status === 503; - if (attempt < retries && retryable) { - const base = Math.min(3e4, 2e3 * Math.pow(2, attempt)); - const delay = base + Math.floor(Math.random() * 1e3); - wlog(`API ${r.status}, retrying in ${delay}ms (attempt ${attempt + 1}/${retries})`); - await new Promise((resolve) => setTimeout(resolve, delay)); - continue; - } - throw new Error(`API ${r.status}: ${(await r.text()).slice(0, 200)}`); - } - return []; -} -function authorClause() { - if (cfg.scope === "org") - return ""; - if (cfg.scope === "team" && cfg.team.length > 0) { - const list = cfg.team.map((n) => `'${esc2(n)}'`).join(", "); - return ` AND author IN (${list})`; - } - return ` AND author = '${esc2(cfg.userName)}'`; -} -async function listCandidateSessions(lastDate) { - const dateClause = lastDate ? 
` AND creation_date > '${esc2(lastDate)}'` : ""; - const sql = `SELECT path, MAX(creation_date) AS last_msg FROM "${cfg.sessionsTable}" WHERE project = '${esc2(cfg.project)}'${authorClause()}${dateClause} GROUP BY path ORDER BY last_msg DESC LIMIT ${SESSIONS_TO_MINE * 2}`; - const rows = await query(sql); - return rows.map((r) => ({ path: String(r.path ?? ""), lastMsg: String(r.last_msg ?? "") })).filter((r) => r.path.length > 0); -} -function isCurrentSession(path) { - return cfg.currentSessionId ? path.includes(cfg.currentSessionId) : false; -} -async function fetchSessionRows(path) { - const rows = await query(`SELECT message, creation_date, agent FROM "${cfg.sessionsTable}" WHERE path = '${esc2(path)}' ORDER BY creation_date ASC`); - const sessionId = (path.split("/").pop() ?? "").replace(/\.[^.]+$/, ""); - return rows.map((r) => { - const m = r.message; - const parsed = typeof m === "string" ? safeJsonParse(m) : m ?? {}; - return { - type: typeof parsed.type === "string" ? parsed.type : void 0, - content: typeof parsed.content === "string" ? parsed.content : void 0, - creation_date: r.creation_date, - session_id: sessionId, - agent: r.agent - }; - }); -} -function safeJsonParse(s) { - try { - return JSON.parse(s); - } catch { - return {}; - } -} -function truncate(s, max) { - if (s.length <= max) - return s; - return s.slice(0, max) + ` -[\u2026truncated ${s.length - max} chars]`; -} -function renderPairsBlock(pairs) { - let total = 0; - const out = []; - for (const [i, p] of pairs.entries()) { - const prompt = truncate(p.prompt, PAIR_CHAR_CAP); - const answer = truncate(p.answer, PAIR_CHAR_CAP); - const block = `--- exchange ${i + 1} (session ${p.sessionId.slice(0, 8)}, agent ${p.agent ?? 
"?"}) --- -USER: -${prompt} - -ASSISTANT: -${answer} -`; - if (total + block.length > TOTAL_PAIRS_CHAR_CAP) { - out.push(`[\u2026${pairs.length - i} more exchanges omitted to stay under prompt budget]`); - break; - } - out.push(block); - total += block.length; - } - return out.join("\n"); -} -function renderExistingSkillsBlock() { - const skills = listSkills(resolveSkillsRoot(cfg.install, cfg.cwd)); - if (skills.length === 0) { - return { - names: [], - block: "(no existing skills in this project \u2014 MERGE is NOT a valid choice; pick KEEP or SKIP only)" - }; - } - let total = 0; - const out = []; - const names = []; - for (const s of skills) { - const block = `--- existing skill: ${s.name} --- -${s.body} -`; - if (total + block.length > EXISTING_SKILLS_CHAR_CAP) { - out.push(`[\u2026${skills.length - out.length} more existing skills omitted]`); - break; - } - out.push(block); - names.push(s.name); - total += block.length; - } - return { names, block: out.join("\n") }; -} -function buildPrompt(pairs) { - const existing = renderExistingSkillsBlock(); - const mergeTargetsClause = existing.names.length > 0 ? `MERGE is allowed only if your "name" is EXACTLY one of: [${existing.names.join(", ")}]. Any other name MUST use KEEP, not MERGE.` : `MERGE is FORBIDDEN \u2014 there are no project skills to merge into. Use KEEP or SKIP only.`; - return [ - `You are a skill curator for the "${cfg.project}" project. 
You decide whether the recent`, - `agent activity below contains a recurring, non-trivial pattern worth crystallizing as a`, - `reusable skill, and whether to create a new skill or merge into an existing one.`, - ``, - `RULES:`, - `- KEEP only if the pattern recurs across at least 3 of the exchanges, is non-obvious to a`, - ` competent engineer, and is not already covered by an existing skill below.`, - `- SKIP if the activity is one-off, generic engineering work, or already covered.`, - `- MERGE if the pattern is a meaningful extension of an existing PROJECT skill \u2014 produce a`, - ` merged body that incorporates the new evidence without exceeding ~3000 characters or`, - ` covering unrelated domains.`, - `- ${mergeTargetsClause}`, - `- Do NOT reference skills outside this project (e.g. ones from ~/.claude/skills/). Only`, - ` the project skills listed below count for MERGE.`, - `- Skill bodies should follow the existing style: short sections (When to use, Workflow,`, - ` Anti-patterns), concrete commands and file paths drawn from the exchanges, no marketing.`, - ``, - `=== EXISTING PROJECT SKILLS ===`, - existing.block, - ``, - `=== RECENT EXCHANGES (prompt + answer pairs, tool calls already stripped) ===`, - renderPairsBlock(pairs), - ``, - `=== YOUR TASK ===`, - `Output your decision as a single JSON object. The worker will parse it.`, - `You may either:`, - ` (a) Write the JSON to this exact path using the Write tool: ${verdictPath}`, - ` (b) Print the JSON object to stdout (your final message), nothing else.`, - `Either path works; pick whichever you prefer. 
Do not do both.`, - ``, - `The JSON MUST have this shape:`, - `{`, - ` "verdict": "KEEP" | "SKIP" | "MERGE",`, - ` "name": "",`, - ` "description": "",`, - ` "trigger": "",`, - ` "body": "",`, - ` "reason": ""`, - `}`, - ``, - `For SKIP, only "verdict" and "reason" are required.`, - `If you print to stdout, do not include any prose before or after the JSON.`, - `Do not write any other files.` - ].join("\n"); -} -function readVerdict(stdout) { - if (existsSync4(verdictPath)) { - try { - const text = readFileSync3(verdictPath, "utf-8"); - const v2 = parseVerdict(text); - if (v2) - return { verdict: v2, source: "file" }; - return { verdict: null, source: `file-unparseable (${text.length} chars)` }; - } catch (e) { - return { verdict: null, source: `file-read-error: ${e.message}` }; - } - } - const v = parseVerdict(stdout); - if (v) - return { verdict: v, source: "stdout" }; - return { verdict: null, source: `no-file-no-stdout-json (stdout=${stdout.length} chars)` }; -} -function cleanup(keep) { - if (keep) { - wlog(`keeping tmpDir for inspection: ${tmpDir}`); - return; - } - try { - rmSync(tmpDir, { recursive: true, force: true }); - } catch (e) { - wlog(`cleanup failed: ${e.message}`); - } -} -var keepTmpForInspection = false; -async function main() { - try { - const state = readState(cfg.projectKey); - const lastDate = state?.lastDate ?? null; - wlog(`fetching candidate sessions (scope=${cfg.scope}, lastDate=${lastDate ?? 
"none"})`); - const candidates = await listCandidateSessions(lastDate); - const usable = candidates.filter((c) => !isCurrentSession(c.path)).slice(0, SESSIONS_TO_MINE); - if (usable.length === 0) { - wlog("no new sessions to mine \u2014 done"); - return; - } - wlog(`mining ${usable.length} sessions`); - const allPairs = []; - for (const c of usable) { - const rows = await fetchSessionRows(c.path); - const pairs = extractPairs(rows); - allPairs.push(...pairs); - } - if (allPairs.length === 0) { - wlog("no prompt/answer pairs after extraction \u2014 advancing watermark and exiting"); - const oldest2 = usable[usable.length - 1]; - advanceWatermark(cfg.projectKey, oldest2.path, oldest2.lastMsg); - return; - } - wlog(`extracted ${allPairs.length} pairs across ${usable.length} sessions`); - const prompt = buildPrompt(allPairs); - writeFileSync3(promptPath, prompt); - const gateAgent = cfg.gateAgent ?? cfg.agent; - wlog(`running gate (agent=${cfg.agent}, gateAgent=${gateAgent}, bin=${cfg.gateBin}, prompt=${prompt.length} chars)`); - const gate = runGate({ - agent: gateAgent, - prompt, - bin: cfg.gateBin, - cursorModel: cfg.cursorModel, - hermesProvider: cfg.hermesProvider, - hermesModel: cfg.hermesModel, - piProvider: cfg.piProvider, - piModel: cfg.piModel, - timeoutMs: 12e4 - }); - try { - writeFileSync3(join5(tmpDir, "gate-stdout.txt"), gate.stdout); - if (gate.stderr) - writeFileSync3(join5(tmpDir, "gate-stderr.txt"), gate.stderr); - } catch { - } - if (gate.errored) { - wlog(`gate failed: ${gate.errorMessage} (stdout=${gate.stdout.length}, stderr=${gate.stderr.length})`); - return; - } - wlog(`gate exited (code 0, stdout=${gate.stdout.length} chars)`); - const { verdict, source } = readVerdict(gate.stdout); - if (!verdict) { - wlog(`no parseable verdict (${source}) \u2014 treating as SKIP, advancing watermark`); - keepTmpForInspection = true; - const oldest2 = usable[usable.length - 1]; - advanceWatermark(cfg.projectKey, oldest2.path, oldest2.lastMsg); - return; - } - 
wlog(`verdict source: ${source}`); - wlog(`verdict=${verdict.verdict} name=${verdict.name ?? "-"} reason=${verdict.reason ?? "-"}`); - const oldest = usable[usable.length - 1]; - const watermarkUuid = (oldest.path.split("/").pop() ?? "").replace(/\.[^.]+$/, ""); - const watermarkDate = oldest.lastMsg; - const sourceSessions = usable.map((c) => (c.path.split("/").pop() ?? "").replace(/\.[^.]+$/, "")); - async function recordToDeeplake(result, verdict2) { - try { - await insertSkillRow({ - query, - tableName: cfg.skillsTable, - name: verdict2.name, - project: cfg.project, - projectKey: cfg.projectKey, - localPath: result.path, - install: cfg.install, - sourceSessions, - sourceAgent: cfg.agent, - scope: cfg.scope, - author: cfg.userName, - description: verdict2.description ?? "", - trigger: verdict2.trigger, - body: verdict2.body, - version: result.version, - createdAt: (/* @__PURE__ */ new Date()).toISOString(), - updatedAt: (/* @__PURE__ */ new Date()).toISOString() - }); - wlog(`recorded to skills table: name=${verdict2.name} v${result.version}`); - } catch (e) { - wlog(`skills table insert failed (non-fatal): ${e.message}`); - } - } - if (verdict.verdict === "KEEP" && verdict.name && verdict.body) { - try { - const result = writeNewSkill({ - skillsRoot: resolveSkillsRoot(cfg.install, cfg.cwd), - name: verdict.name, - description: verdict.description ?? 
"", - trigger: verdict.trigger, - body: verdict.body, - sourceSessions, - agent: cfg.agent - }); - wlog(`wrote new skill: ${result.path}`); - recordSkill(cfg.projectKey, verdict.name, watermarkUuid, watermarkDate); - await recordToDeeplake(result, verdict); - } catch (e) { - wlog(`writeNewSkill failed: ${e.message}`); - advanceWatermark(cfg.projectKey, watermarkUuid, watermarkDate); - } - } else if (verdict.verdict === "MERGE" && verdict.name && verdict.body) { - try { - const result = mergeSkill({ - skillsRoot: resolveSkillsRoot(cfg.install, cfg.cwd), - name: verdict.name, - description: verdict.description, - body: verdict.body, - newSourceSessions: sourceSessions, - agent: cfg.agent - }); - wlog(`merged into skill: ${result.path} (v${result.version})`); - recordSkill(cfg.projectKey, verdict.name, watermarkUuid, watermarkDate); - await recordToDeeplake(result, verdict); - } catch (e) { - if (/does not exist/i.test(e.message ?? "")) { - wlog(`mergeSkill target missing \u2014 falling back to writeNewSkill: ${verdict.name}`); - try { - const result = writeNewSkill({ - skillsRoot: resolveSkillsRoot(cfg.install, cfg.cwd), - name: verdict.name, - description: verdict.description ?? 
"", - trigger: verdict.trigger, - body: verdict.body, - sourceSessions, - agent: cfg.agent - }); - wlog(`wrote new skill (merge fallback): ${result.path}`); - recordSkill(cfg.projectKey, verdict.name, watermarkUuid, watermarkDate); - await recordToDeeplake(result, verdict); - } catch (e2) { - wlog(`writeNewSkill fallback also failed: ${e2.message}`); - advanceWatermark(cfg.projectKey, watermarkUuid, watermarkDate); - } - } else { - wlog(`mergeSkill failed: ${e.message}`); - advanceWatermark(cfg.projectKey, watermarkUuid, watermarkDate); - } - } - } else { - advanceWatermark(cfg.projectKey, watermarkUuid, watermarkDate); - } - } catch (e) { - wlog(`fatal: ${e.message}`); - } finally { - cleanup(keepTmpForInspection); - try { - releaseWorkerLock(cfg.projectKey); - } catch (e) { - wlog(`releaseWorkerLock failed: ${e.message}`); - } - } -} -main(); diff --git a/pi/bundle/skillify-worker.js b/pi/bundle/skillify-worker.js new file mode 100755 index 00000000..987a16ba --- /dev/null +++ b/pi/bundle/skillify-worker.js @@ -0,0 +1,942 @@ +#!/usr/bin/env node + +// dist/src/skillify/skillify-worker.js +import { readFileSync as readFileSync3, writeFileSync as writeFileSync3, existsSync as existsSync5, appendFileSync as appendFileSync2, rmSync } from "node:fs"; +import { join as join6 } from "node:path"; + +// dist/src/utils/debug.js +import { appendFileSync } from "node:fs"; +import { join } from "node:path"; +import { homedir } from "node:os"; +var DEBUG = process.env.HIVEMIND_DEBUG === "1"; +var LOG = join(homedir(), ".deeplake", "hook-debug.log"); +function utcTimestamp(d = /* @__PURE__ */ new Date()) { + return d.toISOString().replace("T", " ").slice(0, 19) + " UTC"; +} +function log(tag, msg) { + if (!DEBUG) + return; + appendFileSync(LOG, `${(/* @__PURE__ */ new Date()).toISOString()} [${tag}] ${msg} +`); +} + +// dist/src/utils/client-header.js +var DEEPLAKE_CLIENT_HEADER = "X-Deeplake-Client"; +function deeplakeClientValue() { + return "hivemind"; +} +function 
deeplakeClientHeader() { + return { [DEEPLAKE_CLIENT_HEADER]: deeplakeClientValue() }; +} + +// dist/src/skillify/extractors/index.js +function extractPairs(rows) { + const pairs = []; + let pendingPrompt = null; + let pendingAnswer = []; + function flush() { + if (pendingPrompt && pendingAnswer.length > 0) { + pairs.push({ + sessionId: pendingPrompt.row.session_id ?? "", + agent: pendingPrompt.row.agent ?? null, + date: pendingPrompt.row.creation_date ?? null, + prompt: pendingPrompt.content, + answer: pendingAnswer.join("\n\n") + }); + } + pendingPrompt = null; + pendingAnswer = []; + } + for (const r of rows) { + if (r.type === "user_message" && typeof r.content === "string") { + flush(); + pendingPrompt = { content: r.content, row: r }; + } else if (r.type === "assistant_message" && typeof r.content === "string" && pendingPrompt) { + if (r.content.trim().length > 0) + pendingAnswer.push(r.content); + } + } + flush(); + return pairs; +} + +// dist/src/skillify/skill-writer.js +import { existsSync, mkdirSync, readFileSync, readdirSync, statSync, writeFileSync } from "node:fs"; +import { homedir as homedir2 } from "node:os"; +import { join as join2 } from "node:path"; +function assertValidSkillName(name) { + if (typeof name !== "string" || name.length === 0) { + throw new Error(`invalid skill name: empty or non-string`); + } + if (name.length > 100) { + throw new Error(`invalid skill name: too long (${name.length} chars)`); + } + if (name.includes("/") || name.includes("\\") || name.includes("..")) { + throw new Error(`invalid skill name: contains path separator or '..': ${name}`); + } + if (!/^[a-z0-9]+(?:-[a-z0-9]+)*$/.test(name)) { + throw new Error(`invalid skill name: must be kebab-case (lowercase a-z, 0-9, hyphen): ${name}`); + } +} +function skillDir(skillsRoot, name) { + return join2(skillsRoot, name); +} +function skillPath(skillsRoot, name) { + return join2(skillDir(skillsRoot, name), "SKILL.md"); +} +function renderFrontmatter(fm) { + const lines = 
["---"]; + lines.push(`name: ${fm.name}`); + lines.push(`description: ${JSON.stringify(fm.description)}`); + if (fm.trigger) + lines.push(`trigger: ${JSON.stringify(fm.trigger)}`); + lines.push(`source_sessions:`); + for (const s of fm.source_sessions) + lines.push(` - ${s}`); + lines.push(`version: ${fm.version}`); + lines.push(`created_by_agent: ${fm.created_by_agent}`); + lines.push(`created_at: ${fm.created_at}`); + lines.push(`updated_at: ${fm.updated_at}`); + lines.push("---"); + return lines.join("\n"); +} +function parseFrontmatter(text) { + if (!text.startsWith("---\n") && !text.startsWith("---\r\n")) + return null; + const end = text.indexOf("\n---", 4); + if (end < 0) + return null; + const head = text.slice(4, end).trim(); + const body = text.slice(end + 4).replace(/^\r?\n/, ""); + const fm = { source_sessions: [] }; + let mode = "kv"; + for (const raw of head.split(/\r?\n/)) { + if (mode === "sources") { + const m2 = raw.match(/^\s+-\s+(.+)$/); + if (m2) { + fm.source_sessions.push(m2[1].trim()); + continue; + } + mode = "kv"; + } + if (raw.startsWith("source_sessions:")) { + mode = "sources"; + continue; + } + const m = raw.match(/^([a-zA-Z_]+):\s*(.*)$/); + if (!m) + continue; + const [, k, v] = m; + let val = v; + if (v.startsWith('"') && v.endsWith('"')) { + try { + val = JSON.parse(v); + } catch { + } + } else if (k === "version") { + const n = parseInt(v, 10); + if (Number.isFinite(n)) + val = n; + } + fm[k] = val; + } + return { fm, body }; +} +function writeNewSkill(args) { + assertValidSkillName(args.name); + const dir = skillDir(args.skillsRoot, args.name); + const path = skillPath(args.skillsRoot, args.name); + if (existsSync(path)) { + throw new Error(`skill already exists at ${path}; use mergeSkill`); + } + mkdirSync(dir, { recursive: true }); + const now = (/* @__PURE__ */ new Date()).toISOString(); + const fm = { + name: args.name, + description: args.description, + trigger: args.trigger, + source_sessions: args.sourceSessions, + 
version: 1, + created_by_agent: args.agent, + created_at: now, + updated_at: now + }; + const text = `${renderFrontmatter(fm)} + +${args.body.trim()} +`; + writeFileSync(path, text); + return { path, action: "created", version: 1 }; +} +function mergeSkill(args) { + assertValidSkillName(args.name); + const path = skillPath(args.skillsRoot, args.name); + if (!existsSync(path)) { + throw new Error(`skill ${args.name} does not exist at ${path}; use writeNewSkill`); + } + const existing = readFileSync(path, "utf-8"); + const parsed = parseFrontmatter(existing); + const prevVersion = parsed?.fm.version ?? 1; + const prevSources = parsed?.fm.source_sessions ?? []; + const merged = Array.from(/* @__PURE__ */ new Set([...prevSources, ...args.newSourceSessions])); + const now = (/* @__PURE__ */ new Date()).toISOString(); + const fm = { + name: args.name, + description: args.description ?? parsed?.fm.description ?? "", + trigger: parsed?.fm.trigger, + source_sessions: merged, + version: prevVersion + 1, + created_by_agent: parsed?.fm.created_by_agent ?? args.agent, + created_at: parsed?.fm.created_at ?? 
now, + updated_at: now + }; + const text = `${renderFrontmatter(fm)} + +${args.body.trim()} +`; + writeFileSync(path, text); + return { path, action: "merged", version: fm.version }; +} +function listSkills(skillsRoot) { + if (!existsSync(skillsRoot)) + return []; + const out = []; + for (const name of readdirSync(skillsRoot)) { + const skillFile = join2(skillsRoot, name, "SKILL.md"); + if (existsSync(skillFile) && statSync(skillFile).isFile()) { + out.push({ name, body: readFileSync(skillFile, "utf-8") }); + } + } + return out; +} +function resolveSkillsRoot(install, cwd) { + if (install === "global") { + return join2(homedir2(), ".claude", "skills"); + } + return join2(cwd, ".claude", "skills"); +} + +// dist/src/skillify/skills-table.js +import { randomUUID } from "node:crypto"; + +// dist/src/utils/sql.js +function sqlIdent(name) { + if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) { + throw new Error(`Invalid SQL identifier: ${JSON.stringify(name)}`); + } + return name; +} + +// dist/src/skillify/skills-table.js +function createSkillsTableSql(tableName) { + const safe = sqlIdent(tableName); + return `CREATE TABLE IF NOT EXISTS "${safe}" (id TEXT NOT NULL DEFAULT '', name TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '', project_key TEXT NOT NULL DEFAULT '', local_path TEXT NOT NULL DEFAULT '', install TEXT NOT NULL DEFAULT 'project', source_sessions TEXT NOT NULL DEFAULT '[]', source_agent TEXT NOT NULL DEFAULT '', scope TEXT NOT NULL DEFAULT 'me', author TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', trigger_text TEXT NOT NULL DEFAULT '', body TEXT NOT NULL DEFAULT '', version BIGINT NOT NULL DEFAULT 1, created_at TEXT NOT NULL DEFAULT '', updated_at TEXT NOT NULL DEFAULT '') USING deeplake`; +} +function esc(s) { + return s.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""); +} +function isMissingTableError(message) { + if (!message) + return false; + return /Table does not exist|relation .* does 
not exist|no such table/i.test(message); +} +async function insertSkillRow(args) { + const id = args.id ?? randomUUID(); + const sourceSessionsJson = JSON.stringify(args.sourceSessions); + const sql = `INSERT INTO "${sqlIdent(args.tableName)}" (id, name, project, project_key, local_path, install, source_sessions, source_agent, scope, author, description, trigger_text, body, version, created_at, updated_at) VALUES ('${esc(id)}', '${esc(args.name)}', '${esc(args.project)}', '${esc(args.projectKey)}', '${esc(args.localPath)}', '${esc(args.install)}', '${esc(sourceSessionsJson)}', '${esc(args.sourceAgent)}', '${esc(args.scope)}', '${esc(args.author)}', '${esc(args.description)}', '${esc(args.trigger ?? "")}', '${esc(args.body)}', ${args.version}, '${esc(args.createdAt)}', '${esc(args.updatedAt)}')`; + try { + await args.query(sql); + } catch (e) { + if (isMissingTableError(e?.message)) { + await args.query(createSkillsTableSql(args.tableName)); + await args.query(sql); + return; + } + throw e; + } +} + +// dist/src/skillify/gate-parser.js +function extractJsonBlock(s) { + const trimmed = s.trim(); + if (!trimmed) + return null; + const fenced = trimmed.match(/```(?:json)?\s*\n([\s\S]*?)\n```/); + if (fenced) + return fenced[1].trim(); + const start = trimmed.indexOf("{"); + if (start < 0) + return null; + let depth = 0; + for (let i = start; i < trimmed.length; i++) { + const c = trimmed[i]; + if (c === "{") + depth++; + else if (c === "}") { + depth--; + if (depth === 0) + return trimmed.slice(start, i + 1); + } + } + return null; +} +function parseVerdict(raw) { + const block = extractJsonBlock(raw); + if (!block) + return null; + try { + const v = JSON.parse(block); + if (v.verdict !== "KEEP" && v.verdict !== "SKIP" && v.verdict !== "MERGE") + return null; + return v; + } catch { + return null; + } +} + +// dist/src/skillify/gate-runner.js +import { execFileSync } from "node:child_process"; +import { existsSync as existsSync2 } from "node:fs"; +import { homedir as 
homedir3 } from "node:os"; +import { join as join3 } from "node:path"; +function findAgentBin(agent) { + const which = (name) => { + try { + const out = execFileSync("which", [name], { + encoding: "utf-8", + stdio: ["ignore", "pipe", "ignore"] + }); + return out.trim() || null; + } catch { + return null; + } + }; + switch (agent) { + case "claude_code": + return which("claude") ?? join3(homedir3(), ".claude", "local", "claude"); + case "codex": + return which("codex") ?? "/usr/local/bin/codex"; + case "cursor": + return which("cursor-agent") ?? "/usr/local/bin/cursor-agent"; + case "hermes": + return which("hermes") ?? join3(homedir3(), ".local", "bin", "hermes"); + case "pi": + return which("pi") ?? join3(homedir3(), ".local", "bin", "pi"); + } +} +function buildArgs(agent, prompt, opts) { + switch (agent) { + case "claude_code": + return [ + "-p", + prompt, + "--no-session-persistence", + "--model", + "haiku", + "--permission-mode", + "bypassPermissions" + ]; + case "codex": + return [ + "exec", + "--dangerously-bypass-approvals-and-sandbox", + prompt + ]; + case "cursor": + return [ + "--print", + "--model", + opts.cursorModel ?? process.env.HIVEMIND_CURSOR_MODEL ?? "auto", + "--force", + "--output-format", + "text", + prompt + ]; + case "hermes": + return [ + "-z", + prompt, + "--provider", + opts.hermesProvider ?? process.env.HIVEMIND_HERMES_PROVIDER ?? "openrouter", + "-m", + opts.hermesModel ?? process.env.HIVEMIND_HERMES_MODEL ?? "anthropic/claude-haiku-4-5", + "--yolo", + "--ignore-user-config" + ]; + case "pi": + return [ + "--print", + "--provider", + opts.piProvider ?? process.env.HIVEMIND_PI_PROVIDER ?? "google", + "--model", + opts.piModel ?? process.env.HIVEMIND_PI_MODEL ?? "gemini-2.5-flash", + prompt + ]; + } +} +function runGate(opts) { + const bin = opts.bin ?? 
findAgentBin(opts.agent); + if (!existsSync2(bin)) { + return { + stdout: "", + stderr: "", + errored: true, + errorMessage: `agent binary not found at ${bin} (agent=${opts.agent})` + }; + } + const args = buildArgs(opts.agent, opts.prompt, opts); + try { + const result = execFileSync(bin, args, { + stdio: ["ignore", "pipe", "pipe"], + timeout: opts.timeoutMs ?? 12e4, + maxBuffer: 8 * 1024 * 1024, + env: { ...process.env, HIVEMIND_WIKI_WORKER: "1", HIVEMIND_CAPTURE: "false" } + }); + return { stdout: result.toString("utf-8"), stderr: "", errored: false }; + } catch (e) { + return { + stdout: e.stdout?.toString("utf-8") ?? "", + stderr: e.stderr?.toString("utf-8") ?? "", + errored: true, + errorMessage: `${opts.agent} CLI failed: ${e.status ?? e.code ?? e.message}` + }; + } +} + +// dist/src/skillify/state.js +import { readFileSync as readFileSync2, writeFileSync as writeFileSync2, writeSync, mkdirSync as mkdirSync2, renameSync as renameSync2, existsSync as existsSync4, unlinkSync, openSync, closeSync } from "node:fs"; +import { execSync } from "node:child_process"; +import { homedir as homedir5 } from "node:os"; +import { createHash } from "node:crypto"; +import { join as join5, basename } from "node:path"; + +// dist/src/skillify/legacy-migration.js +import { existsSync as existsSync3, renameSync } from "node:fs"; +import { homedir as homedir4 } from "node:os"; +import { join as join4 } from "node:path"; +var dlog = (msg) => log("skillify-migrate", msg); +var attempted = false; +function migrateLegacyStateDir() { + if (attempted) + return; + attempted = true; + const root = join4(homedir4(), ".deeplake", "state"); + const legacy = join4(root, "skilify"); + const current = join4(root, "skillify"); + if (!existsSync3(legacy)) + return; + if (existsSync3(current)) + return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = err.code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed 
(${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} + +// dist/src/skillify/state.js +var dlog2 = (msg) => log("skillify-state", msg); +var STATE_DIR = join5(homedir5(), ".deeplake", "state", "skillify"); +var YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); +var TRIGGER_THRESHOLD = (() => { + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); + return Number.isInteger(n) && n > 0 ? n : 20; +})(); +function statePath(projectKey) { + return join5(STATE_DIR, `${projectKey}.json`); +} +function lockPath(projectKey) { + return join5(STATE_DIR, `${projectKey}.lock`); +} +function readState(projectKey) { + migrateLegacyStateDir(); + const p = statePath(projectKey); + if (!existsSync4(p)) + return null; + try { + return JSON.parse(readFileSync2(p, "utf-8")); + } catch { + return null; + } +} +function writeState(projectKey, state) { + migrateLegacyStateDir(); + mkdirSync2(STATE_DIR, { recursive: true }); + const p = statePath(projectKey); + const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; + writeFileSync2(tmp, JSON.stringify(state, null, 2)); + renameSync2(tmp, p); +} +function withRmwLock(projectKey, fn) { + migrateLegacyStateDir(); + mkdirSync2(STATE_DIR, { recursive: true }); + const rmw = lockPath(projectKey) + ".rmw"; + const deadline = Date.now() + 2e3; + let fd = null; + while (fd === null) { + try { + fd = openSync(rmw, "wx"); + } catch (e) { + if (e.code !== "EEXIST") + throw e; + if (Date.now() > deadline) { + dlog2(`rmw lock deadline exceeded for ${projectKey}, reclaiming stale lock`); + try { + unlinkSync(rmw); + } catch (unlinkErr) { + dlog2(`stale rmw lock unlink failed for ${projectKey}: ${unlinkErr.message}`); + } + continue; + } + Atomics.wait(YIELD_BUF, 0, 0, 10); + } + } + try { + return fn(); + } finally { + closeSync(fd); + try { + unlinkSync(rmw); + } catch (unlinkErr) { + dlog2(`rmw lock cleanup failed for ${projectKey}: ${unlinkErr.message}`); + } + } +} +function recordSkill(projectKey, skillName, 
newestSessionUuid, newestSessionDate) { + withRmwLock(projectKey, () => { + const s = readState(projectKey); + if (!s) + return; + const skills = s.skillsGenerated.includes(skillName) ? s.skillsGenerated : [...s.skillsGenerated, skillName]; + writeState(projectKey, { + ...s, + skillsGenerated: skills, + lastUuid: newestSessionUuid, + lastDate: newestSessionDate, + updatedAt: Date.now() + }); + }); +} +function advanceWatermark(projectKey, newestSessionUuid, newestSessionDate) { + withRmwLock(projectKey, () => { + const s = readState(projectKey); + if (!s) + return; + writeState(projectKey, { + ...s, + lastUuid: newestSessionUuid, + lastDate: newestSessionDate, + updatedAt: Date.now() + }); + }); +} +function releaseWorkerLock(projectKey) { + const p = lockPath(projectKey); + try { + unlinkSync(p); + } catch { + } +} + +// dist/src/skillify/skillify-worker.js +var cfg = JSON.parse(readFileSync3(process.argv[2], "utf-8")); +var tmpDir = cfg.tmpDir; +var verdictPath = join6(tmpDir, "verdict.json"); +var promptPath = join6(tmpDir, "prompt.txt"); +var SESSIONS_TO_MINE = 10; +var PAIR_CHAR_CAP = 2e3; +var TOTAL_PAIRS_CHAR_CAP = 4e4; +var EXISTING_SKILLS_CHAR_CAP = 3e4; +function wlog(msg) { + try { + appendFileSync2(cfg.skillifyLog, `[${utcTimestamp()}] skillify-worker(${cfg.projectKey}): ${msg} +`); + } catch { + } +} +function esc2(s) { + return s.replace(/\\/g, "\\\\").replace(/'/g, "''").replace(/[\x01-\x08\x0b\x0c\x0e-\x1f\x7f]/g, ""); +} +var QUERY_TIMEOUT_MS = 3e4; +async function query(sql, retries = 4) { + for (let attempt = 0; attempt <= retries; attempt++) { + let r; + try { + r = await fetch(`${cfg.apiUrl}/workspaces/${cfg.workspaceId}/tables/query`, { + method: "POST", + headers: { + Authorization: `Bearer ${cfg.token}`, + "Content-Type": "application/json", + "X-Activeloop-Org-Id": cfg.orgId, + ...deeplakeClientHeader() + }, + signal: AbortSignal.timeout(QUERY_TIMEOUT_MS), + body: JSON.stringify({ query: sql }) + }); + } catch (e) { + if (attempt < retries) 
{ + const base = Math.min(3e4, 2e3 * Math.pow(2, attempt)); + const delay = base + Math.floor(Math.random() * 1e3); + wlog(`fetch failed (${e?.name ?? e?.code ?? e?.message}), retrying in ${delay}ms (attempt ${attempt + 1}/${retries})`); + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + throw e; + } + if (r.ok) { + const j = await r.json(); + if (!j.columns || !j.rows) + return []; + return j.rows.map((row) => Object.fromEntries(j.columns.map((col, i) => [col, row[i]]))); + } + const retryable = r.status === 401 || r.status === 403 || r.status === 429 || r.status === 500 || r.status === 502 || r.status === 503; + if (attempt < retries && retryable) { + const base = Math.min(3e4, 2e3 * Math.pow(2, attempt)); + const delay = base + Math.floor(Math.random() * 1e3); + wlog(`API ${r.status}, retrying in ${delay}ms (attempt ${attempt + 1}/${retries})`); + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + throw new Error(`API ${r.status}: ${(await r.text()).slice(0, 200)}`); + } + return []; +} +function authorClause() { + if (cfg.scope === "org") + return ""; + if (cfg.scope === "team" && cfg.team.length > 0) { + const list = cfg.team.map((n) => `'${esc2(n)}'`).join(", "); + return ` AND author IN (${list})`; + } + return ` AND author = '${esc2(cfg.userName)}'`; +} +async function listCandidateSessions(lastDate) { + const dateClause = lastDate ? ` AND creation_date > '${esc2(lastDate)}'` : ""; + const sql = `SELECT path, MAX(creation_date) AS last_msg FROM "${cfg.sessionsTable}" WHERE project = '${esc2(cfg.project)}'${authorClause()}${dateClause} GROUP BY path ORDER BY last_msg DESC LIMIT ${SESSIONS_TO_MINE * 2}`; + const rows = await query(sql); + return rows.map((r) => ({ path: String(r.path ?? ""), lastMsg: String(r.last_msg ?? "") })).filter((r) => r.path.length > 0); +} +function isCurrentSession(path) { + return cfg.currentSessionId ? 
path.includes(cfg.currentSessionId) : false; +} +async function fetchSessionRows(path) { + const rows = await query(`SELECT message, creation_date, agent FROM "${cfg.sessionsTable}" WHERE path = '${esc2(path)}' ORDER BY creation_date ASC`); + const sessionId = (path.split("/").pop() ?? "").replace(/\.[^.]+$/, ""); + return rows.map((r) => { + const m = r.message; + const parsed = typeof m === "string" ? safeJsonParse(m) : m ?? {}; + return { + type: typeof parsed.type === "string" ? parsed.type : void 0, + content: typeof parsed.content === "string" ? parsed.content : void 0, + creation_date: r.creation_date, + session_id: sessionId, + agent: r.agent + }; + }); +} +function safeJsonParse(s) { + try { + return JSON.parse(s); + } catch { + return {}; + } +} +function truncate(s, max) { + if (s.length <= max) + return s; + return s.slice(0, max) + ` +[\u2026truncated ${s.length - max} chars]`; +} +function renderPairsBlock(pairs) { + let total = 0; + const out = []; + for (const [i, p] of pairs.entries()) { + const prompt = truncate(p.prompt, PAIR_CHAR_CAP); + const answer = truncate(p.answer, PAIR_CHAR_CAP); + const block = `--- exchange ${i + 1} (session ${p.sessionId.slice(0, 8)}, agent ${p.agent ?? 
"?"}) --- +USER: +${prompt} + +ASSISTANT: +${answer} +`; + if (total + block.length > TOTAL_PAIRS_CHAR_CAP) { + out.push(`[\u2026${pairs.length - i} more exchanges omitted to stay under prompt budget]`); + break; + } + out.push(block); + total += block.length; + } + return out.join("\n"); +} +function renderExistingSkillsBlock() { + const skills = listSkills(resolveSkillsRoot(cfg.install, cfg.cwd)); + if (skills.length === 0) { + return { + names: [], + block: "(no existing skills in this project \u2014 MERGE is NOT a valid choice; pick KEEP or SKIP only)" + }; + } + let total = 0; + const out = []; + const names = []; + for (const s of skills) { + const block = `--- existing skill: ${s.name} --- +${s.body} +`; + if (total + block.length > EXISTING_SKILLS_CHAR_CAP) { + out.push(`[\u2026${skills.length - out.length} more existing skills omitted]`); + break; + } + out.push(block); + names.push(s.name); + total += block.length; + } + return { names, block: out.join("\n") }; +} +function buildPrompt(pairs) { + const existing = renderExistingSkillsBlock(); + const mergeTargetsClause = existing.names.length > 0 ? `MERGE is allowed only if your "name" is EXACTLY one of: [${existing.names.join(", ")}]. Any other name MUST use KEEP, not MERGE.` : `MERGE is FORBIDDEN \u2014 there are no project skills to merge into. Use KEEP or SKIP only.`; + return [ + `You are a skill curator for the "${cfg.project}" project. 
You decide whether the recent`, + `agent activity below contains a recurring, non-trivial pattern worth crystallizing as a`, + `reusable skill, and whether to create a new skill or merge into an existing one.`, + ``, + `RULES:`, + `- KEEP only if the pattern recurs across at least 3 of the exchanges, is non-obvious to a`, + ` competent engineer, and is not already covered by an existing skill below.`, + `- SKIP if the activity is one-off, generic engineering work, or already covered.`, + `- MERGE if the pattern is a meaningful extension of an existing PROJECT skill \u2014 produce a`, + ` merged body that incorporates the new evidence without exceeding ~3000 characters or`, + ` covering unrelated domains.`, + `- ${mergeTargetsClause}`, + `- Do NOT reference skills outside this project (e.g. ones from ~/.claude/skills/). Only`, + ` the project skills listed below count for MERGE.`, + `- Skill bodies should follow the existing style: short sections (When to use, Workflow,`, + ` Anti-patterns), concrete commands and file paths drawn from the exchanges, no marketing.`, + ``, + `=== EXISTING PROJECT SKILLS ===`, + existing.block, + ``, + `=== RECENT EXCHANGES (prompt + answer pairs, tool calls already stripped) ===`, + renderPairsBlock(pairs), + ``, + `=== YOUR TASK ===`, + `Output your decision as a single JSON object. The worker will parse it.`, + `You may either:`, + ` (a) Write the JSON to this exact path using the Write tool: ${verdictPath}`, + ` (b) Print the JSON object to stdout (your final message), nothing else.`, + `Either path works; pick whichever you prefer. 
Do not do both.`, + ``, + `The JSON MUST have this shape:`, + `{`, + ` "verdict": "KEEP" | "SKIP" | "MERGE",`, + ` "name": "",`, + ` "description": "",`, + ` "trigger": "",`, + ` "body": "",`, + ` "reason": ""`, + `}`, + ``, + `For SKIP, only "verdict" and "reason" are required.`, + `If you print to stdout, do not include any prose before or after the JSON.`, + `Do not write any other files.` + ].join("\n"); +} +function readVerdict(stdout) { + if (existsSync5(verdictPath)) { + try { + const text = readFileSync3(verdictPath, "utf-8"); + const v2 = parseVerdict(text); + if (v2) + return { verdict: v2, source: "file" }; + return { verdict: null, source: `file-unparseable (${text.length} chars)` }; + } catch (e) { + return { verdict: null, source: `file-read-error: ${e.message}` }; + } + } + const v = parseVerdict(stdout); + if (v) + return { verdict: v, source: "stdout" }; + return { verdict: null, source: `no-file-no-stdout-json (stdout=${stdout.length} chars)` }; +} +function cleanup(keep) { + if (keep) { + wlog(`keeping tmpDir for inspection: ${tmpDir}`); + return; + } + try { + rmSync(tmpDir, { recursive: true, force: true }); + } catch (e) { + wlog(`cleanup failed: ${e.message}`); + } +} +var keepTmpForInspection = false; +async function main() { + try { + const state = readState(cfg.projectKey); + const lastDate = state?.lastDate ?? null; + wlog(`fetching candidate sessions (scope=${cfg.scope}, lastDate=${lastDate ?? 
"none"})`); + const candidates = await listCandidateSessions(lastDate); + const usable = candidates.filter((c) => !isCurrentSession(c.path)).slice(0, SESSIONS_TO_MINE); + if (usable.length === 0) { + wlog("no new sessions to mine \u2014 done"); + return; + } + wlog(`mining ${usable.length} sessions`); + const allPairs = []; + for (const c of usable) { + const rows = await fetchSessionRows(c.path); + const pairs = extractPairs(rows); + allPairs.push(...pairs); + } + if (allPairs.length === 0) { + wlog("no prompt/answer pairs after extraction \u2014 advancing watermark and exiting"); + const oldest2 = usable[usable.length - 1]; + advanceWatermark(cfg.projectKey, oldest2.path, oldest2.lastMsg); + return; + } + wlog(`extracted ${allPairs.length} pairs across ${usable.length} sessions`); + const prompt = buildPrompt(allPairs); + writeFileSync3(promptPath, prompt); + const gateAgent = cfg.gateAgent ?? cfg.agent; + wlog(`running gate (agent=${cfg.agent}, gateAgent=${gateAgent}, bin=${cfg.gateBin}, prompt=${prompt.length} chars)`); + const gate = runGate({ + agent: gateAgent, + prompt, + bin: cfg.gateBin, + cursorModel: cfg.cursorModel, + hermesProvider: cfg.hermesProvider, + hermesModel: cfg.hermesModel, + piProvider: cfg.piProvider, + piModel: cfg.piModel, + timeoutMs: 12e4 + }); + try { + writeFileSync3(join6(tmpDir, "gate-stdout.txt"), gate.stdout); + if (gate.stderr) + writeFileSync3(join6(tmpDir, "gate-stderr.txt"), gate.stderr); + } catch { + } + if (gate.errored) { + wlog(`gate failed: ${gate.errorMessage} (stdout=${gate.stdout.length}, stderr=${gate.stderr.length})`); + return; + } + wlog(`gate exited (code 0, stdout=${gate.stdout.length} chars)`); + const { verdict, source } = readVerdict(gate.stdout); + if (!verdict) { + wlog(`no parseable verdict (${source}) \u2014 treating as SKIP, advancing watermark`); + keepTmpForInspection = true; + const oldest2 = usable[usable.length - 1]; + advanceWatermark(cfg.projectKey, oldest2.path, oldest2.lastMsg); + return; + } + 
wlog(`verdict source: ${source}`); + wlog(`verdict=${verdict.verdict} name=${verdict.name ?? "-"} reason=${verdict.reason ?? "-"}`); + const oldest = usable[usable.length - 1]; + const watermarkUuid = (oldest.path.split("/").pop() ?? "").replace(/\.[^.]+$/, ""); + const watermarkDate = oldest.lastMsg; + const sourceSessions = usable.map((c) => (c.path.split("/").pop() ?? "").replace(/\.[^.]+$/, "")); + async function recordToDeeplake(result, verdict2) { + try { + await insertSkillRow({ + query, + tableName: cfg.skillsTable, + name: verdict2.name, + project: cfg.project, + projectKey: cfg.projectKey, + localPath: result.path, + install: cfg.install, + sourceSessions, + sourceAgent: cfg.agent, + scope: cfg.scope, + author: cfg.userName, + description: verdict2.description ?? "", + trigger: verdict2.trigger, + body: verdict2.body, + version: result.version, + createdAt: (/* @__PURE__ */ new Date()).toISOString(), + updatedAt: (/* @__PURE__ */ new Date()).toISOString() + }); + wlog(`recorded to skills table: name=${verdict2.name} v${result.version}`); + } catch (e) { + wlog(`skills table insert failed (non-fatal): ${e.message}`); + } + } + if (verdict.verdict === "KEEP" && verdict.name && verdict.body) { + try { + const result = writeNewSkill({ + skillsRoot: resolveSkillsRoot(cfg.install, cfg.cwd), + name: verdict.name, + description: verdict.description ?? 
"", + trigger: verdict.trigger, + body: verdict.body, + sourceSessions, + agent: cfg.agent + }); + wlog(`wrote new skill: ${result.path}`); + recordSkill(cfg.projectKey, verdict.name, watermarkUuid, watermarkDate); + await recordToDeeplake(result, verdict); + } catch (e) { + wlog(`writeNewSkill failed: ${e.message}`); + advanceWatermark(cfg.projectKey, watermarkUuid, watermarkDate); + } + } else if (verdict.verdict === "MERGE" && verdict.name && verdict.body) { + try { + const result = mergeSkill({ + skillsRoot: resolveSkillsRoot(cfg.install, cfg.cwd), + name: verdict.name, + description: verdict.description, + body: verdict.body, + newSourceSessions: sourceSessions, + agent: cfg.agent + }); + wlog(`merged into skill: ${result.path} (v${result.version})`); + recordSkill(cfg.projectKey, verdict.name, watermarkUuid, watermarkDate); + await recordToDeeplake(result, verdict); + } catch (e) { + if (/does not exist/i.test(e.message ?? "")) { + wlog(`mergeSkill target missing \u2014 falling back to writeNewSkill: ${verdict.name}`); + try { + const result = writeNewSkill({ + skillsRoot: resolveSkillsRoot(cfg.install, cfg.cwd), + name: verdict.name, + description: verdict.description ?? 
"", + trigger: verdict.trigger, + body: verdict.body, + sourceSessions, + agent: cfg.agent + }); + wlog(`wrote new skill (merge fallback): ${result.path}`); + recordSkill(cfg.projectKey, verdict.name, watermarkUuid, watermarkDate); + await recordToDeeplake(result, verdict); + } catch (e2) { + wlog(`writeNewSkill fallback also failed: ${e2.message}`); + advanceWatermark(cfg.projectKey, watermarkUuid, watermarkDate); + } + } else { + wlog(`mergeSkill failed: ${e.message}`); + advanceWatermark(cfg.projectKey, watermarkUuid, watermarkDate); + } + } + } else { + advanceWatermark(cfg.projectKey, watermarkUuid, watermarkDate); + } + } catch (e) { + wlog(`fatal: ${e.message}`); + } finally { + cleanup(keepTmpForInspection); + try { + releaseWorkerLock(cfg.projectKey); + } catch (e) { + wlog(`releaseWorkerLock failed: ${e.message}`); + } + } +} +main(); diff --git a/pi/extension-source/hivemind.ts b/pi/extension-source/hivemind.ts index 90ad14f0..83091418 100644 --- a/pi/extension-source/hivemind.ts +++ b/pi/extension-source/hivemind.ts @@ -205,11 +205,11 @@ function tryEmbedOverSocket(text: string, kind: "document" | "query"): Promise.lock` via - // src/skilify/state.ts:tryAcquireWorkerLock and releases it on exit (with + // src/skillify/state.ts:tryAcquireWorkerLock and releases it on exit (with // a 10-min stale-lock fallback). A spawn-side lock here would create a // SECOND lockfile (`.worker.lock`) that nobody releases, // permanently blocking subsequent spawns from the same Pi runtime @@ -483,12 +483,12 @@ function spawnPiSkilifyWorker(creds: Creds, sessionId: string, cwd: string): voi // back-to-back spawns where a worker is in flight cost only one extra // node cold-start (~50ms) before the worker self-skips on the lock. 
- const tmpDir = join(tmpdir(), `deeplake-skilify-${projectKey}-${Date.now()}`); + const tmpDir = join(tmpdir(), `deeplake-skillify-${projectKey}-${Date.now()}`); try { mkdirSync(tmpDir, { recursive: true, mode: 0o700 }); } - catch (e: any) { logHm(`spawnPiSkilifyWorker: mkdir failed: ${e?.message ?? e}`); return; } + catch (e: any) { logHm(`spawnPiSkillifyWorker: mkdir failed: ${e?.message ?? e}`); return; } const configPath = join(tmpDir, "config.json"); - // Same shape the spawn-skilify-worker.ts module writes for the other agents. + // Same shape the spawn-skillify-worker.ts module writes for the other agents. // Defaults match scope-config.ts: scope=me, install=project, no team list. // Pi-specific: no per-agent gate binary (`gateBin: null`) — the worker's // gate-runner falls back to its agent dispatch which for `agent: "pi"` @@ -516,21 +516,21 @@ function spawnPiSkilifyWorker(creds: Creds, sessionId: string, cwd: string): voi // pi-specific gate args — match wikiWorker config defaults (google + gemini-2.5-flash) piProvider: process.env.HIVEMIND_PI_PROVIDER ?? "google", piModel: process.env.HIVEMIND_PI_MODEL ?? "gemini-2.5-flash", - skilifyLog: join(homedir(), ".deeplake", "hivemind-pi-skilify.log"), + skillifyLog: join(homedir(), ".deeplake", "hivemind-pi-skillify.log"), currentSessionId: sessionId, }; try { writeFileSync(configPath, JSON.stringify(config), { mode: 0o600 }); } - catch (e: any) { logHm(`spawnPiSkilifyWorker: config write failed: ${e?.message ?? e}`); return; } + catch (e: any) { logHm(`spawnPiSkillifyWorker: config write failed: ${e?.message ?? 
e}`); return; } - logHm(`spawnPiSkilifyWorker: spawning ${PI_SKILIFY_WORKER_PATH} project=${project} key=${projectKey} session=${sessionId}`); + logHm(`spawnPiSkillifyWorker: spawning ${PI_SKILLIFY_WORKER_PATH} project=${project} key=${projectKey} session=${sessionId}`); try { - spawn(process.execPath, [PI_SKILIFY_WORKER_PATH, configPath], { + spawn(process.execPath, [PI_SKILLIFY_WORKER_PATH, configPath], { detached: true, stdio: "ignore", - env: { ...process.env, HIVEMIND_SKILIFY_WORKER: "1", HIVEMIND_CAPTURE: "false" }, + env: { ...process.env, HIVEMIND_SKILLIFY_WORKER: "1", HIVEMIND_CAPTURE: "false" }, }).unref(); } catch (e: any) { - logHm(`spawnPiSkilifyWorker: spawn failed: ${e?.message ?? e}`); + logHm(`spawnPiSkillifyWorker: spawn failed: ${e?.message ?? e}`); } } @@ -696,23 +696,23 @@ Organization management — each argument is SEPARATE (do NOT quote subcommands - hivemind members — list members - hivemind remove — remove member -SKILLS (skilify) — mine + share reusable skills across the org. 
Run these in a terminal (or via shell if available): -- hivemind skilify — show scope/team/install + per-project state -- hivemind skilify pull — sync project skills from the org table -- hivemind skilify pull --user — only that author's skills -- hivemind skilify pull --users a,b,c — multiple authors (CSV) -- hivemind skilify pull --all-users — explicit "no author filter" -- hivemind skilify pull --to project|global — install location -- hivemind skilify pull --dry-run — preview only -- hivemind skilify pull --force — overwrite local (creates .bak) -- hivemind skilify pull — pull only that skill (combines with --user) -- hivemind skilify unpull — remove every skill previously installed by pull -- hivemind skilify unpull --user — remove only that author's pulls -- hivemind skilify unpull --not-mine — remove all pulls except your own -- hivemind skilify unpull --dry-run — preview without touching disk -- hivemind skilify scope — sharing scope for new skills -- hivemind skilify install — default install location -- hivemind skilify team add|remove|list — manage team list`; +SKILLS (skillify) — mine + share reusable skills across the org. 
Run these in a terminal (or via shell if available):
+- hivemind skillify — show scope/team/install + per-project state
+- hivemind skillify pull — sync project skills from the org table
+- hivemind skillify pull --user <name> — only that author's skills
+- hivemind skillify pull --users a,b,c — multiple authors (CSV)
+- hivemind skillify pull --all-users — explicit "no author filter"
+- hivemind skillify pull --to project|global — install location
+- hivemind skillify pull --dry-run — preview only
+- hivemind skillify pull --force — overwrite local (creates .bak)
+- hivemind skillify pull <skill-name> — pull only that skill (combines with --user)
+- hivemind skillify unpull — remove every skill previously installed by pull
+- hivemind skillify unpull --user <name> — remove only that author's pulls
+- hivemind skillify unpull --not-mine — remove all pulls except your own
+- hivemind skillify unpull --dry-run — preview without touching disk
+- hivemind skillify scope <me|team|org> — sharing scope for new skills
+- hivemind skillify install <project|global> — default install location
+- hivemind skillify team add|remove|list — manage team list`;

 export default function hivemindExtension(pi: ExtensionAPI): void {
   const captureEnabled = process.env.HIVEMIND_CAPTURE !== "false";
@@ -1007,13 +1007,13 @@ export default function hivemindExtension(pi: ExtensionAPI): void {
       // skips if a periodic worker is mid-flight. Non-fatal either way.
       spawnWikiWorker(creds, sessionId, cwd, "final");
-      // Also kick off the skilify worker so this session's prompt+answer
+      // Also kick off the skillify worker so this session's prompt+answer
       // pairs become candidates for reusable skills. Lock keyed on
       // projectKey, not sessionId — multiple sessions in the same project
       // shouldn't race the gate. Non-fatal: failure here only loses the
       // mining for this one session, never breaks the wiki summary above.
-      try { spawnPiSkilifyWorker(creds, sessionId, cwd); }
-      catch (e: any) { logHm(`session_shutdown: skilify spawn threw: ${e?.message ??
e}`); } + try { spawnPiSkillifyWorker(creds, sessionId, cwd); } + catch (e: any) { logHm(`session_shutdown: skillify spawn threw: ${e?.message ?? e}`); } }); // Module-load breadcrumb so we know the extension's default export ran at all. diff --git a/src/cli/index.ts b/src/cli/index.ts index b6d18c0a..1a400d4e 100644 --- a/src/cli/index.ts +++ b/src/cli/index.ts @@ -7,7 +7,7 @@ import { installPi, uninstallPi } from "./install-pi.js"; import { enableEmbeddings, disableEmbeddings, statusEmbeddings } from "./embeddings.js"; import { ensureLoggedIn, isLoggedIn, maybeShowOrgChoice } from "./auth.js"; import { runAuthCommand } from "../commands/auth-login.js"; -import { runSkilifyCommand } from "../commands/skilify.js"; +import { runSkillifyCommand } from "../commands/skillify.js"; import { detectPlatforms, allPlatformIds, log, warn, type PlatformId } from "./util.js"; import { getVersion } from "./version.js"; import { runUpdate } from "./update.js"; @@ -64,8 +64,8 @@ Semantic search (embeddings): to run "embeddings install" automatically after installing the agent(s). Skill management (mine + share reusable Claude skills across the org): - hivemind skilify Show scope, team, install, and per-project state. - hivemind skilify pull [skill-name] Sync skills from the org table to local FS. + hivemind skillify Show scope, team, install, and per-project state. + hivemind skillify pull [skill-name] Sync skills from the org table to local FS. Options: --user , --users a,b,c, --all-users, --to , --dry-run, --force. @@ -75,17 +75,17 @@ Skill management (mine + share reusable Claude skills across the org): idempotent (skipped when local is at-or-newer than remote). Disable via HIVEMIND_AUTOPULL_DISABLED=1. - hivemind skilify unpull Remove skills previously installed by pull. + hivemind skillify unpull Remove skills previously installed by pull. Options: --user, --users, --not-mine, --to , --dry-run, --all (also locally-mined), --legacy-cleanup (pre-suffix-author dirs). 
- hivemind skilify scope Set the sharing scope for newly mined skills. - hivemind skilify install Set where new skills are written. - hivemind skilify promote Move a project skill to the global location. - hivemind skilify team add Add a username to the team list. - hivemind skilify team remove Remove a username from the team list. - hivemind skilify team list List current team members. + hivemind skillify scope Set the sharing scope for newly mined skills. + hivemind skillify install Set where new skills are written. + hivemind skillify promote Move a project skill to the global location. + hivemind skillify team add Add a username to the team list. + hivemind skillify team remove Remove a username from the team list. + hivemind skillify team list List current team members. Account / org / workspace: hivemind whoami Show current user, org, workspace. @@ -228,8 +228,8 @@ async function main(): Promise { process.exit(code); } - if (cmd === "skilify") { - runSkilifyCommand(args.slice(1)); + if (cmd === "skillify") { + runSkillifyCommand(args.slice(1)); return; } diff --git a/src/cli/install-pi.ts b/src/cli/install-pi.ts index f9e23d43..1ca4fba3 100644 --- a/src/cli/install-pi.ts +++ b/src/cli/install-pi.ts @@ -39,10 +39,10 @@ const VERSION_DIR = join(PI_AGENT_DIR, ".hivemind"); // install it as a separate file alongside. const WIKI_WORKER_DIR = join(PI_AGENT_DIR, "hivemind"); const WIKI_WORKER_PATH = join(WIKI_WORKER_DIR, "wiki-worker.js"); -// Skilify worker bundle, spawned by pi extension on session_shutdown to mine +// Skillify worker bundle, spawned by pi extension on session_shutdown to mine // reusable Claude skills from the just-finished session. Sibling of // wiki-worker.js so a single ensureDir + cleanup covers both. 
-const SKILIFY_WORKER_PATH = join(WIKI_WORKER_DIR, "skilify-worker.js"); +const SKILLIFY_WORKER_PATH = join(WIKI_WORKER_DIR, "skillify-worker.js"); // Autopull worker bundle, spawned synchronously by pi extension on // session_start to fetch all-author skills from the org table. Same // shared autoPullSkills() codex/cursor/hermes call directly; pi can't @@ -131,13 +131,13 @@ export function installPi(): void { copyFileSync(srcWorker, WIKI_WORKER_PATH); } - // 4. Skilify-worker bundle (spawned by extension on session_shutdown to + // 4. Skillify-worker bundle (spawned by extension on session_shutdown to // mine reusable skills from the finished session). Same dir as // wiki-worker, same shared ensureDir. - const srcSkilifyWorker = join(pkgRoot(), "pi", "bundle", "skilify-worker.js"); - if (existsSync(srcSkilifyWorker)) { + const srcSkillifyWorker = join(pkgRoot(), "pi", "bundle", "skillify-worker.js"); + if (existsSync(srcSkillifyWorker)) { ensureDir(WIKI_WORKER_DIR); - copyFileSync(srcSkilifyWorker, SKILIFY_WORKER_PATH); + copyFileSync(srcSkillifyWorker, SKILLIFY_WORKER_PATH); } // 5. Autopull-worker bundle (spawned synchronously by extension on @@ -156,8 +156,8 @@ export function installPi(): void { if (existsSync(WIKI_WORKER_PATH)) { log(` pi wiki-worker installed -> ${WIKI_WORKER_PATH}`); } - if (existsSync(SKILIFY_WORKER_PATH)) { - log(` pi skilify-worker installed -> ${SKILIFY_WORKER_PATH}`); + if (existsSync(SKILLIFY_WORKER_PATH)) { + log(` pi skillify-worker installed -> ${SKILLIFY_WORKER_PATH}`); } if (existsSync(AUTOPULL_WORKER_PATH)) { log(` pi autopull-worker installed -> ${AUTOPULL_WORKER_PATH}`); diff --git a/src/commands/skilify.ts b/src/commands/skillify.ts similarity index 77% rename from src/commands/skilify.ts rename to src/commands/skillify.ts index 7c39411d..7374aa2e 100644 --- a/src/commands/skilify.ts +++ b/src/commands/skillify.ts @@ -1,18 +1,18 @@ #!/usr/bin/env node /** - * CLI surface for skilify scope, team, install, and pull management. 
+ * CLI surface for skillify scope, team, install, and pull management.
  *
  * Usage:
- *   hivemind skilify — show current scope, team, status
- *   hivemind skilify scope <me|team|org> — set the mining scope
- *   hivemind skilify install <project|global> — set where new skills are written
- *   hivemind skilify promote <skill-name> — move a project skill to global
- *   hivemind skilify team add <username> — add a username to the team list
- *   hivemind skilify team remove <username> — remove a username from the team list
- *   hivemind skilify team list — list current team members
- *   hivemind skilify pull [skill-name] [opts] — fetch skills from Deeplake to local FS
- *   hivemind skilify status — show counter + per-project state
+ *   hivemind skillify — show current scope, team, status
+ *   hivemind skillify scope <me|team|org> — set the mining scope
+ *   hivemind skillify install <project|global> — set where new skills are written
+ *   hivemind skillify promote <skill-name> — move a project skill to global
+ *   hivemind skillify team add <username> — add a username to the team list
+ *   hivemind skillify team remove <username> — remove a username from the team list
+ *   hivemind skillify team list — list current team members
+ *   hivemind skillify pull [skill-name] [opts] — fetch skills from Deeplake to local FS
+ *   hivemind skillify status — show counter + per-project state
  *
  * The team list is consumed by the worker when scope=team: SQL filter
  * becomes `author IN (<list>)`.
scope=me filters by current user only; @@ -22,18 +22,18 @@ import { readdirSync, existsSync, readFileSync, mkdirSync, renameSync, rmSync } from "node:fs"; import { homedir } from "node:os"; import { dirname, join } from "node:path"; -import { loadScopeConfig, saveScopeConfig, type Scope, type InstallLocation } from "../skilify/scope-config.js"; -import { runPull, type PullSummary } from "../skilify/pull.js"; -import { runUnpull } from "../skilify/unpull.js"; +import { loadScopeConfig, saveScopeConfig, type Scope, type InstallLocation } from "../skillify/scope-config.js"; +import { runPull, type PullSummary } from "../skillify/pull.js"; +import { runUnpull } from "../skillify/unpull.js"; import { loadConfig } from "../config.js"; import { DeeplakeApi } from "../deeplake-api.js"; // Compute lazily so tests that swap `process.env.HOME` actually affect the // path. A module-level `const STATE_DIR = join(homedir(), ...)` would // capture the developer's real home at import time and bypass HOME -// isolation, causing test runs to read & pollute ~/.deeplake/state/skilify. +// isolation, causing test runs to read & pollute ~/.deeplake/state/skillify. function stateDir(): string { - return join(homedir(), ".deeplake", "state", "skilify"); + return join(homedir(), ".deeplake", "state", "skillify"); } function showStatus(): void { @@ -47,13 +47,18 @@ function showStatus(): void { console.log(`state: (no projects tracked yet)`); return; } - // Filter out skilify's own bookkeeping files. `config.json` is the - // scope/team/install settings; `pulled.json` is the unpull manifest — - // neither represents a "tracked project" and counting them inflates the - // status output (and the `for` loop below would JSON.parse them with the - // wrong shape and silently swallow the error). + // Filter out skillify's own bookkeeping files. 
`config.json` is the + // scope/team/install settings; `pulled.json` is the unpull manifest; + // `autopull-last-run.json` is the (now-removed) throttle file that pre- + // rename installs may still contain. None of these represent a "tracked + // project" and counting them inflates the status output (and the `for` + // loop below would JSON.parse them with the wrong shape). const files = readdirSync(dir).filter( - f => f.endsWith(".json") && f !== "config.json" && f !== "pulled.json", + f => + f.endsWith(".json") && + f !== "config.json" && + f !== "pulled.json" && + f !== "autopull-last-run.json", ); if (files.length === 0) { console.log(`state: (no projects tracked yet)`); @@ -63,10 +68,23 @@ function showStatus(): void { for (const f of files) { try { const s = JSON.parse(readFileSync(join(dir, f), "utf-8")) as { - project: string; counter: number; lastDate: string | null; skillsGenerated: string[]; + project: string; + counter: number; + lastDate?: string | null; + updatedAt?: number; + skillsGenerated?: string[]; }; - const skills = s.skillsGenerated.length === 0 ? "none" : s.skillsGenerated.join(", "); - console.log(` - ${s.project} (counter=${s.counter}, last=${s.lastDate ?? "never"}, skills=${skills})`); + // Prefer `updatedAt` (always written on counter bump) over `lastDate` + // (only written on a KEEP/MERGE mining verdict). An active project + // with no successful mining yet would otherwise show "last=never". + const last = + typeof s.updatedAt === "number" + ? new Date(s.updatedAt).toISOString() + : s.lastDate ?? "never"; + const skills = Array.isArray(s.skillsGenerated) && s.skillsGenerated.length > 0 + ? 
s.skillsGenerated.join(", ") + : "none"; + console.log(` - ${s.project} (counter=${s.counter}, last=${last}, skills=${skills})`); } catch { /* skip malformed */ } } } @@ -80,7 +98,7 @@ function setScope(scope: string): void { saveScopeConfig({ ...cfg, scope: scope as Scope }); console.log(`Scope set to '${scope}'.`); if (scope === "team" && cfg.team.length === 0) { - console.log(`Note: team list is empty. Use 'hivemind skilify team add ' to populate it.`); + console.log(`Note: team list is empty. Use 'hivemind skillify team add ' to populate it.`); } } @@ -96,7 +114,7 @@ function setInstall(loc: string): void { } function promoteSkill(name: string, cwd: string): void { - if (!name) { console.error("Usage: hivemind skilify promote "); process.exit(1); } + if (!name) { console.error("Usage: hivemind skillify promote "); process.exit(1); } const projectPath = join(cwd, ".claude", "skills", name); const globalPath = join(homedir(), ".claude", "skills", name); if (!existsSync(join(projectPath, "SKILL.md"))) { @@ -113,7 +131,7 @@ function promoteSkill(name: string, cwd: string): void { } function teamAdd(name: string): void { - if (!name) { console.error("Usage: hivemind skilify team add "); process.exit(1); } + if (!name) { console.error("Usage: hivemind skillify team add "); process.exit(1); } const cfg = loadScopeConfig(); if (cfg.team.includes(name)) { console.log(`'${name}' is already in the team list.`); @@ -125,7 +143,7 @@ function teamAdd(name: string): void { } function teamRemove(name: string): void { - if (!name) { console.error("Usage: hivemind skilify team remove "); process.exit(1); } + if (!name) { console.error("Usage: hivemind skillify team remove "); process.exit(1); } const cfg = loadScopeConfig(); if (!cfg.team.includes(name)) { console.log(`'${name}' is not in the team list.`); @@ -147,14 +165,14 @@ function teamList(): void { function usage(): void { console.log("Usage:"); - console.log(" hivemind skilify show current scope, team, install, and 
per-project state"); - console.log(" hivemind skilify scope set the mining scope"); - console.log(" hivemind skilify install set where new skills are written"); - console.log(" hivemind skilify promote move a project skill to the global location"); - console.log(" hivemind skilify team add add a username to the team list"); - console.log(" hivemind skilify team remove remove a username from the team list"); - console.log(" hivemind skilify team list list current team members"); - console.log(" hivemind skilify pull [skill-name] [opts] fetch skills from Deeplake to local FS"); + console.log(" hivemind skillify show current scope, team, install, and per-project state"); + console.log(" hivemind skillify scope set the mining scope"); + console.log(" hivemind skillify install set where new skills are written"); + console.log(" hivemind skillify promote move a project skill to the global location"); + console.log(" hivemind skillify team add add a username to the team list"); + console.log(" hivemind skillify team remove remove a username from the team list"); + console.log(" hivemind skillify team list list current team members"); + console.log(" hivemind skillify pull [skill-name] [opts] fetch skills from Deeplake to local FS"); console.log(" Options for pull:"); console.log(" --to destination (default: global)"); console.log(" --user only skills authored by this user"); @@ -162,7 +180,7 @@ function usage(): void { console.log(" --all-users all authors (default — equivalent to no filter)"); console.log(" --dry-run show what would be written, don't touch disk"); console.log(" --force overwrite even when local version >= remote"); - console.log(" hivemind skilify unpull [opts] remove skills previously installed by pull"); + console.log(" hivemind skillify unpull [opts] remove skills previously installed by pull"); console.log(" Options for unpull:"); console.log(" --to where to scan (default: global)"); console.log(" --user only entries authored by this user"); @@ 
-171,7 +189,7 @@ function usage(): void { console.log(" --dry-run show what would be removed"); console.log(" --all also remove flat-layout (locally-mined) entries"); console.log(" --legacy-cleanup also remove pre-`--author`-layout legacy `/` dirs"); - console.log(" hivemind skilify status show per-project state"); + console.log(" hivemind skillify status show per-project state"); } /** Parse a single string flag value out of `args`, removing the matched tokens. */ @@ -338,7 +356,7 @@ async function unpullSkills(args: string[]): Promise { console.log(`Result: ${summary.removed} removed, ${summary.wouldRemove} dry-run, ${summary.kept} kept${prunedNote}.`); } -export function runSkilifyCommand(args: string[]): void { +export function runSkillifyCommand(args: string[]): void { const sub = args[0]; if (!sub || sub === "status") { showStatus(); return; } if (sub === "scope") { setScope(args[1] ?? ""); return; } @@ -369,11 +387,11 @@ export function runSkilifyCommand(args: string[]): void { if (action === "add") { teamAdd(args[2] ?? ""); return; } if (action === "remove") { teamRemove(args[2] ?? ""); return; } if (action === "list") { teamList(); return; } - console.error("Usage: hivemind skilify team [name]"); + console.error("Usage: hivemind skillify team [name]"); process.exit(1); } if (sub === "--help" || sub === "-h" || sub === "help") { usage(); return; } - console.error(`Unknown skilify subcommand: ${sub}`); + console.error(`Unknown skillify subcommand: ${sub}`); usage(); process.exit(1); } @@ -381,6 +399,6 @@ export function runSkilifyCommand(args: string[]): void { // Run as a standalone script only when invoked directly via Node — not when // imported by the unified CLI (`bundle/cli.js`). Identify by the entry // script's filename, the same pattern auth-login.ts uses. 
-if (process.argv[1] && process.argv[1].endsWith("skilify.js")) { - runSkilifyCommand(process.argv.slice(2)); +if (process.argv[1] && process.argv[1].endsWith("skillify.js")) { + runSkillifyCommand(process.argv.slice(2)); } diff --git a/src/hooks/capture.ts b/src/hooks/capture.ts index a670421f..973b3a33 100644 --- a/src/hooks/capture.ts +++ b/src/hooks/capture.ts @@ -21,7 +21,7 @@ import { releaseLock, } from "./summary-state.js"; import { bundleDirFromImportMeta, spawnWikiWorker, wikiLog } from "./spawn-wiki-worker.js"; -import { tryStopCounterTrigger } from "../skilify/triggers.js"; +import { tryStopCounterTrigger } from "../skillify/triggers.js"; import { EmbedClient } from "../embeddings/client.js"; import { embeddingSqlLiteral } from "../embeddings/sql.js"; import { embeddingsDisabled } from "../embeddings/disable.js"; diff --git a/src/hooks/codex/session-start.ts b/src/hooks/codex/session-start.ts index 53a48ade..ab3d07ca 100644 --- a/src/hooks/codex/session-start.ts +++ b/src/hooks/codex/session-start.ts @@ -17,7 +17,7 @@ import { loadCredentials } from "../../commands/auth.js"; import { readStdin } from "../../utils/stdin.js"; import { log as _log } from "../../utils/debug.js"; import { getInstalledVersion } from "../../utils/version-check.js"; -import { autoPullSkills } from "../../skilify/auto-pull.js"; +import { autoPullSkills } from "../../skillify/auto-pull.js"; const log = (msg: string) => _log("codex-session-start", msg); const __bundleDir = dirname(fileURLToPath(import.meta.url)); @@ -53,23 +53,23 @@ Organization management — each argument is SEPARATE (do NOT quote subcommands - hivemind members — list members - hivemind remove — remove member -SKILLS (skilify) — mine + share reusable skills across the org: -- hivemind skilify — show scope/team/install + per-project state -- hivemind skilify pull — sync project skills from the org table -- hivemind skilify pull --user — only that author's skills -- hivemind skilify pull --users a,b,c — multiple 
authors (CSV) -- hivemind skilify pull --all-users — explicit "no author filter" -- hivemind skilify pull --to project|global — install location -- hivemind skilify pull --dry-run — preview only -- hivemind skilify pull --force — overwrite local (creates .bak) -- hivemind skilify pull — pull only that skill (combines with --user) -- hivemind skilify unpull — remove every skill previously installed by pull -- hivemind skilify unpull --user — remove only that author's pulls -- hivemind skilify unpull --not-mine — remove all pulls except your own -- hivemind skilify unpull --dry-run — preview without touching disk -- hivemind skilify scope — sharing scope for new skills -- hivemind skilify install — default install location -- hivemind skilify team add|remove|list — manage team list`; +SKILLS (skillify) — mine + share reusable skills across the org: +- hivemind skillify — show scope/team/install + per-project state +- hivemind skillify pull — sync project skills from the org table +- hivemind skillify pull --user — only that author's skills +- hivemind skillify pull --users a,b,c — multiple authors (CSV) +- hivemind skillify pull --all-users — explicit "no author filter" +- hivemind skillify pull --to project|global — install location +- hivemind skillify pull --dry-run — preview only +- hivemind skillify pull --force — overwrite local (creates .bak) +- hivemind skillify pull — pull only that skill (combines with --user) +- hivemind skillify unpull — remove every skill previously installed by pull +- hivemind skillify unpull --user — remove only that author's pulls +- hivemind skillify unpull --not-mine — remove all pulls except your own +- hivemind skillify unpull --dry-run — preview without touching disk +- hivemind skillify scope — sharing scope for new skills +- hivemind skillify install — default install location +- hivemind skillify team add|remove|list — manage team list`; interface CodexSessionStartInput { session_id: string; diff --git 
a/src/hooks/codex/stop.ts b/src/hooks/codex/stop.ts index edb39a11..01b98d94 100644 --- a/src/hooks/codex/stop.ts +++ b/src/hooks/codex/stop.ts @@ -20,7 +20,7 @@ import { DeeplakeApi } from "../../deeplake-api.js"; import { sqlStr } from "../../utils/sql.js"; import { log as _log } from "../../utils/debug.js"; import { bundleDirFromImportMeta, spawnCodexWikiWorker, wikiLog } from "./spawn-wiki-worker.js"; -import { forceSessionEndTrigger } from "../../skilify/triggers.js"; +import { forceSessionEndTrigger } from "../../skillify/triggers.js"; import { tryAcquireLock, releaseLock } from "../summary-state.js"; import { buildSessionPath } from "../../utils/session-path.js"; import { EmbedClient } from "../../embeddings/client.js"; @@ -167,7 +167,7 @@ async function main(): Promise { throw e; } - // Skilify: Codex Stop is the end-of-session signal (no separate SessionEnd + // Skillify: Codex Stop is the end-of-session signal (no separate SessionEnd // hook). Always force-fire — same shape as Claude Code's SessionEnd path. // The forceSessionEndTrigger helper resets the counter internally so the // mid-session Stop counter doesn't double-fire on the same window. diff --git a/src/hooks/cursor/capture.ts b/src/hooks/cursor/capture.ts index 0a521573..6394ff81 100644 --- a/src/hooks/cursor/capture.ts +++ b/src/hooks/cursor/capture.ts @@ -32,7 +32,7 @@ import { releaseLock, } from "../summary-state.js"; import { bundleDirFromImportMeta, spawnCursorWikiWorker, wikiLog } from "./spawn-wiki-worker.js"; -import { tryStopCounterTrigger } from "../../skilify/triggers.js"; +import { tryStopCounterTrigger } from "../../skillify/triggers.js"; import type { Config } from "../../config.js"; const log = (msg: string) => _log("cursor-capture", msg); @@ -166,13 +166,13 @@ async function main(): Promise { maybeTriggerPeriodicSummary(sessionId, cwd, config); - // Skilify Stop counter — afterAgentResponse is the assistant-complete event. 
+ // Skillify Stop counter — afterAgentResponse is the assistant-complete event. // Same guards as the wiki periodic trigger: don't fire when this capture - // is running INSIDE the wiki/skilify workers (their spawned CLI inherits + // is running INSIDE the wiki/skillify workers (their spawned CLI inherits // env vars and would otherwise loop). if (event === "afterAgentResponse" && process.env.HIVEMIND_WIKI_WORKER !== "1" && - process.env.HIVEMIND_SKILIFY_WORKER !== "1") { + process.env.HIVEMIND_SKILLIFY_WORKER !== "1") { tryStopCounterTrigger({ config, cwd, diff --git a/src/hooks/cursor/session-end.ts b/src/hooks/cursor/session-end.ts index fafed0be..927eefc7 100644 --- a/src/hooks/cursor/session-end.ts +++ b/src/hooks/cursor/session-end.ts @@ -15,7 +15,7 @@ import { log as _log } from "../../utils/debug.js"; import { loadConfig } from "../../config.js"; import { tryAcquireLock } from "../summary-state.js"; import { bundleDirFromImportMeta, spawnCursorWikiWorker, wikiLog } from "./spawn-wiki-worker.js"; -import { forceSessionEndTrigger } from "../../skilify/triggers.js"; +import { forceSessionEndTrigger } from "../../skillify/triggers.js"; const log = (msg: string) => _log("cursor-session-end", msg); @@ -42,7 +42,7 @@ async function main(): Promise { const config = loadConfig(); if (!config) { wikiLog(`SessionEnd: no config, skipping summary`); return; } - // Spawn the wiki and skilify workers independently — a failure of one + // Spawn the wiki and skillify workers independently — a failure of one // must not suppress the other. Each is wrapped in its own try. try { spawnCursorWikiWorker({ @@ -64,7 +64,7 @@ async function main(): Promise { sessionId, }); } catch (e: any) { - wikiLog(`SessionEnd: skilify trigger failed: ${e?.message ?? e}`); + wikiLog(`SessionEnd: skillify trigger failed: ${e?.message ?? 
e}`); } } diff --git a/src/hooks/cursor/session-start.ts b/src/hooks/cursor/session-start.ts index c5e497c0..04e49765 100644 --- a/src/hooks/cursor/session-start.ts +++ b/src/hooks/cursor/session-start.ts @@ -28,7 +28,7 @@ import { readStdin } from "../../utils/stdin.js"; import { log as _log } from "../../utils/debug.js"; import { getInstalledVersion } from "../../utils/version-check.js"; import { autoUpdate } from "../shared/autoupdate.js"; -import { autoPullSkills } from "../../skilify/auto-pull.js"; +import { autoPullSkills } from "../../skillify/auto-pull.js"; const log = (msg: string) => _log("cursor-session-start", msg); const __bundleDir = dirname(fileURLToPath(import.meta.url)); @@ -53,23 +53,23 @@ Organization management — each argument is SEPARATE (do NOT quote subcommands - hivemind members — list members - hivemind remove — remove member -SKILLS (skilify) — mine + share reusable skills across the org: -- hivemind skilify — show scope/team/install + per-project state -- hivemind skilify pull — sync project skills from the org table -- hivemind skilify pull --user — only that author's skills -- hivemind skilify pull --users a,b,c — multiple authors (CSV) -- hivemind skilify pull --all-users — explicit "no author filter" -- hivemind skilify pull --to project|global — install location -- hivemind skilify pull --dry-run — preview only -- hivemind skilify pull --force — overwrite local (creates .bak) -- hivemind skilify pull — pull only that skill (combines with --user) -- hivemind skilify unpull — remove every skill previously installed by pull -- hivemind skilify unpull --user — remove only that author's pulls -- hivemind skilify unpull --not-mine — remove all pulls except your own -- hivemind skilify unpull --dry-run — preview without touching disk -- hivemind skilify scope — sharing scope for new skills -- hivemind skilify install — default install location -- hivemind skilify team add|remove|list — manage team list`; +SKILLS (skillify) — mine + share 
reusable skills across the org: +- hivemind skillify — show scope/team/install + per-project state +- hivemind skillify pull — sync project skills from the org table +- hivemind skillify pull --user — only that author's skills +- hivemind skillify pull --users a,b,c — multiple authors (CSV) +- hivemind skillify pull --all-users — explicit "no author filter" +- hivemind skillify pull --to project|global — install location +- hivemind skillify pull --dry-run — preview only +- hivemind skillify pull --force — overwrite local (creates .bak) +- hivemind skillify pull — pull only that skill (combines with --user) +- hivemind skillify unpull — remove every skill previously installed by pull +- hivemind skillify unpull --user — remove only that author's pulls +- hivemind skillify unpull --not-mine — remove all pulls except your own +- hivemind skillify unpull --dry-run — preview without touching disk +- hivemind skillify scope — sharing scope for new skills +- hivemind skillify install — default install location +- hivemind skillify team add|remove|list — manage team list`; interface CursorSessionStartInput { session_id?: string; diff --git a/src/hooks/hermes/capture.ts b/src/hooks/hermes/capture.ts index b5c5ced7..f9d5c58f 100644 --- a/src/hooks/hermes/capture.ts +++ b/src/hooks/hermes/capture.ts @@ -33,7 +33,7 @@ import { releaseLock, } from "../summary-state.js"; import { bundleDirFromImportMeta, spawnHermesWikiWorker, wikiLog } from "./spawn-wiki-worker.js"; -import { tryStopCounterTrigger } from "../../skilify/triggers.js"; +import { tryStopCounterTrigger } from "../../skillify/triggers.js"; import type { Config } from "../../config.js"; const log = (msg: string) => _log("hermes-capture", msg); @@ -146,14 +146,14 @@ async function main(): Promise { maybeTriggerPeriodicSummary(sessionId, cwd, config); - // Skilify Stop counter — post_llm_call is the assistant-complete event. + // Skillify Stop counter — post_llm_call is the assistant-complete event. 
// Guard: don't fire when this capture is running INSIDE the wiki worker - // or skilify worker themselves (their spawned CLI inherits env vars and - // would otherwise loop). triggers.ts has the same SKILIFY_WORKER guard; + // or skillify worker themselves (their spawned CLI inherits env vars and + // would otherwise loop). triggers.ts has the same SKILLIFY_WORKER guard; // the WIKI_WORKER guard below covers the wiki-worker-calling-hermes case. if (event === "post_llm_call" && process.env.HIVEMIND_WIKI_WORKER !== "1" && - process.env.HIVEMIND_SKILIFY_WORKER !== "1") { + process.env.HIVEMIND_SKILLIFY_WORKER !== "1") { tryStopCounterTrigger({ config, cwd, diff --git a/src/hooks/hermes/session-end.ts b/src/hooks/hermes/session-end.ts index a570d6d3..eae598a1 100644 --- a/src/hooks/hermes/session-end.ts +++ b/src/hooks/hermes/session-end.ts @@ -10,7 +10,7 @@ import { log as _log } from "../../utils/debug.js"; import { loadConfig } from "../../config.js"; import { tryAcquireLock } from "../summary-state.js"; import { bundleDirFromImportMeta, spawnHermesWikiWorker, wikiLog } from "./spawn-wiki-worker.js"; -import { forceSessionEndTrigger } from "../../skilify/triggers.js"; +import { forceSessionEndTrigger } from "../../skillify/triggers.js"; const log = (msg: string) => _log("hermes-session-end", msg); @@ -35,7 +35,7 @@ async function main(): Promise { const cwd = input.cwd ?? process.cwd(); // Independent try blocks per worker — a failure in wiki spawn must not - // suppress the skilify trigger and vice versa. + // suppress the skillify trigger and vice versa. try { spawnHermesWikiWorker({ config, @@ -56,7 +56,7 @@ async function main(): Promise { sessionId, }); } catch (e: any) { - wikiLog(`SessionEnd: skilify trigger failed: ${e?.message ?? e}`); + wikiLog(`SessionEnd: skillify trigger failed: ${e?.message ?? 
e}`); } } diff --git a/src/hooks/hermes/session-start.ts b/src/hooks/hermes/session-start.ts index 07104a9c..81bd54bc 100644 --- a/src/hooks/hermes/session-start.ts +++ b/src/hooks/hermes/session-start.ts @@ -19,7 +19,7 @@ import { readStdin } from "../../utils/stdin.js"; import { log as _log } from "../../utils/debug.js"; import { getInstalledVersion } from "../../utils/version-check.js"; import { autoUpdate } from "../shared/autoupdate.js"; -import { autoPullSkills } from "../../skilify/auto-pull.js"; +import { autoPullSkills } from "../../skillify/auto-pull.js"; const log = (msg: string) => _log("hermes-session-start", msg); const __bundleDir = dirname(fileURLToPath(import.meta.url)); @@ -45,23 +45,23 @@ Organization management — each argument is SEPARATE (do NOT quote subcommands - hivemind members — list members - hivemind remove — remove member -SKILLS (skilify) — mine + share reusable skills across the org: -- hivemind skilify — show scope/team/install + per-project state -- hivemind skilify pull — sync project skills from the org table -- hivemind skilify pull --user — only that author's skills -- hivemind skilify pull --users a,b,c — multiple authors (CSV) -- hivemind skilify pull --all-users — explicit "no author filter" -- hivemind skilify pull --to project|global — install location -- hivemind skilify pull --dry-run — preview only -- hivemind skilify pull --force — overwrite local (creates .bak) -- hivemind skilify pull — pull only that skill (combines with --user) -- hivemind skilify unpull — remove every skill previously installed by pull -- hivemind skilify unpull --user — remove only that author's pulls -- hivemind skilify unpull --not-mine — remove all pulls except your own -- hivemind skilify unpull --dry-run — preview without touching disk -- hivemind skilify scope — sharing scope for new skills -- hivemind skilify install — default install location -- hivemind skilify team add|remove|list — manage team list`; +SKILLS (skillify) — mine + share 
reusable skills across the org: +- hivemind skillify — show scope/team/install + per-project state +- hivemind skillify pull — sync project skills from the org table +- hivemind skillify pull --user — only that author's skills +- hivemind skillify pull --users a,b,c — multiple authors (CSV) +- hivemind skillify pull --all-users — explicit "no author filter" +- hivemind skillify pull --to project|global — install location +- hivemind skillify pull --dry-run — preview only +- hivemind skillify pull --force — overwrite local (creates .bak) +- hivemind skillify pull — pull only that skill (combines with --user) +- hivemind skillify unpull — remove every skill previously installed by pull +- hivemind skillify unpull --user — remove only that author's pulls +- hivemind skillify unpull --not-mine — remove all pulls except your own +- hivemind skillify unpull --dry-run — preview without touching disk +- hivemind skillify scope — sharing scope for new skills +- hivemind skillify install — default install location +- hivemind skillify team add|remove|list — manage team list`; interface HermesSessionStartInput { hook_event_name?: string; diff --git a/src/hooks/session-end.ts b/src/hooks/session-end.ts index d9a7d331..859c7987 100644 --- a/src/hooks/session-end.ts +++ b/src/hooks/session-end.ts @@ -13,7 +13,7 @@ import { loadConfig, type Config } from "../config.js"; import { log as _log } from "../utils/debug.js"; import { bundleDirFromImportMeta, spawnWikiWorker, wikiLog } from "./spawn-wiki-worker.js"; import { tryAcquireLock, releaseLock } from "./summary-state.js"; -import { forceSessionEndTrigger } from "../skilify/triggers.js"; +import { forceSessionEndTrigger } from "../skillify/triggers.js"; const log = (msg: string) => _log("session-end", msg); diff --git a/src/hooks/session-start.ts b/src/hooks/session-start.ts index fc345749..b094225d 100644 --- a/src/hooks/session-start.ts +++ b/src/hooks/session-start.ts @@ -18,7 +18,7 @@ import { log as _log } from 
"../utils/debug.js"; import { getInstalledVersion } from "../utils/version-check.js"; import { makeWikiLogger } from "../utils/wiki-log.js"; import { autoUpdate } from "./shared/autoupdate.js"; -import { autoPullSkills } from "../skilify/auto-pull.js"; +import { autoPullSkills } from "../skillify/auto-pull.js"; const log = (msg: string) => _log("session-start", msg); const __bundleDir = dirname(fileURLToPath(import.meta.url)); @@ -59,23 +59,23 @@ Organization management — each argument is SEPARATE (do NOT quote subcommands - hivemind remove — remove member Skill management (mine + share reusable Claude skills across the org): -- hivemind skilify — show scope, team, install, per-project state -- hivemind skilify pull — sync project skills from the org table to local FS -- hivemind skilify pull --user — only skills authored by that user -- hivemind skilify pull --users — only skills from those authors -- hivemind skilify pull --all-users — explicit "no author filter" (default) -- hivemind skilify pull --to — install location (project=cwd/.claude/skills, global=~/.claude/skills) -- hivemind skilify pull --dry-run — preview without touching disk -- hivemind skilify pull --force — overwrite local files even if up-to-date (creates .bak) -- hivemind skilify pull — pull only that one skill (combines with --user) -- hivemind skilify unpull — remove every skill previously installed by pull -- hivemind skilify unpull --user — remove only that author's pulls -- hivemind skilify unpull --not-mine — remove all pulls except your own -- hivemind skilify unpull --dry-run — preview without touching disk -- hivemind skilify scope — sharing scope for newly mined skills -- hivemind skilify install — default install location for new skills -- hivemind skilify promote — move a project skill to the global location -- hivemind skilify team add|remove|list — manage team member list +- hivemind skillify — show scope, team, install, per-project state +- hivemind skillify pull — sync project 
skills from the org table to local FS +- hivemind skillify pull --user — only skills authored by that user +- hivemind skillify pull --users — only skills from those authors +- hivemind skillify pull --all-users — explicit "no author filter" (default) +- hivemind skillify pull --to — install location (project=cwd/.claude/skills, global=~/.claude/skills) +- hivemind skillify pull --dry-run — preview without touching disk +- hivemind skillify pull --force — overwrite local files even if up-to-date (creates .bak) +- hivemind skillify pull — pull only that one skill (combines with --user) +- hivemind skillify unpull — remove every skill previously installed by pull +- hivemind skillify unpull --user — remove only that author's pulls +- hivemind skillify unpull --not-mine — remove all pulls except your own +- hivemind skillify unpull --dry-run — preview without touching disk +- hivemind skillify scope — sharing scope for newly mined skills +- hivemind skillify install — default install location for new skills +- hivemind skillify promote — move a project skill to the global location +- hivemind skillify team add|remove|list — manage team member list IMPORTANT: Only use bash commands (cat, ls, grep, echo, jq, head, tail, etc.) to interact with ~/.deeplake/memory/. Do NOT use python, python3, node, curl, or other interpreters — they are not available in the memory filesystem. Avoid bash brace expansions like \`{1..10}\` (not fully supported); spell out paths explicitly. Bash output is capped at 10MB total — avoid \`for f in *.json; do cat $f\` style loops on the whole sessions dir. 
diff --git a/src/skilify/agent-roots.ts b/src/skillify/agent-roots.ts similarity index 100% rename from src/skilify/agent-roots.ts rename to src/skillify/agent-roots.ts diff --git a/src/skilify/auto-pull.ts b/src/skillify/auto-pull.ts similarity index 95% rename from src/skilify/auto-pull.ts rename to src/skillify/auto-pull.ts index ad5b5fe5..9304680c 100644 --- a/src/skilify/auto-pull.ts +++ b/src/skillify/auto-pull.ts @@ -1,8 +1,8 @@ /** * SessionStart auto-pull of skills from the org's `skills` Deeplake table. * - * Why: teammates mine reusable skills constantly via the skilify worker. Without - * an auto-pull, every user has to remember to run `hivemind skilify pull + * Why: teammates mine reusable skills constantly via the skillify worker. Without + * an auto-pull, every user has to remember to run `hivemind skillify pull * --all-users --to global` themselves. This module wires the pull into every * agent's SessionStart hook so freshly-mined skills become available without * manual intervention. @@ -22,7 +22,7 @@ * - Not-logged-in is a silent skip (no nag). * * Scope: install=global, users=[] (all-users), force=false. The result is - * exactly equivalent to `hivemind skilify pull --all-users --to global`. + * exactly equivalent to `hivemind skillify pull --all-users --to global`. 
*/ import { loadConfig, type Config } from "../config.js"; @@ -30,7 +30,7 @@ import { DeeplakeApi } from "../deeplake-api.js"; import { runPull, type QueryFn } from "./pull.js"; import { log as _log } from "../utils/debug.js"; -const log = (msg: string) => _log("skilify-autopull", msg); +const log = (msg: string) => _log("skillify-autopull", msg); const DEFAULT_TIMEOUT_MS = 5_000; diff --git a/src/skilify/autopull-worker.ts b/src/skillify/autopull-worker.ts similarity index 100% rename from src/skilify/autopull-worker.ts rename to src/skillify/autopull-worker.ts diff --git a/src/skilify/extractors/index.ts b/src/skillify/extractors/index.ts similarity index 100% rename from src/skilify/extractors/index.ts rename to src/skillify/extractors/index.ts diff --git a/src/skilify/gate-parser.ts b/src/skillify/gate-parser.ts similarity index 100% rename from src/skilify/gate-parser.ts rename to src/skillify/gate-parser.ts diff --git a/src/skilify/gate-runner.ts b/src/skillify/gate-runner.ts similarity index 100% rename from src/skilify/gate-runner.ts rename to src/skillify/gate-runner.ts diff --git a/src/skillify/legacy-migration.ts b/src/skillify/legacy-migration.ts new file mode 100644 index 00000000..832e7d11 --- /dev/null +++ b/src/skillify/legacy-migration.ts @@ -0,0 +1,48 @@ +/** + * One-time migration of the pre-rename state directory. + * + * Old: ~/.deeplake/state/skilify/ + * New: ~/.deeplake/state/skillify/ + * + * If the legacy directory exists and the new one does not, rename in place + * so installed-skill manifests, scope config, and per-project state survive + * the rename. + * + * Error policy: only swallow the documented fallback codes — `EXDEV` + * (cross-device link, e.g. `~/.deeplake` on a different mount than `/tmp`) + * and `EPERM` (sandboxed or read-only home). In those cases we leave the + * legacy dir in place and the new dir starts fresh — `pull` will repopulate + * `pulled.json` but pre-rename installs may need manual cleanup. 
Every + * other failure (`EIO`, `ENOSPC`, anything else) re-throws so the caller + * sees the I/O error instead of silently losing user state. + */ + +import { existsSync, renameSync } from "node:fs"; +import { homedir } from "node:os"; +import { join } from "node:path"; +import { log as _log } from "../utils/debug.js"; + +const dlog = (msg: string) => _log("skillify-migrate", msg); + +let attempted = false; + +export function migrateLegacyStateDir(): void { + if (attempted) return; + attempted = true; + const root = join(homedir(), ".deeplake", "state"); + const legacy = join(root, "skilify"); + const current = join(root, "skillify"); + if (!existsSync(legacy)) return; + if (existsSync(current)) return; + try { + renameSync(legacy, current); + dlog(`migrated ${legacy} -> ${current}`); + } catch (err) { + const code = (err as NodeJS.ErrnoException).code; + if (code === "EXDEV" || code === "EPERM") { + dlog(`migration failed (${code}); leaving legacy dir in place`); + return; + } + throw err; + } +} diff --git a/src/skilify/manifest.ts b/src/skillify/manifest.ts similarity index 96% rename from src/skilify/manifest.ts rename to src/skillify/manifest.ts index 2c325e05..33c38ee3 100644 --- a/src/skilify/manifest.ts +++ b/src/skillify/manifest.ts @@ -1,5 +1,5 @@ /** - * Manifest of skills installed via `hivemind skilify pull`. + * Manifest of skills installed via `hivemind skillify pull`. * * Why a manifest instead of just heuristics on directory names: * the `--/` convention used by `pull` is a legitimate @@ -8,9 +8,9 @@ * a pull-managed entry" purely from the presence of `--` would let * `unpull` accidentally remove user-authored skills with that naming * style. The manifest gives `unpull` an authoritative list of what - * skilify actually wrote, so anything outside that list is left alone. + * skillify actually wrote, so anything outside that list is left alone. 
* - * File: ~/.deeplake/state/skilify/pulled.json + * File: ~/.deeplake/state/skillify/pulled.json * * Atomicity: writes go to a sibling .tmp file and rename in place, so * a crash mid-write leaves either the pre-write state or the new state @@ -20,6 +20,7 @@ import { existsSync, lstatSync, mkdirSync, readFileSync, renameSync, unlinkSync, writeFileSync } from "node:fs"; import { homedir } from "node:os"; import { dirname, join } from "node:path"; +import { migrateLegacyStateDir } from "./legacy-migration.js"; import type { InstallLocation } from "./scope-config.js"; export interface PulledEntry { @@ -63,10 +64,11 @@ function emptyManifest(): PulledManifest { } export function manifestPath(): string { - return join(homedir(), ".deeplake", "state", "skilify", "pulled.json"); + return join(homedir(), ".deeplake", "state", "skillify", "pulled.json"); } export function loadManifest(path: string = manifestPath()): PulledManifest { + migrateLegacyStateDir(); if (!existsSync(path)) return emptyManifest(); let raw: string; try { raw = readFileSync(path, "utf-8"); } @@ -128,6 +130,7 @@ export function loadManifest(path: string = manifestPath()): PulledManifest { } export function saveManifest(m: PulledManifest, path: string = manifestPath()): void { + migrateLegacyStateDir(); mkdirSync(dirname(path), { recursive: true }); const tmp = `${path}.tmp`; writeFileSync(tmp, JSON.stringify(m, null, 2) + "\n", { mode: 0o600 }); diff --git a/src/skilify/pull.ts b/src/skillify/pull.ts similarity index 100% rename from src/skilify/pull.ts rename to src/skillify/pull.ts diff --git a/src/skilify/scope-config.ts b/src/skillify/scope-config.ts similarity index 81% rename from src/skilify/scope-config.ts rename to src/skillify/scope-config.ts index a90ccadd..d6f0cda9 100644 --- a/src/skilify/scope-config.ts +++ b/src/skillify/scope-config.ts @@ -1,17 +1,18 @@ /** - * Persisted scope + team membership for the skilify worker. + * Persisted scope + team membership for the skillify worker. 
* - * File: ~/.deeplake/state/skilify/config.json + * File: ~/.deeplake/state/skillify/config.json * { scope: "me" | "team" | "org", team: string[] } * * Defaults to scope "me" with an empty team list when the file is absent - * or unreadable. The `hivemind skilify` CLI (src/commands/skilify.ts) is + * or unreadable. The `hivemind skillify` CLI (src/commands/skillify.ts) is * the only writer; the worker hook reads. */ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; import { homedir } from "node:os"; import { join } from "node:path"; +import { migrateLegacyStateDir } from "./legacy-migration.js"; export type Scope = "me" | "team" | "org"; export type InstallLocation = "project" | "global"; @@ -27,12 +28,13 @@ export interface ScopeConfig { install: InstallLocation; } -const STATE_DIR = join(homedir(), ".deeplake", "state", "skilify"); +const STATE_DIR = join(homedir(), ".deeplake", "state", "skillify"); const CONFIG_PATH = join(STATE_DIR, "config.json"); const DEFAULT: ScopeConfig = { scope: "me", team: [], install: "project" }; export function loadScopeConfig(): ScopeConfig { + migrateLegacyStateDir(); if (!existsSync(CONFIG_PATH)) return DEFAULT; try { const raw = JSON.parse(readFileSync(CONFIG_PATH, "utf-8")); @@ -48,6 +50,7 @@ export function loadScopeConfig(): ScopeConfig { } export function saveScopeConfig(cfg: ScopeConfig): void { + migrateLegacyStateDir(); mkdirSync(STATE_DIR, { recursive: true }); writeFileSync(CONFIG_PATH, JSON.stringify(cfg, null, 2)); } diff --git a/src/skilify/skill-writer.ts b/src/skillify/skill-writer.ts similarity index 100% rename from src/skilify/skill-writer.ts rename to src/skillify/skill-writer.ts diff --git a/src/skilify/skilify-worker.ts b/src/skillify/skillify-worker.ts similarity index 98% rename from src/skilify/skilify-worker.ts rename to src/skillify/skillify-worker.ts index 8a4cff56..6fa974a8 100644 --- a/src/skilify/skilify-worker.ts +++ b/src/skillify/skillify-worker.ts @@ -1,14 +1,14 @@ 
#!/usr/bin/env node /** - * Background skilify worker. + * Background skillify worker. * * Pulls the last N sessions from Deeplake in the configured scope, strips * tool calls / thinking, asks Haiku whether the recent activity warrants a * new or merged skill, and writes the result under the project's * .claude/skills directory. * - * Invoked by the capture hook as: node skilify-worker.js + * Invoked by the capture hook as: node skillify-worker.js */ import { readFileSync, writeFileSync, existsSync, appendFileSync, mkdirSync, rmSync } from "node:fs"; @@ -66,7 +66,7 @@ interface WorkerConfig { hermesModel?: string; piProvider?: string; piModel?: string; - skilifyLog: string; + skillifyLog: string; currentSessionId?: string; } @@ -82,7 +82,7 @@ const EXISTING_SKILLS_CHAR_CAP = 30_000; function wlog(msg: string): void { try { - appendFileSync(cfg.skilifyLog, `[${utcTimestamp()}] skilify-worker(${cfg.projectKey}): ${msg}\n`); + appendFileSync(cfg.skillifyLog, `[${utcTimestamp()}] skillify-worker(${cfg.projectKey}): ${msg}\n`); } catch { /* ignore */ } } diff --git a/src/skilify/skills-table.ts b/src/skillify/skills-table.ts similarity index 100% rename from src/skilify/skills-table.ts rename to src/skillify/skills-table.ts diff --git a/src/skilify/spawn-skilify-worker.ts b/src/skillify/spawn-skillify-worker.ts similarity index 79% rename from src/skilify/spawn-skilify-worker.ts rename to src/skillify/spawn-skillify-worker.ts index 91190f41..9bf6e78f 100644 --- a/src/skilify/spawn-skilify-worker.ts +++ b/src/skillify/spawn-skillify-worker.ts @@ -1,5 +1,5 @@ /** - * Spawn a detached skilify worker. Mirror of spawn-wiki-worker.ts. + * Spawn a detached skillify worker. Mirror of spawn-wiki-worker.ts. * * The hook calls this when the per-project Stop counter crosses the * threshold. 
It writes a config JSON to tmpdir, spawns the worker, @@ -17,12 +17,12 @@ import { utcTimestamp } from "../utils/debug.js"; import { findAgentBin, type Agent } from "./gate-runner.js"; const HOME = homedir(); -export const SKILIFY_LOG = join(HOME, ".claude", "hooks", "skilify.log"); +export const SKILLIFY_LOG = join(HOME, ".claude", "hooks", "skillify.log"); -export function skilifyLog(msg: string): void { +export function skillifyLog(msg: string): void { try { - mkdirSync(dirname(SKILIFY_LOG), { recursive: true }); - appendFileSync(SKILIFY_LOG, `[${utcTimestamp()}] ${msg}\n`); + mkdirSync(dirname(SKILLIFY_LOG), { recursive: true }); + appendFileSync(SKILLIFY_LOG, `[${utcTimestamp()}] ${msg}\n`); } catch { /* ignore */ } } @@ -30,7 +30,7 @@ export function skilifyLog(msg: string): void { export type { Scope, InstallLocation, ScopeConfig } from "./scope-config.js"; import type { ScopeConfig } from "./scope-config.js"; -export interface SkilifySpawnOptions { +export interface SkillifySpawnOptions { config: Config; cwd: string; projectKey: string; @@ -43,14 +43,14 @@ export interface SkilifySpawnOptions { reason: string; } -export function spawnSkilifyWorker(opts: SkilifySpawnOptions): void { +export function spawnSkillifyWorker(opts: SkillifySpawnOptions): void { const { config, cwd, projectKey, project, bundleDir, agent, scopeConfig, currentSessionId, reason } = opts; - const tmpDir = join(tmpdir(), `deeplake-skilify-${projectKey}-${Date.now()}`); + const tmpDir = join(tmpdir(), `deeplake-skillify-${projectKey}-${Date.now()}`); // mode 0o700: tmpDir holds config.json with the user's full-org Deeplake API token. // The file itself is written 0o600 below, but a world-readable directory still // leaks the file's existence + name to other users on the host. Mirror of the - // Pi extension's spawnPiSkilifyWorker which already uses 0o700. + // Pi extension's spawnPiSkillifyWorker which already uses 0o700. 
mkdirSync(tmpDir, { recursive: true, mode: 0o700 }); // Resolve the gate CLI for this agent up front (faster cold-start in the @@ -83,22 +83,22 @@ export function spawnSkilifyWorker(opts: SkilifySpawnOptions): void { hermesModel: process.env.HIVEMIND_HERMES_MODEL, piProvider: process.env.HIVEMIND_PI_PROVIDER, piModel: process.env.HIVEMIND_PI_MODEL, - skilifyLog: SKILIFY_LOG, + skillifyLog: SKILLIFY_LOG, currentSessionId, }), { mode: 0o600 }); // chmod again as a belt-and-suspenders against umask weirdness — some // file systems / overlay setups strip mode bits on the initial create. try { chmodSync(configFile, 0o600); } catch { /* best effort */ } - skilifyLog(`${reason}: spawning skilify worker for project=${project} key=${projectKey}`); + skillifyLog(`${reason}: spawning skillify worker for project=${project} key=${projectKey}`); - const workerPath = join(bundleDir, "skilify-worker.js"); + const workerPath = join(bundleDir, "skillify-worker.js"); spawn("nohup", ["node", workerPath, configFile], { detached: true, stdio: ["ignore", "ignore", "ignore"], }).unref(); - skilifyLog(`${reason}: spawned skilify worker for ${projectKey}`); + skillifyLog(`${reason}: spawned skillify worker for ${projectKey}`); } export function bundleDirFromImportMeta(importMetaUrl: string): string { diff --git a/src/skilify/state.ts b/src/skillify/state.ts similarity index 85% rename from src/skilify/state.ts rename to src/skillify/state.ts index 7977bf72..737e89df 100644 --- a/src/skilify/state.ts +++ b/src/skillify/state.ts @@ -1,7 +1,7 @@ /** - * Per-project state for the skilify worker. + * Per-project state for the skillify worker. 
* - * File: ~/.deeplake/state/skilify/.json + * File: ~/.deeplake/state/skillify/.json * { * project: string, // human-readable project name * projectKey: string, // stable id derived from git remote or cwd hash @@ -25,10 +25,11 @@ import { homedir } from "node:os"; import { createHash } from "node:crypto"; import { join, basename } from "node:path"; import { log as _log } from "../utils/debug.js"; +import { migrateLegacyStateDir } from "./legacy-migration.js"; -const dlog = (msg: string) => _log("skilify-state", msg); +const dlog = (msg: string) => _log("skillify-state", msg); -export interface SkilifyState { +export interface SkillifyState { project: string; projectKey: string; counter: number; @@ -38,11 +39,11 @@ export interface SkilifyState { updatedAt: number; } -const STATE_DIR = join(homedir(), ".deeplake", "state", "skilify"); +const STATE_DIR = join(homedir(), ".deeplake", "state", "skillify"); const YIELD_BUF = new Int32Array(new SharedArrayBuffer(4)); export const TRIGGER_THRESHOLD = (() => { - const n = Number(process.env.HIVEMIND_SKILIFY_EVERY_N_TURNS ?? ""); + const n = Number(process.env.HIVEMIND_SKILLIFY_EVERY_N_TURNS ?? ""); return Number.isInteger(n) && n > 0 ? n : 20; })(); @@ -72,17 +73,23 @@ export function deriveProjectKey(cwd: string): { key: string; project: string } return { key, project }; } -export function readState(projectKey: string): SkilifyState | null { +export function readState(projectKey: string): SkillifyState | null { + // Workers call readState() first to find the session watermark. Without + // migration here, a post-rename run sees an empty `skillify/` dir while + // the data still lives at `skilify/.json` — and the worker would + // re-mine sessions it has already processed. 
+ migrateLegacyStateDir(); const p = statePath(projectKey); if (!existsSync(p)) return null; try { - return JSON.parse(readFileSync(p, "utf-8")) as SkilifyState; + return JSON.parse(readFileSync(p, "utf-8")) as SkillifyState; } catch { return null; } } -export function writeState(projectKey: string, state: SkilifyState): void { +export function writeState(projectKey: string, state: SkillifyState): void { + migrateLegacyStateDir(); mkdirSync(STATE_DIR, { recursive: true }); const p = statePath(projectKey); const tmp = `${p}.${process.pid}.${Date.now()}.tmp`; @@ -91,6 +98,7 @@ export function writeState(projectKey: string, state: SkilifyState): void { } export function withRmwLock(projectKey: string, fn: () => T): T { + migrateLegacyStateDir(); mkdirSync(STATE_DIR, { recursive: true }); const rmw = lockPath(projectKey) + ".rmw"; const deadline = Date.now() + 2000; @@ -123,11 +131,11 @@ export function withRmwLock(projectKey: string, fn: () => T): T { * Increment the Stop counter for a project. Initializes state on first call. * Returns the resulting state. */ -export function bumpStopCounter(cwd: string): SkilifyState { +export function bumpStopCounter(cwd: string): SkillifyState { const { key, project } = deriveProjectKey(cwd); return withRmwLock(key, () => { const existing = readState(key); - const next: SkilifyState = existing + const next: SkillifyState = existing ? { ...existing, counter: existing.counter + 1, updatedAt: Date.now() } : { project, @@ -198,6 +206,7 @@ export function advanceWatermark( /** Cross-project lock so a single worker fires at a time per project. 
*/ export function tryAcquireWorkerLock(projectKey: string, maxAgeMs = 10 * 60 * 1000): boolean { + migrateLegacyStateDir(); mkdirSync(STATE_DIR, { recursive: true }); const p = lockPath(projectKey); if (existsSync(p)) { diff --git a/src/skilify/triggers.ts b/src/skillify/triggers.ts similarity index 74% rename from src/skilify/triggers.ts rename to src/skillify/triggers.ts index 4eb427eb..660db584 100644 --- a/src/skilify/triggers.ts +++ b/src/skillify/triggers.ts @@ -8,13 +8,13 @@ * regardless of counter (catches tail-of-session knowledge that the * mid-session counter trigger would miss). * - * Both are no-ops when cwd is empty or HIVEMIND_SKILIFY_WORKER=1 (recursion + * Both are no-ops when cwd is empty or HIVEMIND_SKILLIFY_WORKER=1 (recursion * guard). Both spawn the worker as a detached subprocess via - * spawnSkilifyWorker; they never block the calling hook. + * spawnSkillifyWorker; they never block the calling hook. */ import type { Config } from "../config.js"; -import { spawnSkilifyWorker, skilifyLog } from "./spawn-skilify-worker.js"; +import { spawnSkillifyWorker, skillifyLog } from "./spawn-skillify-worker.js"; import { bumpStopCounter, resetCounter, @@ -40,7 +40,7 @@ export interface TriggerOptions { * (Stop, afterAgentResponse, post_llm_call, etc. depending on agent). 
*/ export function tryStopCounterTrigger(opts: TriggerOptions): void { - if (process.env.HIVEMIND_SKILIFY_WORKER === "1") return; + if (process.env.HIVEMIND_SKILLIFY_WORKER === "1") return; if (!opts.cwd) return; try { @@ -48,15 +48,15 @@ export function tryStopCounterTrigger(opts: TriggerOptions): void { if (state.counter < TRIGGER_THRESHOLD) return; if (!tryAcquireWorkerLock(state.projectKey)) { - skilifyLog(`Stop: trigger suppressed (worker lock held) project=${state.project}`); + skillifyLog(`Stop: trigger suppressed (worker lock held) project=${state.project}`); return; } - skilifyLog(`Stop: threshold hit (counter=${state.counter}, N=${TRIGGER_THRESHOLD}) project=${state.project} agent=${opts.agent}`); + skillifyLog(`Stop: threshold hit (counter=${state.counter}, N=${TRIGGER_THRESHOLD}) project=${state.project} agent=${opts.agent}`); resetCounter(state.projectKey); try { - spawnSkilifyWorker({ + spawnSkillifyWorker({ config: opts.config, cwd: opts.cwd, projectKey: state.projectKey, @@ -68,11 +68,11 @@ export function tryStopCounterTrigger(opts: TriggerOptions): void { reason: "Stop", }); } catch (e: any) { - skilifyLog(`Stop spawn failed: ${e?.message ?? e}`); + skillifyLog(`Stop spawn failed: ${e?.message ?? e}`); try { releaseWorkerLock(state.projectKey); } catch { /* best effort */ } } } catch (e: any) { - skilifyLog(`Stop trigger error: ${e?.message ?? e}`); + skillifyLog(`Stop trigger error: ${e?.message ?? e}`); } } @@ -82,14 +82,14 @@ export function tryStopCounterTrigger(opts: TriggerOptions): void { * doesn't fire reliably (e.g. claude -p one-shot). 
*/ export function forceSessionEndTrigger(opts: TriggerOptions): void { - if (process.env.HIVEMIND_SKILIFY_WORKER === "1") return; + if (process.env.HIVEMIND_SKILLIFY_WORKER === "1") return; if (!opts.cwd) return; try { const { key: projectKey, project } = deriveProjectKey(opts.cwd); if (!tryAcquireWorkerLock(projectKey)) { - skilifyLog(`SessionEnd: skilify worker already running for ${projectKey}, skipping`); + skillifyLog(`SessionEnd: skillify worker already running for ${projectKey}, skipping`); return; } @@ -99,9 +99,9 @@ export function forceSessionEndTrigger(opts: TriggerOptions): void { resetCounter(projectKey); } - skilifyLog(`SessionEnd: spawning skilify worker for project=${project} agent=${opts.agent}`); + skillifyLog(`SessionEnd: spawning skillify worker for project=${project} agent=${opts.agent}`); try { - spawnSkilifyWorker({ + spawnSkillifyWorker({ config: opts.config, cwd: opts.cwd, projectKey, @@ -113,10 +113,10 @@ export function forceSessionEndTrigger(opts: TriggerOptions): void { reason: "SessionEnd", }); } catch (e: any) { - skilifyLog(`SessionEnd spawn failed: ${e?.message ?? e}`); + skillifyLog(`SessionEnd spawn failed: ${e?.message ?? e}`); try { releaseWorkerLock(projectKey); } catch { /* best effort */ } } } catch (e: any) { - skilifyLog(`SessionEnd trigger error: ${e?.message ?? e}`); + skillifyLog(`SessionEnd trigger error: ${e?.message ?? e}`); } } diff --git a/src/skilify/unpull.ts b/src/skillify/unpull.ts similarity index 98% rename from src/skilify/unpull.ts rename to src/skillify/unpull.ts index 6c13741f..e3f2b0ca 100644 --- a/src/skilify/unpull.ts +++ b/src/skillify/unpull.ts @@ -1,7 +1,7 @@ /** - * Remove skills previously installed by `hivemind skilify pull`. + * Remove skills previously installed by `hivemind skillify pull`. * - * Source of truth: `~/.deeplake/state/skilify/pulled.json` (the manifest + * Source of truth: `~/.deeplake/state/skillify/pulled.json` (the manifest * written by pull.ts). 
Entries on disk that are NOT in the manifest are * never touched by default — even if their directory name follows the * `--` convention. This protects user-authored skills that @@ -16,7 +16,7 @@ * locally-mined skills are not tracked there. Destructive, * documented as such in usage. * - --legacy-cleanup: scan disk for pre-`--author` dirs of the shape - * `<16-hex>/` (old project_key layout from skilify ≤ v0.7.13) + * `<16-hex>/` (old project_key layout from skillify ≤ v0.7.13) * and remove them. * * Drift handling: a manifest entry whose `installRoot/` no longer diff --git a/vitest.config.ts b/vitest.config.ts index d04b86fc..26cfbdef 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -42,20 +42,20 @@ export default defineConfig({ // unit tests don't make sense. These files have subprocess-spawn // coverage via claude-code/tests/shell-bundle-*.test.ts instead. "src/shell/deeplake-shell.ts", - // Skilify worker entry points: skilify-worker.ts parses cfg from + // Skillify worker entry points: skillify-worker.ts parses cfg from // process.argv[2] at top level then runs main() which spawns - // detached subprocesses; spawn-skilify-worker.ts is the spawner. + // detached subprocesses; spawn-skillify-worker.ts is the spawner. // Both are excluded from vitest because they need a live Deeplake // workspace + a real agent CLI to exercise meaningfully. // Coverage on the SHIPPED bundle is enforced indirectly by - // claude-code/tests/skilify-bundle-scan.test.ts (asserts the - // skilify-worker.js bundle exists per agent and contains the + // claude-code/tests/skillify-bundle-scan.test.ts (asserts the + // skillify-worker.js bundle exists per agent and contains the // required entry strings + agent labels). For full e2e in // development, see the manual matrix script described in the - // PR description (lives at /tmp/skilify-e2e-matrix.mjs in the + // PR description (lives at /tmp/skillify-e2e-matrix.mjs in the // author's worktree, not committed). 
- "src/skilify/skilify-worker.ts", - "src/skilify/spawn-skilify-worker.ts", + "src/skillify/skillify-worker.ts", + "src/skillify/spawn-skillify-worker.ts", ], // Per-file thresholds. Each PR that ships new files should append // its paths here with 80 / 80 / 80 / 80, so we prevent regressions @@ -251,7 +251,7 @@ export default defineConfig({ // + lines + functions held at 80; the gating + output-parsing logic // (the actually-load-bearing surface) is exhaustively tested. "src/hooks/shared/autoupdate.ts": { statements: 80, branches: 80, functions: 80, lines: 80 }, - // feat/skilify — background skill-mining worker + CLI surface + + // feat/skillify — background skill-mining worker + CLI surface + // per-agent gate dispatch + Deeplake skills table for org provenance. // Most modules cleanly hit 90/90/90/90; the trio below sits a touch // lower on branches because their happy paths are well-covered but a @@ -259,17 +259,21 @@ export default defineConfig({ // inside detached subprocesses) are pragmatic to leave at 75-80. // feat/session-start-autopull-skills — auto-pull all-author skills // at every SessionStart, throttled + bounded. 
- "src/skilify/auto-pull.ts": { statements: 90, branches: 70, functions: 90, lines: 90 }, - "src/skilify/extractors/index.ts": { statements: 90, branches: 90, functions: 90, lines: 90 }, - "src/skilify/gate-parser.ts": { statements: 90, branches: 90, functions: 90, lines: 90 }, - "src/skilify/gate-runner.ts": { statements: 90, branches: 60, functions: 90, lines: 90 }, - "src/skilify/pull.ts": { statements: 90, branches: 75, functions: 90, lines: 90 }, - "src/skilify/scope-config.ts": { statements: 90, branches: 90, functions: 90, lines: 90 }, - "src/skilify/skill-writer.ts": { statements: 90, branches: 80, functions: 90, lines: 90 }, - "src/skilify/skills-table.ts": { statements: 90, branches: 70, functions: 90, lines: 90 }, - "src/skilify/state.ts": { statements: 80, branches: 70, functions: 90, lines: 80 }, - "src/skilify/triggers.ts": { statements: 80, branches: 70, functions: 90, lines: 80 }, - "src/commands/skilify.ts": { statements: 80, branches: 70, functions: 80, lines: 80 }, + "src/skillify/auto-pull.ts": { statements: 90, branches: 70, functions: 90, lines: 90 }, + "src/skillify/extractors/index.ts": { statements: 90, branches: 90, functions: 90, lines: 90 }, + "src/skillify/gate-parser.ts": { statements: 90, branches: 90, functions: 90, lines: 90 }, + "src/skillify/gate-runner.ts": { statements: 90, branches: 60, functions: 90, lines: 90 }, + // One-shot legacy state-dir migration. Branches at 80 because the + // EXDEV/EPERM error-recovery branch is mocked via vi.doMock("node:fs") + // and the uncaught-rethrow branch covers everything else implicitly. 
+ "src/skillify/legacy-migration.ts": { statements: 90, branches: 80, functions: 90, lines: 90 }, + "src/skillify/pull.ts": { statements: 90, branches: 75, functions: 90, lines: 90 }, + "src/skillify/scope-config.ts": { statements: 90, branches: 90, functions: 90, lines: 90 }, + "src/skillify/skill-writer.ts": { statements: 90, branches: 80, functions: 90, lines: 90 }, + "src/skillify/skills-table.ts": { statements: 90, branches: 70, functions: 90, lines: 90 }, + "src/skillify/state.ts": { statements: 80, branches: 70, functions: 90, lines: 80 }, + "src/skillify/triggers.ts": { statements: 80, branches: 70, functions: 90, lines: 80 }, + "src/commands/skillify.ts": { statements: 80, branches: 70, functions: 80, lines: 80 }, // PR #96 — feat/notifications-framework. Centralized push-notification // framework + Claude Code dual-channel adapter (systemMessage + addCtx). // Most files at 100% via notifications.test.ts and notifications-coverage.test.ts.