diff --git a/.claude-plugin/marketplace.json b/.claude-plugin/marketplace.json index bad0255..f40a7e3 100644 --- a/.claude-plugin/marketplace.json +++ b/.claude-plugin/marketplace.json @@ -6,13 +6,13 @@ }, "metadata": { "description": "Cloud-backed persistent shared memory for AI agents powered by Deeplake", - "version": "0.6.6" + "version": "0.6.7" }, "plugins": [ { "name": "hivemind", "description": "Persistent shared memory powered by Deeplake — captures all session activity and provides cross-session, cross-agent memory search", - "version": "0.6.6", + "version": "0.6.7", "source": "./claude-code", "homepage": "https://github.com/activeloopai/hivemind" } diff --git a/.claude-plugin/plugin.json b/.claude-plugin/plugin.json index 8028d11..158e0fa 100644 --- a/.claude-plugin/plugin.json +++ b/.claude-plugin/plugin.json @@ -1,7 +1,7 @@ { "name": "hivemind", "description": "Cloud-backed persistent memory powered by Deeplake — read, write, and share memory across Claude Code sessions and agents", - "version": "0.6.6", + "version": "0.6.7", "author": { "name": "Activeloop", "url": "https://deeplake.ai" diff --git a/.gitignore b/.gitignore index 96d7afd..d19216b 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ tmp/ .env.* coverage/ bench/ +.claude/ diff --git a/claude-code/bundle/capture.js b/claude-code/bundle/capture.js index 053d9b9..8811a20 100755 --- a/claude-code/bundle/capture.js +++ b/claude-code/bundle/capture.js @@ -186,13 +186,12 @@ var DeeplakeApi = class { log2(`commit: ${rows.length} rows`); } async upsertRowSql(row) { - const hex = row.content.toString("hex"); const ts = (/* @__PURE__ */ new Date()).toISOString(); const cd = row.creationDate ?? ts; const lud = row.lastUpdateDate ?? 
ts; const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`); if (exists.length > 0) { - let setClauses = `content = E'\\\\x${hex}', summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; + let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; if (row.project !== void 0) setClauses += `, project = '${sqlStr(row.project)}'`; if (row.description !== void 0) @@ -200,8 +199,8 @@ var DeeplakeApi = class { await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`); } else { const id = randomUUID(); - let cols = "id, path, filename, content, summary, mime_type, size_bytes, creation_date, last_update_date"; - let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'\\\\x${hex}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; + let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date"; + let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; if (row.project !== void 0) { cols += ", project"; vals += `, '${sqlStr(row.project)}'`; @@ -258,7 +257,7 @@ var DeeplakeApi = class { const tables = await this.listTables(); if (!tables.includes(tbl)) { log2(`table "${tbl}" not found, creating`); - await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', content BYTEA NOT NULL DEFAULT ''::bytea, summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/octet-stream', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', 
description TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); + await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); log2(`table "${tbl}" created`); } else { for (const col of ["project", "description", "creation_date", "last_update_date", "author"]) { diff --git a/claude-code/bundle/pre-tool-use.js b/claude-code/bundle/pre-tool-use.js index b391c1b..95bce5b 100755 --- a/claude-code/bundle/pre-tool-use.js +++ b/claude-code/bundle/pre-tool-use.js @@ -194,13 +194,12 @@ var DeeplakeApi = class { log2(`commit: ${rows.length} rows`); } async upsertRowSql(row) { - const hex = row.content.toString("hex"); const ts = (/* @__PURE__ */ new Date()).toISOString(); const cd = row.creationDate ?? ts; const lud = row.lastUpdateDate ?? 
ts; const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`); if (exists.length > 0) { - let setClauses = `content = E'\\\\x${hex}', summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; + let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; if (row.project !== void 0) setClauses += `, project = '${sqlStr(row.project)}'`; if (row.description !== void 0) @@ -208,8 +207,8 @@ var DeeplakeApi = class { await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`); } else { const id = randomUUID(); - let cols = "id, path, filename, content, summary, mime_type, size_bytes, creation_date, last_update_date"; - let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'\\\\x${hex}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; + let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date"; + let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; if (row.project !== void 0) { cols += ", project"; vals += `, '${sqlStr(row.project)}'`; @@ -266,7 +265,7 @@ var DeeplakeApi = class { const tables = await this.listTables(); if (!tables.includes(tbl)) { log2(`table "${tbl}" not found, creating`); - await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', content BYTEA NOT NULL DEFAULT ''::bytea, summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/octet-stream', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', 
description TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); + await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); log2(`table "${tbl}" created`); } else { for (const col of ["project", "description", "creation_date", "last_update_date", "author"]) { diff --git a/claude-code/bundle/session-start.js b/claude-code/bundle/session-start.js index eb2cfc2..9693a22 100755 --- a/claude-code/bundle/session-start.js +++ b/claude-code/bundle/session-start.js @@ -198,13 +198,12 @@ var DeeplakeApi = class { log2(`commit: ${rows.length} rows`); } async upsertRowSql(row) { - const hex = row.content.toString("hex"); const ts = (/* @__PURE__ */ new Date()).toISOString(); const cd = row.creationDate ?? ts; const lud = row.lastUpdateDate ?? 
ts; const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`); if (exists.length > 0) { - let setClauses = `content = E'\\\\x${hex}', summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; + let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; if (row.project !== void 0) setClauses += `, project = '${sqlStr(row.project)}'`; if (row.description !== void 0) @@ -212,8 +211,8 @@ var DeeplakeApi = class { await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`); } else { const id = randomUUID(); - let cols = "id, path, filename, content, summary, mime_type, size_bytes, creation_date, last_update_date"; - let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'\\\\x${hex}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; + let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date"; + let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; if (row.project !== void 0) { cols += ", project"; vals += `, '${sqlStr(row.project)}'`; @@ -270,7 +269,7 @@ var DeeplakeApi = class { const tables = await this.listTables(); if (!tables.includes(tbl)) { log2(`table "${tbl}" not found, creating`); - await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', content BYTEA NOT NULL DEFAULT ''::bytea, summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/octet-stream', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', 
description TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); + await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); log2(`table "${tbl}" created`); } else { for (const col of ["project", "description", "creation_date", "last_update_date", "author"]) { @@ -399,9 +398,8 @@ async function createPlaceholder(api, table, sessionId, cwd, userName, orgName, `- **Status**: in-progress`, "" ].join("\n"); - const hex = Buffer.from(content, "utf-8").toString("hex"); const filename = `${sessionId}.md`; - await api.query(`INSERT INTO "${table}" (id, path, filename, content, summary, author, mime_type, size_bytes, project, description, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'\\\\x${hex}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', '${now}', '${now}')`); + await api.query(`INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, creation_date, last_update_date) VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', '${now}', '${now}')`); wikiLog(`SessionStart: created placeholder for ${sessionId} (${cwd})`); } async function main() { diff --git a/claude-code/bundle/shell/deeplake-shell.js 
b/claude-code/bundle/shell/deeplake-shell.js index 50633c5..6453609 100755 --- a/claude-code/bundle/shell/deeplake-shell.js +++ b/claude-code/bundle/shell/deeplake-shell.js @@ -66889,13 +66889,12 @@ var DeeplakeApi = class { log2(`commit: ${rows.length} rows`); } async upsertRowSql(row) { - const hex = row.content.toString("hex"); const ts3 = (/* @__PURE__ */ new Date()).toISOString(); const cd = row.creationDate ?? ts3; const lud = row.lastUpdateDate ?? ts3; const exists = await this.query(`SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1`); if (exists.length > 0) { - let setClauses = `content = E'\\\\x${hex}', summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; + let setClauses = `summary = E'${sqlStr(row.contentText)}', mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; if (row.project !== void 0) setClauses += `, project = '${sqlStr(row.project)}'`; if (row.description !== void 0) @@ -66903,8 +66902,8 @@ var DeeplakeApi = class { await this.query(`UPDATE "${this.tableName}" SET ${setClauses} WHERE path = '${sqlStr(row.path)}'`); } else { const id = randomUUID(); - let cols = "id, path, filename, content, summary, mime_type, size_bytes, creation_date, last_update_date"; - let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'\\\\x${hex}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; + let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date"; + let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; if (row.project !== void 0) { cols += ", project"; vals += `, '${sqlStr(row.project)}'`; @@ -66961,7 +66960,7 @@ var DeeplakeApi = class { const tables = await this.listTables(); if 
(!tables.includes(tbl)) { log2(`table "${tbl}" not found, creating`); - await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', content BYTEA NOT NULL DEFAULT ''::bytea, summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'application/octet-stream', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); + await this.query(`CREATE TABLE IF NOT EXISTS "${tbl}" (id TEXT NOT NULL DEFAULT '', path TEXT NOT NULL DEFAULT '', filename TEXT NOT NULL DEFAULT '', summary TEXT NOT NULL DEFAULT '', author TEXT NOT NULL DEFAULT '', mime_type TEXT NOT NULL DEFAULT 'text/plain', size_bytes BIGINT NOT NULL DEFAULT 0, project TEXT NOT NULL DEFAULT '', description TEXT NOT NULL DEFAULT '', creation_date TEXT NOT NULL DEFAULT '', last_update_date TEXT NOT NULL DEFAULT '') USING deeplake`); log2(`table "${tbl}" created`); } else { for (const col of ["project", "description", "creation_date", "last_update_date", "author"]) { @@ -66989,7 +66988,6 @@ import { basename as basename4, posix } from "node:path"; import { randomUUID as randomUUID2 } from "node:crypto"; var BATCH_SIZE = 10; var FLUSH_DEBOUNCE_MS = 200; -var TEXT_DETECT_BYTES = 4096; function normPath(p22) { const r10 = posix.normalize(p22.startsWith("/") ? p22 : "/" + p22); return r10 === "/" ? r10 : r10.replace(/\/$/, ""); @@ -66998,13 +66996,6 @@ function parentOf(p22) { const i11 = p22.lastIndexOf("/"); return i11 <= 0 ? "/" : p22.slice(0, i11); } -function isText(buf) { - const end = Math.min(buf.length, TEXT_DETECT_BYTES); - for (let i11 = 0; i11 < end; i11++) - if (buf[i11] === 0) - return false; - return true; -} function guessMime(filename) { const ext2 = filename.split(".").pop()?.toLowerCase() ?? 
""; return { @@ -67014,29 +67005,12 @@ function guessMime(filename) { js: "text/javascript", ts: "text/typescript", html: "text/html", - css: "text/css", - png: "image/png", - jpg: "image/jpeg", - jpeg: "image/jpeg", - pdf: "application/pdf", - svg: "image/svg+xml", - gz: "application/gzip", - zip: "application/zip" - }[ext2] ?? "application/octet-stream"; + css: "text/css" + }[ext2] ?? "text/plain"; } function fsErr(code, msg, path2) { return Object.assign(new Error(`${code}: ${msg}, '${path2}'`), { code }); } -function decodeContent(raw) { - if (raw instanceof Uint8Array) - return Buffer.from(raw); - if (Buffer.isBuffer(raw)) - return raw; - if (typeof raw === "string") { - return raw.startsWith("\\x") ? Buffer.from(raw.slice(2), "hex") : Buffer.from(raw, "base64"); - } - throw new Error(`Unexpected content type: ${typeof raw}`); -} var DeeplakeFs = class _DeeplakeFs { client; table; @@ -67179,7 +67153,6 @@ var DeeplakeFs = class _DeeplakeFs { } } async upsertRow(r10) { - const hex = r10.content.toString("hex"); const text = sqlStr(r10.contentText); const p22 = sqlStr(r10.path); const fname = sqlStr(r10.filename); @@ -67188,7 +67161,7 @@ var DeeplakeFs = class _DeeplakeFs { const cd = r10.creationDate ?? ts3; const lud = r10.lastUpdateDate ?? 
ts3; if (this.flushed.has(r10.path)) { - let setClauses = `filename = '${fname}', content = E'\\\\x${hex}', summary = E'${text}', mime_type = '${mime}', size_bytes = ${r10.sizeBytes}, last_update_date = '${sqlStr(lud)}'`; + let setClauses = `filename = '${fname}', summary = E'${text}', mime_type = '${mime}', size_bytes = ${r10.sizeBytes}, last_update_date = '${sqlStr(lud)}'`; if (r10.project !== void 0) setClauses += `, project = '${sqlStr(r10.project)}'`; if (r10.description !== void 0) @@ -67196,8 +67169,8 @@ var DeeplakeFs = class _DeeplakeFs { await this.client.query(`UPDATE "${this.table}" SET ${setClauses} WHERE path = '${p22}'`); } else { const id = randomUUID2(); - const cols = "id, path, filename, content, summary, mime_type, size_bytes, creation_date, last_update_date" + (r10.project !== void 0 ? ", project" : "") + (r10.description !== void 0 ? ", description" : ""); - const vals = `'${id}', '${p22}', '${fname}', E'\\\\x${hex}', E'${text}', '${mime}', ${r10.sizeBytes}, '${sqlStr(cd)}', '${sqlStr(lud)}'` + (r10.project !== void 0 ? `, '${sqlStr(r10.project)}'` : "") + (r10.description !== void 0 ? `, '${sqlStr(r10.description)}'` : ""); + const cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date" + (r10.project !== void 0 ? ", project" : "") + (r10.description !== void 0 ? ", description" : ""); + const vals = `'${id}', '${p22}', '${fname}', E'${text}', '${mime}', ${r10.sizeBytes}, '${sqlStr(cd)}', '${sqlStr(lud)}'` + (r10.project !== void 0 ? `, '${sqlStr(r10.project)}'` : "") + (r10.description !== void 0 ? 
`, '${sqlStr(r10.description)}'` : ""); await this.client.query(`INSERT INTO "${this.table}" (${cols}) VALUES (${vals})`); this.flushed.add(r10.path); } @@ -67205,13 +67178,19 @@ var DeeplakeFs = class _DeeplakeFs { // ── Virtual index.md generation ──────────────────────────────────────────── async generateVirtualIndex() { const rows = await this.client.query(`SELECT path, project, description, creation_date, last_update_date FROM "${this.table}" WHERE path LIKE '${sqlStr("/summaries/")}%' ORDER BY last_update_date DESC`); + const sessionPathsByUser = /* @__PURE__ */ new Map(); + for (const sp of this.sessionPaths) { + const m26 = sp.match(/\/sessions\/[^/]+\/[^/]+_([^.]+)\.jsonl$/); + if (m26) + sessionPathsByUser.set(m26[1], sp.slice(1)); + } const lines = [ "# Session Index", "", "List of all Claude Code sessions with summaries.", "", - "| Session | Created | Last Updated | Project | Description |", - "|---------|---------|--------------|---------|-------------|" + "| Session | Conversation | Created | Last Updated | Project | Description |", + "|---------|-------------|---------|--------------|---------|-------------|" ]; for (const row of rows) { const p22 = row["path"]; @@ -67221,11 +67200,13 @@ var DeeplakeFs = class _DeeplakeFs { const summaryUser = match2[1]; const sessionId = match2[2]; const relPath = `summaries/${summaryUser}/${sessionId}.md`; + const convPath = sessionPathsByUser.get(sessionId); + const convLink = convPath ? 
`[messages](${convPath})` : ""; const project = row["project"] || ""; const description = row["description"] || ""; const creationDate = row["creation_date"] || ""; const lastUpdateDate = row["last_update_date"] || ""; - lines.push(`| [${sessionId}](${relPath}) | ${creationDate} | ${lastUpdateDate} | ${project} | ${description} |`); + lines.push(`| [${sessionId}](${relPath}) | ${convLink} | ${creationDate} | ${lastUpdateDate} | ${project} | ${description} |`); } lines.push(""); return lines.join("\n"); @@ -67253,15 +67234,11 @@ var DeeplakeFs = class _DeeplakeFs { if (uncached.length === 0) return; const inList = uncached.map((p22) => `'${sqlStr(p22)}'`).join(", "); - const rows = await this.client.query(`SELECT path, summary, content FROM "${this.table}" WHERE path IN (${inList})`); + const rows = await this.client.query(`SELECT path, summary FROM "${this.table}" WHERE path IN (${inList})`); for (const row of rows) { const p22 = row["path"]; - const text = row["summary"]; - if (text && text.length > 0) { - this.files.set(p22, Buffer.from(text, "utf-8")); - } else if (row["content"] != null) { - this.files.set(p22, decodeContent(row["content"])); - } + const text = row["summary"] ?? 
""; + this.files.set(p22, Buffer.from(text, "utf-8")); } } // ── IFileSystem: reads ──────────────────────────────────────────────────── @@ -67276,8 +67253,9 @@ var DeeplakeFs = class _DeeplakeFs { return cached; const pend = this.pending.get(p22); if (pend) { - this.files.set(p22, pend.content); - return pend.content; + const buf2 = Buffer.from(pend.contentText, "utf-8"); + this.files.set(p22, buf2); + return buf2; } if (this.sessionPaths.has(p22) && this.sessionsTable) { const rows2 = await this.client.query(`SELECT message FROM "${this.sessionsTable}" WHERE path = '${sqlStr(p22)}' ORDER BY creation_date ASC`); @@ -67288,10 +67266,10 @@ var DeeplakeFs = class _DeeplakeFs { this.files.set(p22, buf2); return buf2; } - const rows = await this.client.query(`SELECT content FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`); + const rows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`); if (rows.length === 0) throw fsErr("ENOENT", "no such file or directory", p22); - const buf = decodeContent(rows[0]["content"]); + const buf = Buffer.from(rows[0]["summary"] ?? 
"", "utf-8"); this.files.set(p22, buf); return buf; } @@ -67316,7 +67294,7 @@ var DeeplakeFs = class _DeeplakeFs { return cached.toString("utf-8"); const pend = this.pending.get(p22); if (pend) - return pend.contentText || pend.content.toString("utf-8"); + return pend.contentText; if (this.sessionPaths.has(p22) && this.sessionsTable) { const rows2 = await this.client.query(`SELECT message FROM "${this.sessionsTable}" WHERE path = '${sqlStr(p22)}' ORDER BY creation_date ASC`); if (rows2.length === 0) @@ -67326,37 +67304,32 @@ var DeeplakeFs = class _DeeplakeFs { this.files.set(p22, buf2); return text2; } - const rows = await this.client.query(`SELECT summary, content FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`); + const rows = await this.client.query(`SELECT summary FROM "${this.table}" WHERE path = '${sqlStr(p22)}' LIMIT 1`); if (rows.length === 0) throw fsErr("ENOENT", "no such file or directory", p22); - const row = rows[0]; - const text = row["summary"]; - if (text && text.length > 0) { - const buf2 = Buffer.from(text, "utf-8"); - this.files.set(p22, buf2); - return text; - } - const buf = decodeContent(row["content"]); + const text = rows[0]["summary"] ?? ""; + const buf = Buffer.from(text, "utf-8"); this.files.set(p22, buf); - return buf.toString("utf-8"); + return text; } // ── IFileSystem: writes ─────────────────────────────────────────────────── /** Write a file with optional row-level metadata (project, description, dates). */ async writeFileWithMeta(path2, content, meta) { const p22 = normPath(path2); + if (this.sessionPaths.has(p22)) + throw fsErr("EPERM", "session files are read-only", p22); if (this.dirs.has(p22) && !this.files.has(p22)) throw fsErr("EISDIR", "illegal operation on a directory", p22); - const buf = typeof content === "string" ? Buffer.from(content, "utf-8") : Buffer.from(content); + const text = typeof content === "string" ? 
content : Buffer.from(content).toString("utf-8"); + const buf = Buffer.from(text, "utf-8"); const mime = guessMime(basename4(p22)); - const contentText = isText(buf) ? buf.toString("utf-8") : ""; this.files.set(p22, buf); this.meta.set(p22, { size: buf.length, mime, mtime: /* @__PURE__ */ new Date() }); this.addToTree(p22); this.pending.set(p22, { path: p22, filename: basename4(p22), - content: buf, - contentText, + contentText: text, mimeType: mime, sizeBytes: buf.length, ...meta @@ -67368,19 +67341,20 @@ var DeeplakeFs = class _DeeplakeFs { } async writeFile(path2, content, _opts) { const p22 = normPath(path2); + if (this.sessionPaths.has(p22)) + throw fsErr("EPERM", "session files are read-only", p22); if (this.dirs.has(p22) && !this.files.has(p22)) throw fsErr("EISDIR", "illegal operation on a directory", p22); - const buf = typeof content === "string" ? Buffer.from(content, "utf-8") : Buffer.from(content); + const text = typeof content === "string" ? content : Buffer.from(content).toString("utf-8"); + const buf = Buffer.from(text, "utf-8"); const mime = guessMime(basename4(p22)); - const contentText = isText(buf) ? buf.toString("utf-8") : ""; this.files.set(p22, buf); this.meta.set(p22, { size: buf.length, mime, mtime: /* @__PURE__ */ new Date() }); this.addToTree(p22); this.pending.set(p22, { path: p22, filename: basename4(p22), - content: buf, - contentText, + contentText: text, mimeType: mime, sizeBytes: buf.length }); @@ -67392,10 +67366,11 @@ var DeeplakeFs = class _DeeplakeFs { async appendFile(path2, content, opts) { const p22 = normPath(path2); const add = typeof content === "string" ? 
content : Buffer.from(content).toString("utf-8"); + if (this.sessionPaths.has(p22)) + throw fsErr("EPERM", "session files are read-only", p22); if (this.files.has(p22) || await this.exists(p22).catch(() => false)) { - const addHex = Buffer.from(add, "utf-8").toString("hex"); const ts3 = (/* @__PURE__ */ new Date()).toISOString(); - await this.client.query(`UPDATE "${this.table}" SET summary = summary || E'${sqlStr(add)}', content = content || E'\\\\x${addHex}', size_bytes = size_bytes + ${Buffer.byteLength(add, "utf-8")}, last_update_date = '${ts3}' WHERE path = '${sqlStr(p22)}'`); + await this.client.query(`UPDATE "${this.table}" SET summary = summary || E'${sqlStr(add)}', size_bytes = size_bytes + ${Buffer.byteLength(add, "utf-8")}, last_update_date = '${ts3}' WHERE path = '${sqlStr(p22)}'`); this.files.set(p22, null); const m26 = this.meta.get(p22); if (m26) { @@ -67403,7 +67378,7 @@ var DeeplakeFs = class _DeeplakeFs { m26.mtime = new Date(ts3); } } else { - await this.writeFile(p22, typeof content === "string" ? 
Buffer.from(content, "utf-8") : Buffer.from(content), opts); + await this.writeFile(p22, content, opts); await this.flush(); } } @@ -67511,6 +67486,8 @@ var DeeplakeFs = class _DeeplakeFs { // ── IFileSystem: structural mutations ───────────────────────────────────── async rm(path2, opts) { const p22 = normPath(path2); + if (this.sessionPaths.has(p22)) + throw fsErr("EPERM", "session files are read-only", p22); if (!this.files.has(p22) && !this.dirs.has(p22)) { if (opts?.force) return; @@ -67532,12 +67509,13 @@ var DeeplakeFs = class _DeeplakeFs { stack.push(childPath); } } - for (const fp of toDelete) + const safeToDelete = toDelete.filter((fp) => !this.sessionPaths.has(fp)); + for (const fp of safeToDelete) this.removeFromTree(fp); this.dirs.delete(p22); this.dirs.get(parentOf(p22))?.delete(basename4(p22)); - if (toDelete.length > 0) { - const inList = toDelete.map((fp) => `'${sqlStr(fp)}'`).join(", "); + if (safeToDelete.length > 0) { + const inList = safeToDelete.map((fp) => `'${sqlStr(fp)}'`).join(", "); await this.client.query(`DELETE FROM "${this.table}" WHERE path IN (${inList})`); } } else { @@ -67547,6 +67525,8 @@ var DeeplakeFs = class _DeeplakeFs { } async cp(src, dest, opts) { const s10 = normPath(src), d15 = normPath(dest); + if (this.sessionPaths.has(d15)) + throw fsErr("EPERM", "session files are read-only", d15); if (this.dirs.has(s10) && !this.files.has(s10)) { if (!opts?.recursive) throw fsErr("EISDIR", "is a directory", s10); @@ -67558,6 +67538,11 @@ var DeeplakeFs = class _DeeplakeFs { } } async mv(src, dest) { + const s10 = normPath(src), d15 = normPath(dest); + if (this.sessionPaths.has(s10)) + throw fsErr("EPERM", "session files are read-only", s10); + if (this.sessionPaths.has(d15)) + throw fsErr("EPERM", "session files are read-only", d15); await this.cp(src, dest, { recursive: true }); await this.rm(src, { recursive: true, force: true }); } diff --git a/claude-code/bundle/wiki-worker.js b/claude-code/bundle/wiki-worker.js index 
9bec648..f3e864d 100755 --- a/claude-code/bundle/wiki-worker.js +++ b/claude-code/bundle/wiki-worker.js @@ -103,17 +103,16 @@ async function main() { if (existsSync(tmpSummary)) { const text = readFileSync(tmpSummary, "utf-8"); if (text.trim()) { - const hex = Buffer.from(text, "utf-8").toString("hex"); const fname = `${cfg.sessionId}.md`; const vpath = `/summaries/${cfg.userName}/${fname}`; const ts = (/* @__PURE__ */ new Date()).toISOString(); await query(`SELECT deeplake_sync_table('${cfg.memoryTable}')`); const existing = await query(`SELECT path FROM "${cfg.memoryTable}" WHERE path = '${esc(vpath)}' LIMIT 1`); if (existing.length > 0) { - await query(`UPDATE "${cfg.memoryTable}" SET content = E'\\\\x${hex}', summary = E'${esc(text)}', size_bytes = ${Buffer.byteLength(text)}, last_update_date = '${ts}' WHERE path = '${esc(vpath)}'`); + await query(`UPDATE "${cfg.memoryTable}" SET summary = E'${esc(text)}', size_bytes = ${Buffer.byteLength(text)}, last_update_date = '${ts}' WHERE path = '${esc(vpath)}'`); } else { const id = crypto.randomUUID(); - await query(`INSERT INTO "${cfg.memoryTable}" (id, path, filename, content, summary, author, mime_type, size_bytes, project, creation_date, last_update_date) VALUES ('${id}', '${esc(vpath)}', '${esc(fname)}', E'\\\\x${hex}', E'${esc(text)}', '${esc(cfg.userName)}', 'text/markdown', ${Buffer.byteLength(text)}, '${esc(cfg.project)}', '${ts}', '${ts}')`); + await query(`INSERT INTO "${cfg.memoryTable}" (id, path, filename, summary, author, mime_type, size_bytes, project, creation_date, last_update_date) VALUES ('${id}', '${esc(vpath)}', '${esc(fname)}', E'${esc(text)}', '${esc(cfg.userName)}', 'text/markdown', ${Buffer.byteLength(text)}, '${esc(cfg.project)}', '${ts}', '${ts}')`); } wlog(`uploaded ${vpath}`); try { diff --git a/claude-code/commands/login.md b/claude-code/commands/login.md index 1395768..79a0578 100644 --- a/claude-code/commands/login.md +++ b/claude-code/commands/login.md @@ -9,10 +9,12 @@ Run: node 
"${CLAUDE_PLUGIN_ROOT}/bundle/commands/auth-login.js" login ``` -If login succeeds, show this welcome message: +If login succeeds, show this welcome message. Include the organization name from the command output: 🐝 Welcome to Hivemind! +Current org: **{org name from output}** + Your Claude Code agents can now talk to each other and share memory across sessions, teammates, and machines. Get started: diff --git a/claude-code/skills/deeplake-memory/SKILL.md b/claude-code/skills/hivemind-memory/SKILL.md similarity index 100% rename from claude-code/skills/deeplake-memory/SKILL.md rename to claude-code/skills/hivemind-memory/SKILL.md diff --git a/claude-code/tests/deeplake-fs.test.ts b/claude-code/tests/deeplake-fs.test.ts index 1b0620d..4cbf03b 100644 --- a/claude-code/tests/deeplake-fs.test.ts +++ b/claude-code/tests/deeplake-fs.test.ts @@ -1,9 +1,9 @@ import { describe, it, expect, beforeEach, vi } from "vitest"; -import { DeeplakeFs, isText, guessMime } from "../../src/shell/deeplake-fs.js"; +import { DeeplakeFs, guessMime } from "../../src/shell/deeplake-fs.js"; // ── Mock ManagedClient ──────────────────────────────────────────────────────── type Row = { - id: string; path: string; filename: string; content: Buffer; + id: string; path: string; filename: string; summary: string; mime_type: string; size_bytes: number; project: string; description: string; creation_date: string; last_update_date: string; }; @@ -13,8 +13,7 @@ function makeClient(seed: Record = {}) { id: `seed-${path}`, path, filename: path.split("/").pop()!, - content, - summary: isText(content) ? content.toString("utf-8") : "", + summary: content.toString("utf-8"), mime_type: guessMime(path.split("/").pop()!), size_bytes: content.length, project: "", @@ -31,27 +30,20 @@ function makeClient(seed: Record = {}) { if (sql.includes("SELECT path, size_bytes, mime_type")) { return rows.map(r => ({ path: r.path, size_bytes: r.size_bytes, mime_type: r.mime_type })); } - // Read: SELECT content FROM ... 
WHERE path = '...' - if (sql.includes("SELECT content FROM")) { + // Read: SELECT summary FROM ... WHERE path = '...' + if (sql.includes("SELECT summary FROM")) { const match = sql.match(/path = '([^']+)'/); const row = match ? rows.find(r => r.path === match[1]) : undefined; - // Return hex-encoded content like PostgreSQL BYTEA - return row ? [{ content: `\\x${row.content.toString("hex")}` }] : []; + return row ? [{ summary: row.summary }] : []; } - // Read: SELECT summary, content FROM ... WHERE path = '...' - if (sql.includes("SELECT summary, content") && !sql.includes("IN (")) { - const match = sql.match(/path = '([^']+)'/); - const row = match ? rows.find(r => r.path === match[1]) : undefined; - return row ? [{ summary: row.summary, content: `\\x${row.content.toString("hex")}` }] : []; - } - // Prefetch: SELECT path, summary, content FROM ... WHERE path IN (...) - if (sql.includes("SELECT path, summary, content") && sql.includes("IN (")) { + // Prefetch: SELECT path, summary FROM ... WHERE path IN (...) 
+ if (sql.includes("SELECT path, summary") && sql.includes("IN (")) { const inMatch = sql.match(/IN \(([^)]+)\)/); if (inMatch) { const paths = inMatch[1].split(",").map(s => s.trim().replace(/^'|'$/g, "")); return rows .filter(r => paths.includes(r.path)) - .map(r => ({ path: r.path, summary: r.summary, content: `\\x${r.content.toString("hex")}` })); + .map(r => ({ path: r.path, summary: r.summary })); } return []; } @@ -103,23 +95,18 @@ function makeClient(seed: Record = {}) { if (sql.includes("summary = summary ||")) { // appendFile: SQL-level concat - const hexMatch = sql.match(/content \|\| E'\\\\x([0-9a-f]*)'/); - if (hexMatch) { - const appendBuf = Buffer.from(hexMatch[1], "hex"); - row.content = Buffer.concat([row.content, appendBuf]); - row.summary += appendBuf.toString("utf-8"); - row.size_bytes = row.content.length; + const appendMatch = sql.match(/summary \|\| E'((?:[^']|'')*)'/); + if (appendMatch) { + const appendText = appendMatch[1].replace(/''/g, "'"); + row.summary += appendText; + row.size_bytes = Buffer.byteLength(row.summary, "utf-8"); } } else { // Full overwrite UPDATE (_doFlush for existing paths) - const hexMatch = sql.match(/content = E'\\\\x([0-9a-f]*)'/); const textMatch = sql.match(/summary = E'((?:[^']|'')*)'/); - if (hexMatch) { - row.content = Buffer.from(hexMatch[1], "hex"); - row.size_bytes = row.content.length; - } if (textMatch) { row.summary = textMatch[1].replace(/''/g, "'"); + row.size_bytes = Buffer.byteLength(row.summary, "utf-8"); } } // Handle new metadata columns in any UPDATE @@ -144,13 +131,9 @@ function makeClient(seed: Record = {}) { const pathMatch = hasId ? sql.match(/VALUES \('[^']+', '([^']+)'/) // skip id : sql.match(/VALUES \('([^']+)'/); - const hexMatch = sql.match(/E'\\\\x([0-9a-f]*)'/); - const textMatch = sql.match(/E'\\\\x[0-9a-f]*', E'((?:[^']|'')*)'/); if (pathMatch) { const path = pathMatch[1]; const filename = path.split("/").pop()!; - const content = hexMatch ? 
Buffer.from(hexMatch[1], "hex") : Buffer.alloc(0); - const summary = textMatch?.[1]?.replace(/''/g, "'") ?? ""; const id = idMatch?.[1] ?? ""; // Parse columns and values positionally const colsPart = sql.match(/\(([^)]+)\)\s+VALUES/)?.[1] ?? ""; @@ -189,7 +172,8 @@ function makeClient(seed: Record = {}) { // Remove existing row if any (upsert) const idx = rows.findIndex(r => r.path === path); if (idx >= 0) rows.splice(idx, 1); - rows.push({ id, path, filename, content, summary, mime_type: "text/plain", size_bytes: content.length, project, description, creation_date, last_update_date }); + const summary = colMap["summary"] ?? ""; + rows.push({ id, path, filename, summary, mime_type: "text/plain", size_bytes: Buffer.byteLength(summary, "utf-8"), project, description, creation_date, last_update_date }); } } return []; @@ -218,25 +202,11 @@ async function makeFs(seed: Record = {}, mount = "/memo return { fs, client }; } -// ── Unit: helpers ───────────────────────────────────────────────────────────── -describe("isText", () => { - it("returns true for plain UTF-8", () => { - expect(isText(Buffer.from("hello world"))).toBe(true); - }); - it("returns false for buffer containing null byte", () => { - expect(isText(Buffer.from([0x68, 0x00, 0x6c]))).toBe(false); - }); - it("returns false for buffer with null byte (binary marker)", () => { - // Real binary files (PNG body, PDFs, zips) contain null bytes - const binary = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x00, 0x0a, 0x1a, 0x0a]); - expect(isText(binary)).toBe(false); - }); -}); describe("guessMime", () => { it("returns application/json for .json", () => expect(guessMime("foo.json")).toBe("application/json")); - it("returns image/png for .png", () => expect(guessMime("image.png")).toBe("image/png")); - it("returns octet-stream for .bin", () => expect(guessMime("file.bin")).toBe("application/octet-stream")); + it("returns text/markdown for .md", () => expect(guessMime("notes.md")).toBe("text/markdown")); + it("returns 
text/plain for unknown ext", () => expect(guessMime("file.xyz")).toBe("text/plain")); }); // ── Bootstrap ───────────────────────────────────────────────────────────────── @@ -274,9 +244,8 @@ describe("readFile", () => { const { fs, client } = await makeFs({ "/memory/hello.txt": "hello" }); const content = await fs.readFile("/memory/hello.txt"); expect(content).toBe("hello"); - // Should use the summary+content SELECT, not content-only SELECT const calls = (client.query.mock.calls as [string][]); - expect(calls.some(c => (c[0] as string).includes("summary, content"))).toBe(true); + expect(calls.some(c => (c[0] as string).includes("SELECT summary FROM"))).toBe(true); }); it("throws ENOENT for missing file", async () => { @@ -290,22 +259,19 @@ describe("readFile", () => { }); }); -// ── Binary reads ────────────────────────────────────────────────────────────── +// ── Buffer reads ────────────────────────────────────────────────────────────── describe("readFileBuffer", () => { - it("roundtrips binary content exactly", async () => { - // PNG-like: has null bytes → binary - const binary = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x00, 0x01, 0x02, 0x03]); - const { fs } = await makeFs({ "/memory/img.png": binary }); - - const result = await fs.readFileBuffer("/memory/img.png"); - expect(Buffer.from(result)).toEqual(binary); + it("roundtrips text content as buffer", async () => { + const { fs } = await makeFs({ "/memory/notes.txt": "hello world" }); + const result = await fs.readFileBuffer("/memory/notes.txt"); + expect(Buffer.from(result).toString("utf-8")).toBe("hello world"); }); - it("reads via SQL SELECT content query", async () => { - const { fs, client } = await makeFs({ "/memory/data.bin": Buffer.from([1, 2, 3]) }); - await fs.readFileBuffer("/memory/data.bin"); + it("reads via SQL SELECT summary query", async () => { + const { fs, client } = await makeFs({ "/memory/data.txt": "test data" }); + await fs.readFileBuffer("/memory/data.txt"); const selectCalls = 
(client.query.mock.calls as [string][]).filter(c => - (c[0] as string).includes("SELECT content FROM") + (c[0] as string).includes("SELECT summary FROM") ); expect(selectCalls.length).toBeGreaterThan(0); }); @@ -349,18 +315,16 @@ describe("writeFile", () => { expect(await fs.readFile("/memory/a.txt")).toBe("new"); }); - it("stores contentText='' for binary files (INSERT has empty E'' for summary)", async () => { + it("stores text content in summary column on INSERT", async () => { const { fs, client } = await makeFs({}); - const binary = Buffer.from([0x89, 0x50, 0x00, 0x01]); // Write 10 to trigger flush for (let i = 0; i < 9; i++) await fs.writeFile(`/memory/dummy${i}.txt`, "x"); - await fs.writeFile("/memory/img.png", binary); + await fs.writeFile("/memory/notes.md", "# Hello"); const insertCalls = (client.query.mock.calls as [string][]) - .filter(c => (c[0] as string).startsWith("INSERT") && (c[0] as string).includes("img.png")); + .filter(c => (c[0] as string).startsWith("INSERT") && (c[0] as string).includes("notes.md")); expect(insertCalls.length).toBe(1); - // summary should be E'' (empty string) for binary - expect(insertCalls[0][0]).toMatch(/E'\\\\x[0-9a-f]+', E''/); + expect(insertCalls[0][0]).toContain("# Hello"); }); }); @@ -552,7 +516,7 @@ describe("prefetch", () => { // Should issue exactly one SELECT ... WHERE path IN (...) 
query const prefetchCalls = (client.query.mock.calls as [string][]).filter( - c => c[0].includes("SELECT path, summary, content") && c[0].includes("IN (") + c => c[0].includes("SELECT path, summary") && c[0].includes("IN (") ); expect(prefetchCalls.length).toBe(1); expect(prefetchCalls[0][0]).toContain("/memory/a.txt"); @@ -577,7 +541,7 @@ describe("prefetch", () => { // Only b.txt should be in the IN list const prefetchCalls = (client.query.mock.calls as [string][]).filter( - c => c[0].includes("SELECT path, summary, content") && c[0].includes("IN (") + c => c[0].includes("SELECT path, summary") && c[0].includes("IN (") ); expect(prefetchCalls.length).toBe(1); expect(prefetchCalls[0][0]).not.toContain("/memory/a.txt"); @@ -593,7 +557,7 @@ describe("prefetch", () => { // No query should be issued — file is in pending batch const prefetchCalls = (client.query.mock.calls as [string][]).filter( - c => c[0].includes("SELECT path, summary, content") + c => c[0].includes("SELECT path, summary") ); expect(prefetchCalls.length).toBe(0); }); @@ -606,7 +570,7 @@ describe("prefetch", () => { // Only a.txt should be queried, nonexistent is not in the tree const prefetchCalls = (client.query.mock.calls as [string][]).filter( - c => c[0].includes("SELECT path, summary, content") && c[0].includes("IN (") + c => c[0].includes("SELECT path, summary") && c[0].includes("IN (") ); expect(prefetchCalls.length).toBe(1); expect(prefetchCalls[0][0]).toContain("/memory/a.txt"); @@ -788,7 +752,7 @@ describe("virtual index.md", () => { ]); const content = await fs.readFile("/index.md"); expect(content).toContain("# Session Index"); - expect(content).toContain("| Session | Created | Last Updated | Project | Description |"); + expect(content).toContain("| Session | Conversation | Created | Last Updated | Project | Description |"); expect(content).toContain("aaa-111"); expect(content).toContain("bbb-222"); expect(content).toContain("my-project"); @@ -843,7 +807,7 @@ describe("virtual index.md", 
() => { const { fs } = await makeFs({}, "/"); const content = await fs.readFile("/index.md"); expect(content).toContain("# Session Index"); - expect(content).toContain("| Session | Created | Last Updated | Project | Description |"); + expect(content).toContain("| Session | Conversation | Created | Last Updated | Project | Description |"); // No data rows const lines = content.split("\n").filter(l => l.startsWith("| [")); expect(lines.length).toBe(0); @@ -873,7 +837,7 @@ describe("virtual index.md", () => { // Manually insert a legacy row (no username dir) client._rows.push({ id: "legacy", path: "/summaries/old-sess.md", filename: "old-sess.md", - content: Buffer.from("# Old"), summary: "# Old", mime_type: "text/markdown", + summary: "# Old", mime_type: "text/markdown", size_bytes: 5, project: "proj-a", description: "Legacy", creation_date: "2026-04-01", last_update_date: "2026-04-01", }); const content = await fs.readFile("/index.md"); diff --git a/claude-code/tests/real-table-test.mjs b/claude-code/tests/real-table-test.mjs index 88326d9..ef380f2 100644 --- a/claude-code/tests/real-table-test.mjs +++ b/claude-code/tests/real-table-test.mjs @@ -1,15 +1,15 @@ #!/usr/bin/env node /** - * Manual test script: exercises INSERT/UPDATE/appendFile patterns against a real Deeplake table. - * Uses a temporary test table, cleans up after itself. + * E2E test: exercises the text-only schema (no BYTEA content column) + * against a real Deeplake table. Uses a temporary test table, cleans up after itself. 
*/ import { readFileSync } from "node:fs"; import { randomUUID } from "node:crypto"; - import { homedir } from "node:os"; import { join } from "node:path"; + const creds = JSON.parse(readFileSync(join(homedir(), ".deeplake/credentials.json"), "utf-8")); -const TABLE = "test_upsert_" + Date.now(); +const TABLE = "test_textonly_" + Date.now(); const API = creds.apiUrl + "/workspaces/" + creds.workspaceId + "/tables"; async function query(sql) { @@ -26,7 +26,6 @@ async function query(sql) { if (!r.ok) throw new Error(`API ${r.status}: ${text.slice(0, 300)}`); try { const json = JSON.parse(text); - // Convert columnar format {columns, rows} to array of objects if (json.columns && json.rows) { return json.rows.map(row => { const obj = {}; @@ -55,171 +54,186 @@ function assert(condition, name) { } try { - // ── Setup ────────────────────────────────────────────────────────────────── - console.log(`\nCreating table "${TABLE}"...`); + // ── Setup: text-only schema (no BYTEA content column) ───────────────────── + console.log(`\nCreating table "${TABLE}" (text-only schema)...`); await query( `CREATE TABLE IF NOT EXISTS "${TABLE}" (` + `id TEXT NOT NULL DEFAULT '', ` + `path TEXT NOT NULL DEFAULT '', ` + `filename TEXT NOT NULL DEFAULT '', ` + - `content BYTEA NOT NULL DEFAULT ''::bytea, ` + `summary TEXT NOT NULL DEFAULT '', ` + - `mime_type TEXT NOT NULL DEFAULT 'application/octet-stream', ` + + `author TEXT NOT NULL DEFAULT '', ` + + `mime_type TEXT NOT NULL DEFAULT 'text/plain', ` + `size_bytes BIGINT NOT NULL DEFAULT 0, ` + - `timestamp TEXT NOT NULL DEFAULT ''` + + `project TEXT NOT NULL DEFAULT '', ` + + `description TEXT NOT NULL DEFAULT '', ` + + `creation_date TEXT NOT NULL DEFAULT '', ` + + `last_update_date TEXT NOT NULL DEFAULT ''` + `) USING deeplake` ); console.log("Table created.\n"); - // ── Test 1: INSERT new row ───────────────────────────────────────────────── - console.log("Test 1: INSERT new row"); + // ── Test 1: INSERT new row (text only, no hex) 
──────────────────────────── + console.log("Test 1: INSERT new row (text-only)"); const id1 = randomUUID(); const ts1 = new Date().toISOString(); - const text1 = "hello world"; - const hex1 = Buffer.from(text1).toString("hex"); + const text1 = "# Hello World\nThis is a test file."; await query( - `INSERT INTO "${TABLE}" (id, path, filename, content, summary, mime_type, size_bytes, timestamp) ` + - `VALUES ('${id1}', '/test/file1.txt', 'file1.txt', E'\\\\x${hex1}', E'${esc(text1)}', 'text/plain', ${Buffer.byteLength(text1)}, '${ts1}')` + `INSERT INTO "${TABLE}" (id, path, filename, summary, author, mime_type, size_bytes, creation_date, last_update_date) ` + + `VALUES ('${id1}', '/test/file1.md', 'file1.md', E'${esc(text1)}', 'test-user', 'text/markdown', ${Buffer.byteLength(text1)}, '${ts1}', '${ts1}')` ); - await query(`SELECT deeplake_sync_table('${TABLE}')`); - const rows1 = await query(`SELECT id, path, summary, timestamp FROM "${TABLE}" WHERE path = '/test/file1.txt'`); + await sync(); + const rows1 = await query(`SELECT id, path, summary, author, creation_date FROM "${TABLE}" WHERE path = '/test/file1.md'`); assert(rows1.length === 1, "row inserted"); - assert(rows1[0].id === id1, `id matches (${rows1[0].id})`); + assert(rows1[0].id === id1, `id matches`); assert(rows1[0].summary === text1, "summary matches"); - assert(rows1[0].timestamp === ts1, "timestamp matches"); + assert(rows1[0].author === "test-user", "author matches"); + assert(rows1[0].creation_date === ts1, "creation_date matches"); - // ── Test 2: UPDATE existing row (preserves id, updates timestamp) ────────── - console.log("\nTest 2: UPDATE existing row — id preserved, timestamp refreshed"); - await new Promise(r => setTimeout(r, 100)); // ensure different timestamp + // ── Test 2: UPDATE existing row ─────────────────────────────────────────── + console.log("\nTest 2: UPDATE existing row — id preserved, content replaced"); + await new Promise(r => setTimeout(r, 100)); const ts2 = new 
Date().toISOString(); - const text2 = "updated content"; - const hex2 = Buffer.from(text2).toString("hex"); + const text2 = "# Updated\nNew content here."; await query( - `UPDATE "${TABLE}" SET content = E'\\\\x${hex2}', summary = E'${esc(text2)}', ` + - `mime_type = 'text/plain', size_bytes = ${Buffer.byteLength(text2)}, timestamp = '${ts2}' ` + - `WHERE path = '/test/file1.txt'` + `UPDATE "${TABLE}" SET summary = E'${esc(text2)}', ` + + `size_bytes = ${Buffer.byteLength(text2)}, last_update_date = '${ts2}' ` + + `WHERE path = '/test/file1.md'` ); - await query(`SELECT deeplake_sync_table('${TABLE}')`); - const rows2 = await query(`SELECT id, summary, timestamp FROM "${TABLE}" WHERE path = '/test/file1.txt'`); + await sync(); + const rows2 = await query(`SELECT id, summary, last_update_date FROM "${TABLE}" WHERE path = '/test/file1.md'`); assert(rows2.length === 1, "still one row"); - assert(rows2[0].id === id1, `id preserved after UPDATE (${rows2[0].id})`); + assert(rows2[0].id === id1, `id preserved after UPDATE`); assert(rows2[0].summary === text2, "summary updated"); - assert(rows2[0].timestamp === ts2, `timestamp updated (${rows2[0].timestamp})`); + assert(rows2[0].last_update_date === ts2, "last_update_date refreshed"); - // ── Test 3: appendFile UPDATE (concat content, update timestamp) ─────────── - console.log("\nTest 3: appendFile UPDATE — concat content, timestamp refreshed"); + // ── Test 3: appendFile — SQL-level text concat ──────────────────────────── + console.log("\nTest 3: appendFile — SQL text concat (no hex)"); await new Promise(r => setTimeout(r, 100)); const ts3 = new Date().toISOString(); - const append = "\nappended line"; - const appendHex = Buffer.from(append).toString("hex"); + const append = "\n## Appended Section\nExtra content."; await query( `UPDATE "${TABLE}" SET ` + `summary = summary || E'${esc(append)}', ` + - `content = content || E'\\\\x${appendHex}', ` + `size_bytes = size_bytes + ${Buffer.byteLength(append)}, ` + - `timestamp 
= '${ts3}' ` + - `WHERE path = '/test/file1.txt'` + `last_update_date = '${ts3}' ` + + `WHERE path = '/test/file1.md'` ); - await query(`SELECT deeplake_sync_table('${TABLE}')`); - const rows3 = await query(`SELECT id, summary, size_bytes, timestamp FROM "${TABLE}" WHERE path = '/test/file1.txt'`); + await sync(); + const rows3 = await query(`SELECT id, summary, size_bytes FROM "${TABLE}" WHERE path = '/test/file1.md'`); assert(rows3.length === 1, "still one row"); - assert(rows3[0].id === id1, `id preserved after append (${rows3[0].id})`); - assert(rows3[0].summary === text2 + append, `summary concatenated`); - assert(rows3[0].timestamp === ts3, `timestamp updated (${rows3[0].timestamp})`); - - // ── Test 4: SELECT check before upsert (path exists) ────────────────────── - console.log("\nTest 4: SELECT-based existence check for upsert"); - const exists = await query(`SELECT path FROM "${TABLE}" WHERE path = '/test/file1.txt' LIMIT 1`); - assert(exists.length > 0, "existing path found"); - const notExists = await query(`SELECT path FROM "${TABLE}" WHERE path = '/test/nonexistent.txt' LIMIT 1`); - assert(notExists.length === 0, "missing path returns empty"); - - // ── Test 5: Full upsert flow — check then UPDATE ────────────────────────── - console.log("\nTest 5: Full upsert — SELECT then UPDATE for existing path"); + assert(rows3[0].id === id1, `id preserved after append`); + assert(rows3[0].summary === text2 + append, "summary concatenated correctly"); + assert(rows3[0].size_bytes === Buffer.byteLength(text2 + append), "size_bytes updated"); + + // ── Test 4: Upsert flow — SELECT then UPDATE ───────────────────────────── + console.log("\nTest 4: Upsert — SELECT then UPDATE for existing path"); await new Promise(r => setTimeout(r, 100)); + const ts4 = new Date().toISOString(); + const text4 = "upsert-overwrite"; + const check4 = await query(`SELECT path FROM "${TABLE}" WHERE path = '/test/file1.md' LIMIT 1`); + assert(check4.length > 0, "existing path found"); + 
await query( + `UPDATE "${TABLE}" SET summary = E'${esc(text4)}', ` + + `size_bytes = ${Buffer.byteLength(text4)}, last_update_date = '${ts4}' ` + + `WHERE path = '/test/file1.md'` + ); + await sync(); + const rows4 = await query(`SELECT id, summary FROM "${TABLE}" WHERE path = '/test/file1.md'`); + assert(rows4[0].id === id1, "id preserved through upsert"); + assert(rows4[0].summary === text4, "content replaced via upsert"); + + // ── Test 5: Upsert flow — SELECT then INSERT for new path ──────────────── + console.log("\nTest 5: Upsert — INSERT for new path"); + const id5 = randomUUID(); const ts5 = new Date().toISOString(); - const text5 = "upsert-updated"; - const hex5 = Buffer.from(text5).toString("hex"); - const check5 = await query(`SELECT path FROM "${TABLE}" WHERE path = '/test/file1.txt' LIMIT 1`); - if (check5.length > 0) { - await query( - `UPDATE "${TABLE}" SET content = E'\\\\x${hex5}', summary = E'${esc(text5)}', ` + - `mime_type = 'text/plain', size_bytes = ${Buffer.byteLength(text5)}, timestamp = '${ts5}' ` + - `WHERE path = '/test/file1.txt'` - ); - } - await query(`SELECT deeplake_sync_table('${TABLE}')`); - const rows5 = await query(`SELECT id, summary, timestamp FROM "${TABLE}" WHERE path = '/test/file1.txt'`); - assert(rows5[0].id === id1, `id still preserved through upsert (${rows5[0].id})`); - assert(rows5[0].summary === text5, "content replaced via upsert"); - assert(rows5[0].timestamp === ts5, "timestamp refreshed via upsert"); - - // ── Test 6: Full upsert flow — SELECT then INSERT for new path ───────────── - console.log("\nTest 6: Full upsert — SELECT then INSERT for new path"); - const id6 = randomUUID(); - const ts6 = new Date().toISOString(); - const text6 = "brand new file"; - const hex6 = Buffer.from(text6).toString("hex"); - const check6 = await query(`SELECT path FROM "${TABLE}" WHERE path = '/test/file2.txt' LIMIT 1`); - if (check6.length === 0) { - await query( - `INSERT INTO "${TABLE}" (id, path, filename, content, summary, 
mime_type, size_bytes, timestamp) ` + - `VALUES ('${id6}', '/test/file2.txt', 'file2.txt', E'\\\\x${hex6}', E'${esc(text6)}', 'text/plain', ${Buffer.byteLength(text6)}, '${ts6}')` - ); - } - await query(`SELECT deeplake_sync_table('${TABLE}')`); - const rows6 = await query(`SELECT id, summary, timestamp FROM "${TABLE}" WHERE path = '/test/file2.txt'`); - assert(rows6.length === 1, "new row inserted"); - assert(rows6[0].id === id6, `new id assigned (${rows6[0].id})`); - assert(rows6[0].summary === text6, "content correct"); - - // ── Test 7: Multiple updates preserve same id ────────────────────────────── - console.log("\nTest 7: Multiple sequential updates preserve same id"); + const text5 = "# Second File\nBrand new."; + const check5 = await query(`SELECT path FROM "${TABLE}" WHERE path = '/test/file2.md' LIMIT 1`); + assert(check5.length === 0, "path does not exist yet"); + await query( + `INSERT INTO "${TABLE}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, creation_date, last_update_date) ` + + `VALUES ('${id5}', '/test/file2.md', 'file2.md', E'${esc(text5)}', 'alice', 'text/markdown', ${Buffer.byteLength(text5)}, 'my-project', 'test file', '${ts5}', '${ts5}')` + ); + await sync(); + const rows5 = await query(`SELECT id, summary, author, project, description FROM "${TABLE}" WHERE path = '/test/file2.md'`); + assert(rows5.length === 1, "new row inserted"); + assert(rows5[0].id === id5, "correct id"); + assert(rows5[0].summary === text5, "content correct"); + assert(rows5[0].author === "alice", "author set"); + assert(rows5[0].project === "my-project", "project set"); + assert(rows5[0].description === "test file", "description set"); + + // ── Test 6: Multiple updates preserve id ────────────────────────────────── + console.log("\nTest 6: Multiple sequential updates preserve id"); for (let i = 0; i < 3; i++) { const ts = new Date().toISOString(); - const txt = `update-${i}`; - const hx = Buffer.from(txt).toString("hex"); + 
const txt = `revision-${i}`; await query( - `UPDATE "${TABLE}" SET content = E'\\\\x${hx}', summary = E'${esc(txt)}', ` + - `size_bytes = ${Buffer.byteLength(txt)}, timestamp = '${ts}' ` + - `WHERE path = '/test/file1.txt'` + `UPDATE "${TABLE}" SET summary = E'${esc(txt)}', ` + + `size_bytes = ${Buffer.byteLength(txt)}, last_update_date = '${ts}' ` + + `WHERE path = '/test/file1.md'` ); } - await query(`SELECT deeplake_sync_table('${TABLE}')`); - const rows7 = await query(`SELECT id, summary FROM "${TABLE}" WHERE path = '/test/file1.txt'`); - assert(rows7[0].id === id1, `id still original after 3 updates (${rows7[0].id})`); - assert(rows7[0].summary === "update-2", "content is from last update"); - - // ── Test 8: DELETE then re-INSERT gets new id ────────────────────────────── - console.log("\nTest 8: After DELETE, re-INSERT gets a new id"); - await query(`DELETE FROM "${TABLE}" WHERE path = '/test/file2.txt'`); - await query(`SELECT deeplake_sync_table('${TABLE}')`); - const id8 = randomUUID(); - const ts8 = new Date().toISOString(); + await sync(); + const rows6 = await query(`SELECT id, summary FROM "${TABLE}" WHERE path = '/test/file1.md'`); + assert(rows6[0].id === id1, "id still original after 3 updates"); + assert(rows6[0].summary === "revision-2", "content is from last update"); + + // ── Test 7: DELETE then re-INSERT gets new id ───────────────────────────── + console.log("\nTest 7: DELETE + re-INSERT gets new id"); + await query(`DELETE FROM "${TABLE}" WHERE path = '/test/file2.md'`); + await sync(); + const id7 = randomUUID(); + const ts7 = new Date().toISOString(); await query( - `INSERT INTO "${TABLE}" (id, path, filename, content, summary, mime_type, size_bytes, timestamp) ` + - `VALUES ('${id8}', '/test/file2.txt', 'file2.txt', E'\\\\x${hex6}', E'${esc(text6)}', 'text/plain', ${Buffer.byteLength(text6)}, '${ts8}')` + `INSERT INTO "${TABLE}" (id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date) ` + + `VALUES ('${id7}', 
'/test/file2.md', 'file2.md', E'${esc(text5)}', 'text/markdown', ${Buffer.byteLength(text5)}, '${ts7}', '${ts7}')` ); - await query(`SELECT deeplake_sync_table('${TABLE}')`); - const rows8 = await query(`SELECT id FROM "${TABLE}" WHERE path = '/test/file2.txt'`); - assert(rows8[0].id === id8, `new id after delete+insert (${rows8[0].id} !== old ${id6})`); + await sync(); + const rows7 = await query(`SELECT id FROM "${TABLE}" WHERE path = '/test/file2.md'`); + assert(rows7[0].id === id7, `new id after delete+insert (got ${rows7[0].id})`); - // ── Test 9: UPDATE on non-existent path is a no-op ───────────────────────── - console.log("\nTest 9: UPDATE on non-existent path is a no-op"); + // ── Test 8: UPDATE on non-existent path is a no-op ──────────────────────── + console.log("\nTest 8: UPDATE on non-existent path is a no-op"); await query( - `UPDATE "${TABLE}" SET summary = E'ghost', timestamp = '${new Date().toISOString()}' ` + - `WHERE path = '/test/does-not-exist.txt'` + `UPDATE "${TABLE}" SET summary = E'ghost', last_update_date = '${new Date().toISOString()}' ` + + `WHERE path = '/test/does-not-exist.md'` ); - await query(`SELECT deeplake_sync_table('${TABLE}')`); - const rows9 = await query(`SELECT path FROM "${TABLE}" WHERE path = '/test/does-not-exist.txt'`); - assert(rows9.length === 0, "no row created by UPDATE on missing path"); - - // ── Summary ──────────────────────────────────────────────────────────────── + await sync(); + const rows8 = await query(`SELECT path FROM "${TABLE}" WHERE path = '/test/does-not-exist.md'`); + assert(rows8.length === 0, "no row created by UPDATE on missing path"); + + // ── Test 9: Special characters in content ───────────────────────────────── + console.log("\nTest 9: Special characters — quotes, backslashes, unicode"); + const id9 = randomUUID(); + const text9 = "It's a \"test\" with \\backslashes\\ and émojis 🎉"; + await query( + `INSERT INTO "${TABLE}" (id, path, filename, summary, mime_type, size_bytes, creation_date, 
last_update_date) ` + + `VALUES ('${id9}', '/test/special.md', 'special.md', E'${esc(text9)}', 'text/markdown', ${Buffer.byteLength(text9)}, '${new Date().toISOString()}', '${new Date().toISOString()}')` + ); + await sync(); + const rows9 = await query(`SELECT summary FROM "${TABLE}" WHERE path = '/test/special.md'`); + assert(rows9.length === 1, "row with special chars inserted"); + assert(rows9[0].summary === text9, `special chars roundtripped: ${rows9[0].summary}`); + + // ── Test 10: No BYTEA column in schema ──────────────────────────────────── + console.log("\nTest 10: Schema has no BYTEA content column"); + const cols = await query( + `SELECT column_name, data_type FROM information_schema.columns WHERE table_name = '${TABLE}'` + ); + const colNames = cols.map(c => c.column_name); + assert(!colNames.includes("content"), "no 'content' column in schema"); + assert(colNames.includes("summary"), "has 'summary' column"); + assert(colNames.includes("author"), "has 'author' column"); + const summaryCol = cols.find(c => c.column_name === "summary"); + assert(summaryCol?.data_type === "text", `summary is TEXT (got ${summaryCol?.data_type})`); + + // ── Summary ─────────────────────────────────────────────────────────────── console.log(`\n${"=".repeat(50)}`); console.log(`Results: ${passed} passed, ${failed} failed`); } finally { - // ── Cleanup ──────────────────────────────────────────────────────────────── + // ── Cleanup ─────────────────────────────────────────────────────────────── console.log(`\nDropping table "${TABLE}"...`); try { await query(`DROP TABLE "${TABLE}"`); diff --git a/claude-code/tests/session-summary.test.ts b/claude-code/tests/session-summary.test.ts index 0f81399..09f123a 100644 --- a/claude-code/tests/session-summary.test.ts +++ b/claude-code/tests/session-summary.test.ts @@ -1,9 +1,9 @@ import { describe, it, expect, beforeEach, vi } from "vitest"; -import { DeeplakeFs, isText, guessMime } from "../../src/shell/deeplake-fs.js"; +import { 
DeeplakeFs, guessMime } from "../../src/shell/deeplake-fs.js"; // ── Mock client (same pattern as deeplake-fs.test.ts) ──────────────────────── type Row = { - id: string; path: string; filename: string; content: Buffer; + id: string; path: string; filename: string; summary: string; mime_type: string; size_bytes: number; project: string; description: string; creation_date: string; last_update_date: string; }; @@ -13,8 +13,7 @@ function makeClient(seed: Record = {}) { id: `seed-${path}`, path, filename: path.split("/").pop()!, - content, - summary: isText(content) ? content.toString("utf-8") : "", + summary: content.toString("utf-8"), mime_type: guessMime(path.split("/").pop()!), size_bytes: content.length, project: "", @@ -29,15 +28,10 @@ function makeClient(seed: Record = {}) { if (sql.includes("SELECT path, size_bytes, mime_type")) { return rows.map(r => ({ path: r.path, size_bytes: r.size_bytes, mime_type: r.mime_type })); } - if (sql.includes("SELECT content FROM")) { + if (sql.includes("SELECT summary FROM")) { const match = sql.match(/path = '([^']+)'/); const row = match ? rows.find(r => r.path === match[1]) : undefined; - return row ? [{ content: `\\x${row.content.toString("hex")}` }] : []; - } - if (sql.includes("SELECT summary, content")) { - const match = sql.match(/path = '([^']+)'/); - const row = match ? rows.find(r => r.path === match[1]) : undefined; - return row ? [{ summary: row.summary, content: `\\x${row.content.toString("hex")}` }] : []; + return row ? 
[{ summary: row.summary }] : []; } if (sql.includes("SELECT path, project, description, creation_date, last_update_date")) { return rows @@ -71,18 +65,18 @@ function makeClient(seed: Record = {}) { const cdMatch = sql.match(/creation_date = '([^']+)'/); if (cdMatch) row.creation_date = cdMatch[1]; if (sql.includes("summary = summary ||")) { - const hexMatch = sql.match(/content \|\| E'\\\\x([0-9a-f]*)'/); - if (hexMatch) { - const appendBuf = Buffer.from(hexMatch[1], "hex"); - row.content = Buffer.concat([row.content, appendBuf]); - row.summary += appendBuf.toString("utf-8"); - row.size_bytes = row.content.length; + const appendMatch = sql.match(/summary \|\| E'((?:[^']|'')*)'/); + if (appendMatch) { + const appendText = appendMatch[1].replace(/''/g, "'"); + row.summary += appendText; + row.size_bytes = Buffer.byteLength(row.summary, "utf-8"); } } else { - const hexMatch = sql.match(/content = E'\\\\x([0-9a-f]*)'/); const textMatch = sql.match(/summary = E'((?:[^']|'')*)'/); - if (hexMatch) { row.content = Buffer.from(hexMatch[1], "hex"); row.size_bytes = row.content.length; } - if (textMatch) { row.summary = textMatch[1].replace(/''/g, "'"); } + if (textMatch) { + row.summary = textMatch[1].replace(/''/g, "'"); + row.size_bytes = Buffer.byteLength(row.summary, "utf-8"); + } } const projMatch = sql.match(/project = '([^']*)'/); if (projMatch) row.project = projMatch[1]; @@ -96,12 +90,8 @@ function makeClient(seed: Record = {}) { const valuesMatch = sql.match(/VALUES \((.+)\)$/s); if (valuesMatch) { const pathMatch = sql.match(/VALUES \('[^']+', '([^']+)'/); - const hexMatch = sql.match(/E'\\\\x([0-9a-f]*)'/); - const textMatch = sql.match(/E'\\\\x[0-9a-f]*', E'((?:[^']|'')*)'/); if (pathMatch) { const path = pathMatch[1]; - const content = hexMatch ? Buffer.from(hexMatch[1], "hex") : Buffer.alloc(0); - const summary = textMatch?.[1]?.replace(/''/g, "'") ?? ""; const colsPart = sql.match(/\(([^)]+)\)\s+VALUES/)?.[1] ?? 
""; const colsList = colsPart.split(",").map(c => c.trim()); const valsStr = valuesMatch[1]; @@ -125,11 +115,12 @@ function makeClient(seed: Record<string, Buffer> = {}) { } const colMap: Record<string, string> = {}; for (let c = 0; c < colsList.length; c++) colMap[colsList[c]] = allVals[c] ?? ""; + const summary = colMap["summary"] ?? ""; const idx = rows.findIndex(r => r.path === path); if (idx >= 0) rows.splice(idx, 1); rows.push({ id: colMap["id"] ?? "", path, filename: path.split("/").pop()!, - content, summary, mime_type: "text/plain", size_bytes: content.length, + summary, mime_type: "text/plain", size_bytes: Buffer.byteLength(summary, "utf-8"), project: colMap["project"] ?? "", description: colMap["description"] ?? "", creation_date: colMap["creation_date"] ?? "", last_update_date: colMap["last_update_date"] ?? "", }); diff --git a/claude-code/tests/sessions-table.test.ts b/claude-code/tests/sessions-table.test.ts index ac35a11..81f3bb5 100644 --- a/claude-code/tests/sessions-table.test.ts +++ b/claude-code/tests/sessions-table.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect, vi } from "vitest"; -import { DeeplakeFs, isText, guessMime } from "../../src/shell/deeplake-fs.js"; +import { DeeplakeFs, guessMime } from "../../src/shell/deeplake-fs.js"; // ── Mock client that simulates both memory and sessions tables ────────────── @@ -48,20 +48,11 @@ function makeClient(memoryRows: Row[] = [], sessionRows: Row[] = []) { } // Read from memory table - if (sql.includes("SELECT summary, content") && !isSessionsQuery) { + if (sql.includes("SELECT summary FROM") && !isSessionsQuery) { const pathMatch = sql.match(/path = '([^']+)'/); if (pathMatch) { const row = memoryRows.find(r => r.path === pathMatch[1]); - return row ?
[{ summary: row.text_content, content: "" }] : []; - } - } - - // SELECT content (binary) from memory - if (sql.includes("SELECT content FROM") && !isSessionsQuery) { - const pathMatch = sql.match(/path = '([^']+)'/); - if (pathMatch) { - const row = memoryRows.find(r => r.path === pathMatch[1]); - return row ? [{ content: `\\x${Buffer.from(row.text_content).toString("hex")}` }] : []; + return row ? [{ summary: row.text_content }] : []; } } @@ -204,6 +195,78 @@ describe("DeeplakeFs — multiple sessions in same table", () => { }); }); +describe("session files are read-only", () => { + async function makeFsWithSession() { + const sessionRows: Row[] = [ + { path: "/sessions/alice/alice_org_default_s1.jsonl", text_content: '{"type":"user_message"}', size_bytes: 22, mime_type: "application/json", creation_date: "2026-01-01T00:00:01Z" }, + ]; + const memoryRows: Row[] = [ + { path: "/notes.md", text_content: "hello", size_bytes: 5, mime_type: "text/markdown", creation_date: "2026-01-01" }, + ]; + const client = makeClient(memoryRows, sessionRows); + const fs = await DeeplakeFs.create(client as never, "memory", "/", "sessions"); + return { fs, client }; + } + + it("writeFile rejects session paths with EPERM", async () => { + const { fs } = await makeFsWithSession(); + await expect(fs.writeFile("/sessions/alice/alice_org_default_s1.jsonl", "overwrite")) + .rejects.toMatchObject({ code: "EPERM" }); + }); + + it("appendFile rejects session paths with EPERM", async () => { + const { fs } = await makeFsWithSession(); + await expect(fs.appendFile("/sessions/alice/alice_org_default_s1.jsonl", "append")) + .rejects.toMatchObject({ code: "EPERM" }); + }); + + it("rm rejects session paths with EPERM", async () => { + const { fs } = await makeFsWithSession(); + await expect(fs.rm("/sessions/alice/alice_org_default_s1.jsonl")) + .rejects.toMatchObject({ code: "EPERM" }); + }); + + it("cp rejects session path as destination with EPERM", async () => { + const { fs } = await 
makeFsWithSession(); + await expect(fs.cp("/notes.md", "/sessions/alice/alice_org_default_s1.jsonl")) + .rejects.toMatchObject({ code: "EPERM" }); + }); + + it("mv rejects session path as source with EPERM", async () => { + const { fs } = await makeFsWithSession(); + await expect(fs.mv("/sessions/alice/alice_org_default_s1.jsonl", "/moved.jsonl")) + .rejects.toMatchObject({ code: "EPERM" }); + }); + + it("mv rejects session path as destination with EPERM", async () => { + const { fs } = await makeFsWithSession(); + await expect(fs.mv("/notes.md", "/sessions/alice/alice_org_default_s1.jsonl")) + .rejects.toMatchObject({ code: "EPERM" }); + }); + + it("readFile still works on session paths", async () => { + const { fs } = await makeFsWithSession(); + const content = await fs.readFile("/sessions/alice/alice_org_default_s1.jsonl"); + expect(content).toContain("user_message"); + }); + + it("cp from session path as source works (read-only source is fine)", async () => { + const { fs } = await makeFsWithSession(); + await fs.cp("/sessions/alice/alice_org_default_s1.jsonl", "/copy.jsonl"); + const content = await fs.readFile("/copy.jsonl"); + expect(content).toContain("user_message"); + }); + + it("rm -rf on parent dir skips session files", async () => { + const { fs } = await makeFsWithSession(); + // rm -rf /sessions should not remove session files from the tree + await fs.rm("/sessions", { recursive: true, force: true }); + // Session file should still be readable + const content = await fs.readFile("/sessions/alice/alice_org_default_s1.jsonl"); + expect(content).toContain("user_message"); + }); +}); + describe("ensureSessionsTable schema", () => { it("creates table with JSONB message column", async () => { const client = { @@ -257,6 +320,7 @@ describe("ensureSessionsTable schema", () => { expect(createSql).toContain("summary TEXT"); expect(createSql).toContain("author TEXT"); expect(createSql).not.toContain("content_text"); + expect(createSql).not.toContain("BYTEA"); 
}); it("memory table migration adds author column", async () => { diff --git a/package-lock.json b/package-lock.json index edd81ae..1e88ebc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "deeplake-claude-code-plugins", - "version": "0.6.6", + "version": "0.6.7", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "deeplake-claude-code-plugins", - "version": "0.6.6", + "version": "0.6.7", "dependencies": { "deeplake": "^0.3.30", "just-bash": "^2.14.0", diff --git a/package.json b/package.json index 2a06bf5..bd502a0 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "hivemind", - "version": "0.6.6", + "version": "0.6.7", "description": "Cloud-backed persistent shared memory for AI agents powered by Deeplake", "type": "module", "bin": { diff --git a/src/deeplake-api.ts b/src/deeplake-api.ts index e658cbf..24c12f0 100644 --- a/src/deeplake-api.ts +++ b/src/deeplake-api.ts @@ -37,7 +37,6 @@ class Semaphore { export interface WriteRow { path: string; filename: string; - content: Buffer; contentText: string; mimeType: string; sizeBytes: number; @@ -135,7 +134,6 @@ export class DeeplakeApi { } private async upsertRowSql(row: WriteRow): Promise { - const hex = row.content.toString("hex"); const ts = new Date().toISOString(); const cd = row.creationDate ?? ts; const lud = row.lastUpdateDate ?? 
ts; @@ -143,7 +141,7 @@ export class DeeplakeApi { `SELECT path FROM "${this.tableName}" WHERE path = '${sqlStr(row.path)}' LIMIT 1` ); if (exists.length > 0) { - let setClauses = `content = E'\\\\x${hex}', summary = E'${sqlStr(row.contentText)}', ` + + let setClauses = `summary = E'${sqlStr(row.contentText)}', ` + `mime_type = '${sqlStr(row.mimeType)}', size_bytes = ${row.sizeBytes}, last_update_date = '${lud}'`; if (row.project !== undefined) setClauses += `, project = '${sqlStr(row.project)}'`; if (row.description !== undefined) setClauses += `, description = '${sqlStr(row.description)}'`; @@ -152,8 +150,8 @@ export class DeeplakeApi { ); } else { const id = randomUUID(); - let cols = "id, path, filename, content, summary, mime_type, size_bytes, creation_date, last_update_date"; - let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'\\\\x${hex}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; + let cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date"; + let vals = `'${id}', '${sqlStr(row.path)}', '${sqlStr(row.filename)}', E'${sqlStr(row.contentText)}', '${sqlStr(row.mimeType)}', ${row.sizeBytes}, '${cd}', '${lud}'`; if (row.project !== undefined) { cols += ", project"; vals += `, '${sqlStr(row.project)}'`; } if (row.description !== undefined) { cols += ", description"; vals += `, '${sqlStr(row.description)}'`; } await this.query( @@ -220,10 +218,9 @@ export class DeeplakeApi { `id TEXT NOT NULL DEFAULT '', ` + `path TEXT NOT NULL DEFAULT '', ` + `filename TEXT NOT NULL DEFAULT '', ` + - `content BYTEA NOT NULL DEFAULT ''::bytea, ` + `summary TEXT NOT NULL DEFAULT '', ` + `author TEXT NOT NULL DEFAULT '', ` + - `mime_type TEXT NOT NULL DEFAULT 'application/octet-stream', ` + + `mime_type TEXT NOT NULL DEFAULT 'text/plain', ` + `size_bytes BIGINT NOT NULL DEFAULT 0, ` + `project TEXT NOT NULL DEFAULT '', ` + `description TEXT NOT NULL DEFAULT '', ` + diff 
--git a/src/hooks/session-start.ts b/src/hooks/session-start.ts index 11f543a..e6d76f7 100644 --- a/src/hooks/session-start.ts +++ b/src/hooks/session-start.ts @@ -117,12 +117,11 @@ async function createPlaceholder(api: DeeplakeApi, table: string, sessionId: str `- **Status**: in-progress`, "", ].join("\n"); - const hex = Buffer.from(content, "utf-8").toString("hex"); const filename = `${sessionId}.md`; await api.query( - `INSERT INTO "${table}" (id, path, filename, content, summary, author, mime_type, size_bytes, project, description, creation_date, last_update_date) ` + - `VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'\\\\x${hex}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ` + + `INSERT INTO "${table}" (id, path, filename, summary, author, mime_type, size_bytes, project, description, creation_date, last_update_date) ` + + `VALUES ('${crypto.randomUUID()}', '${sqlStr(summaryPath)}', '${sqlStr(filename)}', E'${sqlStr(content)}', '${sqlStr(userName)}', 'text/markdown', ` + `${Buffer.byteLength(content, "utf-8")}, '${sqlStr(projectName)}', 'in progress', '${now}', '${now}')` ); diff --git a/src/hooks/wiki-worker.ts b/src/hooks/wiki-worker.ts index 4014402..9fbecb6 100644 --- a/src/hooks/wiki-worker.ts +++ b/src/hooks/wiki-worker.ts @@ -161,7 +161,6 @@ async function main(): Promise { if (existsSync(tmpSummary)) { const text = readFileSync(tmpSummary, "utf-8"); if (text.trim()) { - const hex = Buffer.from(text, "utf-8").toString("hex"); const fname = `${cfg.sessionId}.md`; const vpath = `/summaries/${cfg.userName}/${fname}`; const ts = new Date().toISOString(); @@ -174,15 +173,15 @@ async function main(): Promise { if (existing.length > 0) { await query( `UPDATE "${cfg.memoryTable}" SET ` + - `content = E'\\\\x${hex}', summary = E'${esc(text)}', ` + + `summary = E'${esc(text)}', ` + `size_bytes = ${Buffer.byteLength(text)}, last_update_date = '${ts}' ` + `WHERE path = '${esc(vpath)}'` ); } else { const id = 
crypto.randomUUID(); await query( - `INSERT INTO "${cfg.memoryTable}" (id, path, filename, content, summary, author, mime_type, size_bytes, project, creation_date, last_update_date) ` + - `VALUES ('${id}', '${esc(vpath)}', '${esc(fname)}', E'\\\\x${hex}', E'${esc(text)}', '${esc(cfg.userName)}', 'text/markdown', ` + + `INSERT INTO "${cfg.memoryTable}" (id, path, filename, summary, author, mime_type, size_bytes, project, creation_date, last_update_date) ` + + `VALUES ('${id}', '${esc(vpath)}', '${esc(fname)}', E'${esc(text)}', '${esc(cfg.userName)}', 'text/markdown', ` + `${Buffer.byteLength(text)}, '${esc(cfg.project)}', '${ts}', '${ts}')` ); } diff --git a/src/shell/deeplake-fs.ts b/src/shell/deeplake-fs.ts index 4191fa9..8b97232 100644 --- a/src/shell/deeplake-fs.ts +++ b/src/shell/deeplake-fs.ts @@ -13,7 +13,6 @@ interface DirentEntry { name: string; isFile: boolean; isDirectory: boolean; isS // ── constants ───────────────────────────────────────────────────────────────── const BATCH_SIZE = 10; const FLUSH_DEBOUNCE_MS = 200; -const TEXT_DETECT_BYTES = 4096; // ── helpers ─────────────────────────────────────────────────────────────────── export function normPath(p: string): string { @@ -28,22 +27,14 @@ function parentOf(p: string): string { import { sqlStr as esc } from "../utils/sql.js"; -export function isText(buf: Buffer): boolean { - const end = Math.min(buf.length, TEXT_DETECT_BYTES); - for (let i = 0; i < end; i++) if (buf[i] === 0) return false; - return true; -} - export function guessMime(filename: string): string { const ext = filename.split(".").pop()?.toLowerCase() ?? ""; return ( ({ json: "application/json", md: "text/markdown", txt: "text/plain", js: "text/javascript", ts: "text/typescript", html: "text/html", - css: "text/css", png: "image/png", jpg: "image/jpeg", jpeg: "image/jpeg", - pdf: "application/pdf", svg: "image/svg+xml", gz: "application/gzip", - zip: "application/zip", - } as Record)[ext] ?? 
"application/octet-stream" + css: "text/css", + } as Record)[ext] ?? "text/plain" ); } @@ -51,23 +42,11 @@ function fsErr(code: string, msg: string, path: string): Error { return Object.assign(new Error(`${code}: ${msg}, '${path}'`), { code }); } -// Decode content returned from SQL: PostgreSQL hex-encodes BYTEA as '\x...' -function decodeContent(raw: unknown): Buffer { - if (raw instanceof Uint8Array) return Buffer.from(raw); - if (Buffer.isBuffer(raw)) return raw; - if (typeof raw === "string") { - return raw.startsWith("\\x") - ? Buffer.from(raw.slice(2), "hex") - : Buffer.from(raw, "base64"); - } - throw new Error(`Unexpected content type: ${typeof raw}`); -} - // ── types ───────────────────────────────────────────────────────────────────── interface FileMeta { size: number; mime: string; mtime: Date; } interface PendingRow { - path: string; filename: string; content: Buffer; + path: string; filename: string; contentText: string; mimeType: string; sizeBytes: number; project?: string; description?: string; creationDate?: string; lastUpdateDate?: string; @@ -239,7 +218,6 @@ export class DeeplakeFs implements IFileSystem { } private async upsertRow(r: PendingRow): Promise { - const hex = r.content.toString("hex"); const text = esc(r.contentText); const p = esc(r.path); const fname = esc(r.filename); @@ -248,7 +226,7 @@ export class DeeplakeFs implements IFileSystem { const cd = r.creationDate ?? ts; const lud = r.lastUpdateDate ?? 
ts; if (this.flushed.has(r.path)) { - let setClauses = `filename = '${fname}', content = E'\\\\x${hex}', summary = E'${text}', ` + + let setClauses = `filename = '${fname}', summary = E'${text}', ` + `mime_type = '${mime}', size_bytes = ${r.sizeBytes}, last_update_date = '${esc(lud)}'`; if (r.project !== undefined) setClauses += `, project = '${esc(r.project)}'`; if (r.description !== undefined) setClauses += `, description = '${esc(r.description)}'`; @@ -257,10 +235,10 @@ export class DeeplakeFs implements IFileSystem { ); } else { const id = randomUUID(); - const cols = "id, path, filename, content, summary, mime_type, size_bytes, creation_date, last_update_date" + + const cols = "id, path, filename, summary, mime_type, size_bytes, creation_date, last_update_date" + (r.project !== undefined ? ", project" : "") + (r.description !== undefined ? ", description" : ""); - const vals = `'${id}', '${p}', '${fname}', E'\\\\x${hex}', E'${text}', '${mime}', ${r.sizeBytes}, '${esc(cd)}', '${esc(lud)}'` + + const vals = `'${id}', '${p}', '${fname}', E'${text}', '${mime}', ${r.sizeBytes}, '${esc(cd)}', '${esc(lud)}'` + (r.project !== undefined ? `, '${esc(r.project)}'` : "") + (r.description !== undefined ? 
`, '${esc(r.description)}'` : ""); await this.client.query( @@ -277,13 +255,22 @@ export class DeeplakeFs implements IFileSystem { `SELECT path, project, description, creation_date, last_update_date FROM "${this.table}" ` + `WHERE path LIKE '${esc("/summaries/")}%' ORDER BY last_update_date DESC` ); + + // Build a lookup: sessionId → JSONL path from sessionPaths + const sessionPathsByUser = new Map<string, string>(); + for (const sp of this.sessionPaths) { + // Session path format: /sessions/<user>/<user>_<org>_<mode>_<sessionId>.jsonl + const m = sp.match(/\/sessions\/[^/]+\/[^/]+_([^.]+)\.jsonl$/); + if (m) sessionPathsByUser.set(m[1], sp.slice(1)); // strip leading / + } + const lines: string[] = [ "# Session Index", "", "List of all Claude Code sessions with summaries.", "", - "| Session | Created | Last Updated | Project | Description |", - "|---------|---------|--------------|---------|-------------|", + "| Session | Conversation | Created | Last Updated | Project | Description |", + "|---------|-------------|---------|--------------|---------|-------------|", ]; for (const row of rows) { const p = row["path"] as string; @@ -293,11 +280,13 @@ export class DeeplakeFs implements IFileSystem { const summaryUser = match[1]; const sessionId = match[2]; const relPath = `summaries/${summaryUser}/${sessionId}.md`; + const convPath = sessionPathsByUser.get(sessionId); + const convLink = convPath ?
`[messages](${convPath})` : ""; const project = (row["project"] as string) || ""; const description = (row["description"] as string) || ""; const creationDate = (row["creation_date"] as string) || ""; const lastUpdateDate = (row["last_update_date"] as string) || ""; - lines.push(`| [${sessionId}](${relPath}) | ${creationDate} | ${lastUpdateDate} | ${project} | ${description} |`); + lines.push(`| [${sessionId}](${relPath}) | ${convLink} | ${creationDate} | ${lastUpdateDate} | ${project} | ${description} |`); } lines.push(""); return lines.join("\n"); @@ -324,16 +313,12 @@ export class DeeplakeFs implements IFileSystem { const inList = uncached.map(p => `'${esc(p)}'`).join(", "); const rows = await this.client.query( - `SELECT path, summary, content FROM "${this.table}" WHERE path IN (${inList})` + `SELECT path, summary FROM "${this.table}" WHERE path IN (${inList})` ); for (const row of rows) { const p = row["path"] as string; - const text = row["summary"] as string; - if (text && text.length > 0) { - this.files.set(p, Buffer.from(text, "utf-8")); - } else if (row["content"] != null) { - this.files.set(p, decodeContent(row["content"])); - } + const text = (row["summary"] as string) ?? ""; + this.files.set(p, Buffer.from(text, "utf-8")); } } @@ -350,7 +335,7 @@ export class DeeplakeFs implements IFileSystem { // 2. Pending batch (written but not yet flushed) const pend = this.pending.get(p); - if (pend) { this.files.set(p, pend.content); return pend.content; } + if (pend) { const buf = Buffer.from(pend.contentText, "utf-8"); this.files.set(p, buf); return buf; } // 3. Session files: concatenate rows from sessions table if (this.sessionPaths.has(p) && this.sessionsTable) { @@ -364,12 +349,12 @@ export class DeeplakeFs implements IFileSystem { return buf; } - // 4. SQL query — content column (BYTEA returned as hex '\x...') + // 4. 
SQL query — summary column (text content) const rows = await this.client.query( - `SELECT content FROM "${this.table}" WHERE path = '${esc(p)}' LIMIT 1` + `SELECT summary FROM "${this.table}" WHERE path = '${esc(p)}' LIMIT 1` ); if (rows.length === 0) throw fsErr("ENOENT", "no such file or directory", p); - const buf = decodeContent(rows[0]["content"]); + const buf = Buffer.from((rows[0]["summary"] as string) ?? "", "utf-8"); this.files.set(p, buf); return buf; } @@ -402,7 +387,7 @@ export class DeeplakeFs implements IFileSystem { // Pending batch const pend = this.pending.get(p); - if (pend) return pend.contentText || pend.content.toString("utf-8"); + if (pend) return pend.contentText; // Session files: concatenate rows from sessions table, ordered by creation_date if (this.sessionPaths.has(p) && this.sessionsTable) { @@ -416,22 +401,14 @@ export class DeeplakeFs implements IFileSystem { return text; } - // For text files prefer summary column (avoids decoding binary column) const rows = await this.client.query( - `SELECT summary, content FROM "${this.table}" WHERE path = '${esc(p)}' LIMIT 1` + `SELECT summary FROM "${this.table}" WHERE path = '${esc(p)}' LIMIT 1` ); if (rows.length === 0) throw fsErr("ENOENT", "no such file or directory", p); - const row = rows[0]; - const text = row["summary"] as string; - if (text && text.length > 0) { - const buf = Buffer.from(text, "utf-8"); - this.files.set(p, buf); - return text; - } - // Binary file: decode content column - const buf = decodeContent(row["content"]); + const text = (rows[0]["summary"] as string) ?? 
""; + const buf = Buffer.from(text, "utf-8"); this.files.set(p, buf); - return buf.toString("utf-8"); + return text; } // ── IFileSystem: writes ─────────────────────────────────────────────────── @@ -442,19 +419,20 @@ export class DeeplakeFs implements IFileSystem { meta: { project?: string; description?: string; creationDate?: string; lastUpdateDate?: string }, ): Promise { const p = normPath(path); + if (this.sessionPaths.has(p)) throw fsErr("EPERM", "session files are read-only", p); if (this.dirs.has(p) && !this.files.has(p)) throw fsErr("EISDIR", "illegal operation on a directory", p); - const buf = typeof content === "string" ? Buffer.from(content, "utf-8") : Buffer.from(content); + const text = typeof content === "string" ? content : Buffer.from(content).toString("utf-8"); + const buf = Buffer.from(text, "utf-8"); const mime = guessMime(basename(p)); - const contentText = isText(buf) ? buf.toString("utf-8") : ""; this.files.set(p, buf); this.meta.set(p, { size: buf.length, mime, mtime: new Date() }); this.addToTree(p); this.pending.set(p, { - path: p, filename: basename(p), content: buf, - contentText, mimeType: mime, sizeBytes: buf.length, + path: p, filename: basename(p), + contentText: text, mimeType: mime, sizeBytes: buf.length, ...meta, }); @@ -464,19 +442,20 @@ export class DeeplakeFs implements IFileSystem { async writeFile(path: string, content: FileContent, _opts?: WriteFileOptions | BufferEncoding): Promise { const p = normPath(path); + if (this.sessionPaths.has(p)) throw fsErr("EPERM", "session files are read-only", p); if (this.dirs.has(p) && !this.files.has(p)) throw fsErr("EISDIR", "illegal operation on a directory", p); - const buf = typeof content === "string" ? Buffer.from(content, "utf-8") : Buffer.from(content); + const text = typeof content === "string" ? content : Buffer.from(content).toString("utf-8"); + const buf = Buffer.from(text, "utf-8"); const mime = guessMime(basename(p)); - const contentText = isText(buf) ? 
buf.toString("utf-8") : ""; this.files.set(p, buf); this.meta.set(p, { size: buf.length, mime, mtime: new Date() }); this.addToTree(p); this.pending.set(p, { - path: p, filename: basename(p), content: buf, - contentText, mimeType: mime, sizeBytes: buf.length, + path: p, filename: basename(p), + contentText: text, mimeType: mime, sizeBytes: buf.length, }); if (this.pending.size >= BATCH_SIZE) await this.flush(); @@ -487,14 +466,15 @@ export class DeeplakeFs implements IFileSystem { const p = normPath(path); const add = typeof content === "string" ? content : Buffer.from(content).toString("utf-8"); + // Session files are read-only (multi-row in sessions table, not memory table) + if (this.sessionPaths.has(p)) throw fsErr("EPERM", "session files are read-only", p); + // Fast path: SQL-level concat — no read-back, O(1) per append if (this.files.has(p) || await this.exists(p).catch(() => false)) { - const addHex = Buffer.from(add, "utf-8").toString("hex"); const ts = new Date().toISOString(); await this.client.query( `UPDATE "${this.table}" SET ` + `summary = summary || E'${esc(add)}', ` + - `content = content || E'\\\\x${addHex}', ` + `size_bytes = size_bytes + ${Buffer.byteLength(add, "utf-8")}, ` + `last_update_date = '${ts}' ` + `WHERE path = '${esc(p)}'` @@ -505,7 +485,7 @@ export class DeeplakeFs implements IFileSystem { if (m) { m.size += Buffer.byteLength(add, "utf-8"); m.mtime = new Date(ts); } } else { // File doesn't exist yet — create it - await this.writeFile(p, typeof content === "string" ? 
Buffer.from(content, "utf-8") : Buffer.from(content), opts); + await this.writeFile(p, content, opts); await this.flush(); } } @@ -603,6 +583,7 @@ export class DeeplakeFs implements IFileSystem { async rm(path: string, opts?: RmOptions): Promise { const p = normPath(path); + if (this.sessionPaths.has(p)) throw fsErr("EPERM", "session files are read-only", p); if (!this.files.has(p) && !this.dirs.has(p)) { if (opts?.force) return; throw fsErr("ENOENT", "no such file or directory", p); @@ -623,12 +604,14 @@ export class DeeplakeFs implements IFileSystem { if (this.dirs.has(childPath)) stack.push(childPath); } } - for (const fp of toDelete) this.removeFromTree(fp); + // Filter out session paths — they are read-only + const safeToDelete = toDelete.filter(fp => !this.sessionPaths.has(fp)); + for (const fp of safeToDelete) this.removeFromTree(fp); this.dirs.delete(p); this.dirs.get(parentOf(p))?.delete(basename(p)); - if (toDelete.length > 0) { - const inList = toDelete.map(fp => `'${esc(fp)}'`).join(", "); + if (safeToDelete.length > 0) { + const inList = safeToDelete.map(fp => `'${esc(fp)}'`).join(", "); await this.client.query(`DELETE FROM "${this.table}" WHERE path IN (${inList})`); } } else { @@ -639,6 +622,7 @@ export class DeeplakeFs implements IFileSystem { async cp(src: string, dest: string, opts?: CpOptions): Promise { const s = normPath(src), d = normPath(dest); + if (this.sessionPaths.has(d)) throw fsErr("EPERM", "session files are read-only", d); if (this.dirs.has(s) && !this.files.has(s)) { if (!opts?.recursive) throw fsErr("EISDIR", "is a directory", s); for (const fp of [...this.files.keys()].filter(k => k === s || k.startsWith(s + "/"))) { @@ -650,6 +634,9 @@ export class DeeplakeFs implements IFileSystem { } async mv(src: string, dest: string): Promise { + const s = normPath(src), d = normPath(dest); + if (this.sessionPaths.has(s)) throw fsErr("EPERM", "session files are read-only", s); + if (this.sessionPaths.has(d)) throw fsErr("EPERM", "session files 
are read-only", d); await this.cp(src, dest, { recursive: true }); await this.rm(src, { recursive: true, force: true }); }