@@ -184843,9 +184843,8 @@ import { createHash as createHash2 } from "crypto";
184843184843// src/lib/query-results-evaluator.ts
184844184844init_cli_executor();
184845184845init_logger();
184846- import { fstatSync, openSync, readFileSync as readFileSync4, writeFileSync } from "fs";
184846+ import { closeSync, fstatSync, mkdirSync as mkdirSync4, openSync, readFileSync as readFileSync4, writeFileSync } from "fs";
184847184847import { dirname as dirname3, isAbsolute as isAbsolute3 } from "path";
184848- import { mkdirSync as mkdirSync4 } from "fs";
184849184848var BUILT_IN_EVALUATORS = {
184850184849 "json-decode": "JSON format decoder for query results",
184851184850 "csv-decode": "CSV format decoder for query results",
@@ -184856,12 +184855,18 @@ var metadataCache = /* @__PURE__ */ new Map();
184856184855async function extractQueryMetadata(queryPath) {
184857184856 try {
184858184857 const fd = openSync(queryPath, "r");
184859- const mtime = fstatSync(fd).mtimeMs;
184860- const cached2 = metadataCache.get(queryPath);
184861- if (cached2 && cached2.mtime === mtime) {
184862- return cached2.metadata;
184858+ let queryContent;
184859+ let mtime;
184860+ try {
184861+ mtime = fstatSync(fd).mtimeMs;
184862+ const cached2 = metadataCache.get(queryPath);
184863+ if (cached2 && cached2.mtime === mtime) {
184864+ return cached2.metadata;
184865+ }
184866+ queryContent = readFileSync4(fd, "utf-8");
184867+ } finally {
184868+ closeSync(fd);
184863184869 }
184864- const queryContent = readFileSync4(fd, "utf-8");
184865184870 const metadata = {};
184866184871 const kindMatch = queryContent.match(/@kind\s+([^\s]+)/);
184867184872 if (kindMatch) metadata.kind = kindMatch[1];
@@ -185122,7 +185127,7 @@ import { randomUUID as randomUUID2 } from "crypto";
185122185127// src/lib/sqlite-store.ts
185123185128var import_sql_asm = __toESM(require_sql_asm(), 1);
185124185129init_logger();
185125- import { mkdirSync as mkdirSync5, readFileSync as readFileSync5, renameSync, writeFileSync as writeFileSync2 } from "fs";
185130+ import { mkdirSync as mkdirSync5, readFileSync as readFileSync5, renameSync, unlinkSync, writeFileSync as writeFileSync2 } from "fs";
185126185131import { join as join8 } from "path";
185127185132var SqliteStore = class _SqliteStore {
185128185133 db = null;
@@ -185190,6 +185195,32 @@ var SqliteStore = class _SqliteStore {
185190185195 CREATE INDEX IF NOT EXISTS idx_annotations_category_entity
185191185196 ON annotations (category, entity_key);
185192185197 `);
185198+ this.exec(`
185199+ CREATE VIRTUAL TABLE IF NOT EXISTS annotations_fts
185200+ USING fts4(tokenize=unicode61, content, label, metadata);
185201+ `);
185202+ this.exec(`
185203+ CREATE TRIGGER IF NOT EXISTS annotations_ai
185204+ AFTER INSERT ON annotations BEGIN
185205+ INSERT INTO annotations_fts(rowid, content, label, metadata)
185206+ VALUES (new.id, new.content, new.label, new.metadata);
185207+ END;
185208+ `);
185209+ this.exec(`
185210+ CREATE TRIGGER IF NOT EXISTS annotations_ad
185211+ AFTER DELETE ON annotations BEGIN
185212+ DELETE FROM annotations_fts WHERE rowid = old.id;
185213+ END;
185214+ `);
185215+ this.exec(`
185216+ CREATE TRIGGER IF NOT EXISTS annotations_au
185217+ AFTER UPDATE ON annotations BEGIN
185218+ DELETE FROM annotations_fts WHERE rowid = old.id;
185219+ INSERT INTO annotations_fts(rowid, content, label, metadata)
185220+ VALUES (new.id, new.content, new.label, new.metadata);
185221+ END;
185222+ `);
185223+ this.backfillAnnotationsFts();
185193185224 this.exec(`
185194185225 CREATE TABLE IF NOT EXISTS query_result_cache (
185195185226 id INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -185238,6 +185269,24 @@ var SqliteStore = class _SqliteStore {
185238185269 }
185239185270 this.dirty = true;
185240185271 }
185272+ /**
185273+ * Backfill the FTS index for any annotations rows that were inserted before
185274+ * the FTS table existed (schema migration). Compares row counts and rebuilds
185275+ * the entire FTS index when it has fewer rows than the annotations table.
185276+ */
185277+ backfillAnnotationsFts() {
185278+ const db = this.ensureDb();
185279+ const ftsCountResult = db.exec("SELECT COUNT(*) FROM annotations_fts");
185280+ const annCountResult = db.exec("SELECT COUNT(*) FROM annotations");
185281+ const ftsCount = ftsCountResult[0]?.values[0][0] ?? 0;
185282+ const annCount = annCountResult[0]?.values[0][0] ?? 0;
185283+ if (ftsCount < annCount) {
185284+ db.run("DELETE FROM annotations_fts");
185285+ db.run(
185286+ "INSERT INTO annotations_fts(rowid, content, label, metadata) SELECT id, content, label, metadata FROM annotations"
185287+ );
185288+ }
185289+ }
185241185290 /**
185242185291 * Get the number of rows modified by the last INSERT/UPDATE/DELETE.
185243185292 */
@@ -185262,7 +185311,15 @@ var SqliteStore = class _SqliteStore {
185262185311 const buffer = Buffer.from(data);
185263185312 const tmpPath = this.dbPath + ".tmp";
185264185313 writeFileSync2(tmpPath, buffer);
185265- renameSync(tmpPath, this.dbPath);
185314+ try {
185315+ renameSync(tmpPath, this.dbPath);
185316+ } catch {
185317+ writeFileSync2(this.dbPath, buffer);
185318+ try {
185319+ unlinkSync(tmpPath);
185320+ } catch {
185321+ }
185322+ }
185266185323 this.dirty = false;
185267185324 }
185268185325 /**
@@ -185413,8 +185470,8 @@ var SqliteStore = class _SqliteStore {
185413185470 params.$entity_key_prefix = filter.entityKeyPrefix + "%";
185414185471 }
185415185472 if (filter?.search) {
185416- conditions.push("(content LIKE $search OR metadata LIKE $search OR label LIKE $search)");
185417- params.$search = "%" + filter.search + "%" ;
185473+ conditions.push("id IN (SELECT rowid FROM annotations_fts WHERE annotations_fts MATCH $search)");
185474+ params.$search = filter.search;
185418185475 }
185419185476 let sql = "SELECT * FROM annotations";
185420185477 if (conditions.length > 0) {
@@ -192082,7 +192139,7 @@ var codeqlResolveTestsTool = {
192082192139};
192083192140
192084192141// src/tools/codeql/search-ql-code.ts
192085- import { closeSync, createReadStream as createReadStream3, fstatSync as fstatSync2, lstatSync, openSync as openSync2, readdirSync as readdirSync8, readFileSync as readFileSync12, realpathSync } from "fs";
192142+ import { closeSync as closeSync2, createReadStream as createReadStream3, fstatSync as fstatSync2, lstatSync, openSync as openSync2, readdirSync as readdirSync8, readFileSync as readFileSync12, realpathSync } from "fs";
192086192143import { basename as basename8, extname as extname2, join as join19, resolve as resolve9 } from "path";
192087192144import { createInterface as createInterface3 } from "readline";
192088192145init_logger();
@@ -192148,7 +192205,7 @@ async function searchFile(filePath, regex, contextLines, maxCollect) {
192148192205 size = fstatSync2(fd).size;
192149192206 } catch {
192150192207 try {
192151- closeSync (fd);
192208+       closeSync2(fd);
192152192209 } catch {
192153192210 }
192154192211 return { matches: [], totalCount: 0 };
@@ -192161,13 +192218,13 @@ async function searchFile(filePath, regex, contextLines, maxCollect) {
192161192218 content = readFileSync12(fd, "utf-8");
192162192219 } catch {
192163192220 try {
192164- closeSync (fd);
192221+       closeSync2(fd);
192165192222 } catch {
192166192223 }
192167192224 return { matches: [], totalCount: 0 };
192168192225 }
192169192226 try {
192170- closeSync (fd);
192227+       closeSync2(fd);
192171192228 } catch {
192172192229 }
192173192230 const lines = content.replace(/\r\n/g, "\n").split("\n");
@@ -192215,7 +192272,7 @@ async function searchFileStreaming(filePath, regex, contextLines, maxCollect, fd
192215192272 } catch {
192216192273 if (fd !== void 0) {
192217192274 try {
192218- closeSync (fd);
192275+         closeSync2(fd);
192219192276 } catch {
192220192277 }
192221192278 }
@@ -195594,14 +195651,25 @@ function registerQueryResultsCacheRetrieveTool(server) {
195594195651 if (!subset2) {
195595195652 return { content: [{ type: "text", text: `Cached content not available for key: ${cacheKey2}` }] };
195596195653 }
195654+ let parsedResults;
195655+ try {
195656+ parsedResults = JSON.parse(subset2.content);
195657+ } catch {
195658+ return {
195659+ content: [{
195660+ type: "text",
195661+ text: subset2.content
195662+ }]
195663+ };
195664+ }
195597195665 return {
195598195666 content: [{
195599195667 type: "text",
195600195668 text: JSON.stringify({
195601195669 totalResults: subset2.totalResults,
195602195670 returnedResults: subset2.returnedResults,
195603195671 truncated: subset2.truncated,
195604- results: JSON.parse(subset2.content)
195672+ results: parsedResults
195605195673 }, null, 2)
195606195674 }]
195607195675 };
0 commit comments