Skip to content

Commit bf3f4b4

Browse files
committed
refactor(builder): floor mtime inside fileStat helper (#1080)
Encapsulate `Math.floor(stat.mtimeMs)` inside the `fileStat` helper so every consumer of the integer DB column gets a pre-floored value by default. Eliminates the risk that a future call site reads `stat.mtimeMs` and stores it un-floored, which would silently write a non-integer (or rounded-up integer) into the DB and cause spurious fast-skip misses on the next build. All existing call sites simplified from `Math.floor(stat.mtimeMs)` (and the already-floored `Math.floor(item.stat.mtime)`) to `stat.mtime`. Behaviour unchanged. Addresses Greptile P2 feedback on the revert PR.
1 parent d5a3890 commit bf3f4b4

4 files changed

Lines changed: 21 additions & 16 deletions

File tree

src/domain/graph/builder/helpers.ts

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -222,12 +222,17 @@ export function fileHash(content: string): string {
222222
}
223223

224224
/**
225-
* Stat a file, returning { mtimeMs, size } or null on error.
225+
* Stat a file, returning { mtime, size } or null on error.
226+
*
227+
* `mtime` is `Math.floor(stat.mtimeMs)` so it matches the integer column
228+
* stored in the DB. Floor-once-here keeps every consumer honest: storing or
229+
* comparing a non-floored `mtimeMs` against the integer DB column would cause
230+
* spurious fast-skip misses on the next build.
226231
*/
227-
export function fileStat(filePath: string): { mtimeMs: number; size: number } | null {
232+
export function fileStat(filePath: string): { mtime: number; size: number } | null {
228233
try {
229234
const s = fs.statSync(filePath);
230-
return { mtimeMs: s.mtimeMs, size: s.size };
235+
return { mtime: Math.floor(s.mtimeMs), size: s.size };
231236
} catch {
232237
return null;
233238
}

src/domain/graph/builder/pipeline.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -938,7 +938,7 @@ async function backfillNativeDroppedFiles(ctx: PipelineContext): Promise<void> {
938938
}
939939
if (code === null) continue;
940940
const stat = fileStat(absPath);
941-
const mtime = stat ? Math.floor(stat.mtimeMs) : 0;
941+
const mtime = stat ? stat.mtime : 0;
942942
const size = stat ? stat.size : 0;
943943
upsertHash.run(relPath, fileHash(code), mtime, size);
944944
}

src/domain/graph/builder/stages/detect-changes.ts

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ interface FileHashRow {
2727
}
2828

2929
interface FileStat {
30-
mtimeMs: number;
30+
mtime: number;
3131
size: number;
3232
}
3333

@@ -182,7 +182,7 @@ function mtimeAndHashTiers(
182182
if (!stat) continue;
183183
const storedMtime = record.mtime || 0;
184184
const storedSize = record.size || 0;
185-
if (storedSize > 0 && Math.floor(stat.mtimeMs) === storedMtime && stat.size === storedSize) {
185+
if (storedSize > 0 && stat.mtime === storedMtime && stat.size === storedSize) {
186186
skipped.push(relPath);
187187
continue;
188188
}
@@ -596,9 +596,9 @@ export function detectNoChanges(
596596
log(`false: stored size <= 0 for ${relPath} (stored=${record.size})`);
597597
return false;
598598
}
599-
if (Math.floor(stat.mtimeMs) !== storedMtime || stat.size !== storedSize) {
599+
if (stat.mtime !== storedMtime || stat.size !== storedSize) {
600600
log(
601-
`false: mtime/size diff for ${relPath}: stat=${Math.floor(stat.mtimeMs)}/${stat.size} stored=${storedMtime}/${storedSize} (mtimeMs=${stat.mtimeMs})`,
601+
`false: mtime/size diff for ${relPath}: stat=${stat.mtime}/${stat.size} stored=${storedMtime}/${storedSize}`,
602602
);
603603
return false;
604604
}
@@ -663,7 +663,7 @@ export async function detectChanges(ctx: PipelineContext): Promise<void> {
663663
relPath: c.relPath,
664664
content: c.content,
665665
hash: c.hash,
666-
stat: c.stat ? { mtime: Math.floor(c.stat.mtimeMs), size: c.stat.size } : undefined,
666+
stat: c.stat ? { mtime: c.stat.mtime, size: c.stat.size } : undefined,
667667
_reverseDepOnly: c._reverseDepOnly,
668668
}));
669669
ctx.metadataUpdates = increResult.changed
@@ -674,7 +674,7 @@ export async function detectChanges(ctx: PipelineContext): Promise<void> {
674674
.map((c) => ({
675675
relPath: c.relPath,
676676
hash: c.hash,
677-
stat: { mtime: Math.floor(c.stat.mtimeMs), size: c.stat.size },
677+
stat: { mtime: c.stat.mtime, size: c.stat.size },
678678
}));
679679
if (!ctx.isFullBuild && ctx.parseChanges.length === 0 && ctx.removed.length === 0) {
680680
const ranAnalysis = await runPendingAnalysis(ctx);

src/domain/graph/builder/stages/insert-nodes.ts

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ export function buildFileHashes(
128128
size = precomputed.stat.size;
129129
} else {
130130
const rawStat = fileStat(path.join(rootDir, relPath));
131-
mtime = rawStat ? Math.floor(rawStat.mtimeMs) : 0;
131+
mtime = rawStat ? rawStat.mtime : 0;
132132
size = rawStat ? rawStat.size : 0;
133133
}
134134
fileHashes.push({ file: relPath, hash: precomputed.hash, mtime, size });
@@ -143,7 +143,7 @@ export function buildFileHashes(
143143
}
144144
if (code !== null) {
145145
const stat = fileStat(absPath);
146-
const mtime = stat ? Math.floor(stat.mtimeMs) : 0;
146+
const mtime = stat ? stat.mtime : 0;
147147
const size = stat ? stat.size : 0;
148148
fileHashes.push({ file: relPath, hash: fileHash(code), mtime, size });
149149
}
@@ -152,7 +152,7 @@ export function buildFileHashes(
152152

153153
// Also include metadata-only updates (self-heal mtime/size without re-parse)
154154
for (const item of metadataUpdates) {
155-
const mtime = item.stat ? Math.floor(item.stat.mtime) : 0;
155+
const mtime = item.stat ? item.stat.mtime : 0;
156156
const size = item.stat ? item.stat.size : 0;
157157
fileHashes.push({ file: item.relPath, hash: item.hash, mtime, size });
158158
}
@@ -365,7 +365,7 @@ function updateFileHashes(
365365
size = precomputed.stat.size;
366366
} else {
367367
const rawStat = fileStat(path.join(rootDir, relPath));
368-
mtime = rawStat ? Math.floor(rawStat.mtimeMs) : 0;
368+
mtime = rawStat ? rawStat.mtime : 0;
369369
size = rawStat ? rawStat.size : 0;
370370
}
371371
upsertHash.run(relPath, precomputed.hash, mtime, size);
@@ -380,7 +380,7 @@ function updateFileHashes(
380380
}
381381
if (code !== null) {
382382
const stat = fileStat(absPath);
383-
const mtime = stat ? Math.floor(stat.mtimeMs) : 0;
383+
const mtime = stat ? stat.mtime : 0;
384384
const size = stat ? stat.size : 0;
385385
upsertHash.run(relPath, fileHash(code), mtime, size);
386386
}
@@ -389,7 +389,7 @@ function updateFileHashes(
389389

390390
// Also update metadata-only entries (self-heal mtime/size without re-parse)
391391
for (const item of metadataUpdates) {
392-
const mtime = item.stat ? Math.floor(item.stat.mtime) : 0;
392+
const mtime = item.stat ? item.stat.mtime : 0;
393393
const size = item.stat ? item.stat.size : 0;
394394
upsertHash.run(item.relPath, item.hash, mtime, size);
395395
}

0 commit comments

Comments (0)