feat(ai-psychosis): add readRecentEndRecords for cross-session reads
This commit is contained in:
parent
7b0afdb541
commit
f0f3bc3294
2 changed files with 108 additions and 3 deletions
|
|
@ -195,6 +195,37 @@ export function sessionsToday() {
|
|||
}
|
||||
}
|
||||
|
||||
// Tail-first scan: return the N most recent end records (records with
|
||||
// duration_min defined) in chronological order. Cost is bounded by N, not
|
||||
// by total file size — a 50K-record sessions.jsonl is read once but only
|
||||
// the last few hundred lines are JSON-parsed before N is satisfied.
|
||||
export function readRecentEndRecords(n) {
|
||||
if (!Number.isFinite(n) || n <= 0) return [];
|
||||
if (!existsSync(SESSIONS_LOG)) return [];
|
||||
|
||||
let lines;
|
||||
try {
|
||||
lines = readFileSync(SESSIONS_LOG, 'utf8').split('\n');
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
|
||||
const collected = [];
|
||||
for (let i = lines.length - 1; i >= 0 && collected.length < n; i--) {
|
||||
const line = lines[i];
|
||||
if (!line) continue;
|
||||
try {
|
||||
const rec = JSON.parse(line);
|
||||
if (rec.duration_min !== undefined) {
|
||||
collected.push(rec);
|
||||
}
|
||||
} catch { /* skip malformed */ }
|
||||
}
|
||||
|
||||
// Reverse so caller receives oldest-first (chronological order).
|
||||
return collected.reverse();
|
||||
}
|
||||
|
||||
// --- State file management ---
|
||||
|
||||
export function sessionStateFile(sid) {
|
||||
|
|
|
|||
|
|
@ -3,10 +3,19 @@
|
|||
// with the expected shape. Detector-level behaviour is covered in
|
||||
// per-detector test files (user-info, validation-seeking, stakes-matrix).
|
||||
|
||||
import { test, describe } from 'node:test';
|
||||
import { test, describe, before, after } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, rmSync, writeFileSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { tmpdir } from 'os';
|
||||
|
||||
import {
|
||||
// Allocate a fresh data dir before importing lib.mjs, so SESSIONS_LOG points
|
||||
// at a sandbox path. The lib.mjs module captures CLAUDE_PLUGIN_DATA at import
|
||||
// time, so the env var must be set first.
|
||||
const TEST_DATA_DIR = mkdtempSync(join(tmpdir(), 'ia-lib-test-'));
|
||||
process.env.CLAUDE_PLUGIN_DATA = TEST_DATA_DIR;
|
||||
|
||||
const {
|
||||
TIER1_TURN_THRESHOLD,
|
||||
TIER2_SESSION_THRESHOLD,
|
||||
THRESHOLD_VALSEEK_FLAGS,
|
||||
|
|
@ -14,7 +23,13 @@ import {
|
|||
HIGH_SYCOPHANCY_DOMAINS,
|
||||
HIGH_STAKES_DOMAINS,
|
||||
INFO_DOMAINS,
|
||||
} from '../hooks/scripts/lib.mjs';
|
||||
SESSIONS_LOG,
|
||||
readRecentEndRecords,
|
||||
} = await import('../hooks/scripts/lib.mjs');
|
||||
|
||||
// Tear down the sandbox data dir once the whole suite has finished.
after(() => rmSync(TEST_DATA_DIR, { recursive: true, force: true }));
|
||||
|
||||
describe('v1.2 thresholds', () => {
|
||||
test('tier-1 turn threshold is 15', () => {
|
||||
|
|
@ -76,3 +91,62 @@ describe('domain classification arrays', () => {
|
|||
assert.equal(Object.isFrozen(INFO_DOMAINS), true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('readRecentEndRecords', () => {
  // Overwrite SESSIONS_LOG with the given records, one JSON object per line.
  const writeFixture = (records) =>
    writeFileSync(SESSIONS_LOG, records.map((r) => JSON.stringify(r)).join('\n') + '\n');

  test('returns N most recent end records in chronological order', () => {
    writeFixture([
      { session_id: 'a', start: '2026-05-01T10:00:00Z' }, // start record (no duration)
      { session_id: 'a', start: '2026-05-01T10:00:00Z', end: '2026-05-01T10:30:00Z', duration_min: 30 },
      { session_id: 'b', start: '2026-05-01T11:00:00Z' },
      { session_id: 'b', start: '2026-05-01T11:00:00Z', end: '2026-05-01T11:45:00Z', duration_min: 45 },
      { session_id: 'c', start: '2026-05-01T12:00:00Z', end: '2026-05-01T12:20:00Z', duration_min: 20 },
      { session_id: 'd', start: '2026-05-01T13:00:00Z', end: '2026-05-01T13:50:00Z', duration_min: 50 },
    ]);

    const recent = readRecentEndRecords(3);
    assert.equal(recent.length, 3);
    ['b', 'c', 'd'].forEach((sid, idx) => assert.equal(recent[idx].session_id, sid));
  });

  test('returns fewer than N when not enough end records exist', () => {
    writeFixture([
      { session_id: 'a', start: '2026-05-01T10:00:00Z', end: '2026-05-01T10:30:00Z', duration_min: 30 },
    ]);

    const recent = readRecentEndRecords(5);
    assert.equal(recent.length, 1);
    assert.equal(recent[0].session_id, 'a');
  });

  test('skips malformed JSON lines', () => {
    const goodA = JSON.stringify({ session_id: 'a', duration_min: 1 });
    const goodB = JSON.stringify({ session_id: 'b', duration_min: 2 });
    writeFileSync(SESSIONS_LOG, `${goodA}\nnot json\n${goodB}\n`);

    const recent = readRecentEndRecords(5);
    assert.equal(recent.length, 2);
    ['a', 'b'].forEach((sid, idx) => assert.equal(recent[idx].session_id, sid));
  });

  test('empty file returns []', () => {
    writeFileSync(SESSIONS_LOG, '');
    assert.deepEqual(readRecentEndRecords(3), []);
  });

  test('missing file returns []', () => {
    rmSync(SESSIONS_LOG, { force: true });
    assert.deepEqual(readRecentEndRecords(3), []);
  });

  test('non-positive N returns []', () => {
    writeFixture([{ session_id: 'a', duration_min: 1 }]);
    [0, -1].forEach((bad) => assert.deepEqual(readRecentEndRecords(bad), []));
  });
});
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue