feat(voyage): add lib/exporters/otlp-format.mjs — OTLP/JSON enum-integer SC #13
Step 10 of v4.1-execute (Wave 2, Session 3).
Pure function transformToOtlpJson(records) → OTLP/JSON v1.0 metrics payload
matching OTLP metrics.proto wire format.
CRITICAL (per research/01 dim 4 + risk-assessor CRITICAL 2):
- AggregationTemporality enum values are INTEGERS in JSON, NOT strings
("CUMULATIVE" → 2, not the string "CUMULATIVE")
- timeUnixNano is uint64 over the wire — emit as a decimal STRING in JSON to
avoid JS Number precision loss at nanosecond scale
Inline integer enum constants ved module-scope:
- AGG_TEMPORALITY_UNSPECIFIED = 0
- AGG_TEMPORALITY_DELTA = 1
- AGG_TEMPORALITY_CUMULATIVE = 2
- DATA_POINT_FLAGS_NONE = 0
- DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK = 1
Output struktur: resourceMetrics → scopeMetrics → metrics array. Sum-metrics
(counters: *_total, *_count, *_passed, *_failed, *_skipped) får sum +
isMonotonic + aggregationTemporality. Andre får gauge.
Tester (7 nye, baseline 406 → 413):
- SC #13: typeof aggregationTemporality === 'number' (HEART of SC #13)
- SC #13: enum-konstant drift-pin (typeof + verdi-assert)
- SC #13: typeof timeUnixNano === 'string' (precision-loss mitigation)
- SC #13: strukturell shape-assertion
- Empty input → valid envelope, tomt metrics-array
- isSum heuristic counter vs gauge
- Allowlist-redaksjon sanity (command_excerpt + session_id leaker ikke)
Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
This commit is contained in:
parent
2349d1d431
commit
08ecdc918d
2 changed files with 307 additions and 0 deletions
197
plugins/voyage/lib/exporters/otlp-format.mjs
Normal file
197
plugins/voyage/lib/exporters/otlp-format.mjs
Normal file
|
|
@ -0,0 +1,197 @@
|
|||
// lib/exporters/otlp-format.mjs
|
||||
// Pure transform: voyage JSONL stats records → OTLP/JSON v1.0 metrics payload.
|
||||
//
|
||||
// Per OpenTelemetry Protocol § metrics.proto + research/01 dim 4 (CRITICAL):
|
||||
// AggregationTemporality enum values are INTEGERS in JSON, NOT strings.
|
||||
// "CUMULATIVE" → 2 (not the string)
|
||||
// "DELTA" → 1
|
||||
// timeUnixNano is uint64 over the wire — emit as decimal STRING in JSON to
|
||||
// avoid JS Number precision loss (per research/01 + risk-assessor CRITICAL 2).
|
||||
//
|
||||
// Output contract:
|
||||
// {
|
||||
// resourceMetrics: [{
|
||||
// resource: { attributes: [...] },
|
||||
// scopeMetrics: [{
|
||||
// scope: { name: 'voyage', version: '...' },
|
||||
// metrics: [{
|
||||
// name: 'voyage.<metric>',
|
||||
// description: '...',
|
||||
// unit: '1' | 'ms' | ...,
|
||||
// sum: { dataPoints: [{ ... aggregationTemporality: <int> ...}] }
|
||||
// | gauge: { dataPoints: [...] }
|
||||
// }]
|
||||
// }]
|
||||
// }]
|
||||
// }
|
||||
|
||||
// ---- Inline integer enum constants (CRITICAL: integers, NOT strings) -------
// OTLP/JSON encodes protobuf enums as their integer values; emitting the enum
// *names* as strings produces payloads spec-compliant collectors reject.

// AggregationTemporality (metrics.proto): 0 = unspecified, 1 = delta,
// 2 = cumulative.
const AGG_TEMPORALITY_UNSPECIFIED = 0;
const AGG_TEMPORALITY_DELTA = 1;
const AGG_TEMPORALITY_CUMULATIVE = 2;

// DataPointFlags bit mask (metrics.proto); only NONE is emitted here.
const DATA_POINT_FLAGS_NONE = 0;
const DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK = 1;

// Instrumentation-scope identity stamped into every payload.
const VOYAGE_SCOPE_NAME = 'voyage';
const VOYAGE_SCOPE_VERSION = '4.1.0';
|
||||
|
||||
// ---- Helpers ---------------------------------------------------------------
|
||||
|
||||
/**
 * Convert an ISO-8601 timestamp into an OTLP timeUnixNano value: nanoseconds
 * since epoch encoded as a decimal STRING (uint64 over the wire; a string
 * avoids JS Number precision loss at nanosecond scale).
 *
 * @param {string} iso ISO-8601 timestamp.
 * @returns {string} Decimal nanosecond string; '0' when parsing fails.
 */
function toUnixNanoString(iso) {
  const epochMs = Date.parse(iso);
  if (Number.isNaN(epochMs)) {
    return '0';
  }
  // Scale ms → ns entirely in BigInt space so no precision is lost.
  const nanos = BigInt(epochMs) * 1000000n;
  return nanos.toString();
}
|
||||
|
||||
/**
 * Build an OTLP KeyValue attribute: { key, value: { <typedField>: ... } }.
 * Strings → stringValue, booleans → boolValue, integers → intValue (encoded
 * as a decimal string, matching OTLP/JSON int64 encoding), other numbers →
 * doubleValue; anything else is stringified.
 */
function attribute(key, value) {
  switch (typeof value) {
    case 'string':
      return { key, value: { stringValue: value } };
    case 'boolean':
      return { key, value: { boolValue: value } };
    case 'number':
      return Number.isInteger(value)
        ? { key, value: { intValue: String(value) } }
        : { key, value: { doubleValue: value } };
    default:
      return { key, value: { stringValue: String(value) } };
  }
}
|
||||
|
||||
/**
 * Split a stats record into numeric metric values and string labels.
 * Numbers pass through, booleans are coerced to 0/1 metrics, strings become
 * labels; 'ts' and '_schema_id' are handled by the caller, and every other
 * value type (arrays, objects, null) is dropped. Same convention as
 * textfile-format.mjs.
 */
function partitionRecord(record) {
  const result = { labels: {}, metrics: {} };
  for (const [field, raw] of Object.entries(record)) {
    if (field === 'ts' || field === '_schema_id') continue;
    if (typeof raw === 'number') {
      result.metrics[field] = raw;
    } else if (typeof raw === 'boolean') {
      result.metrics[field] = raw ? 1 : 0;
    } else if (typeof raw === 'string') {
      result.labels[field] = raw;
    }
  }
  return result;
}
|
||||
|
||||
/**
 * Build one OTLP NumberDataPoint for a metric value.
 * Both timestamps carry the same decimal-string nanosecond value; integer
 * values use asInt (string-encoded int64), non-integers asDouble. The unused
 * alternative is set to undefined so JSON.stringify omits it from the wire
 * payload.
 */
function dataPoint(value, ts, labels) {
  const nano = toUnixNanoString(ts);
  const isInt = Number.isInteger(value);
  const attrs = [];
  for (const [k, v] of Object.entries(labels)) {
    attrs.push(attribute(k, v));
  }
  return {
    attributes: attrs,
    startTimeUnixNano: nano,
    timeUnixNano: nano,
    asDouble: isInt ? undefined : value,
    asInt: isInt ? String(value) : undefined,
    flags: DATA_POINT_FLAGS_NONE,
  };
}
|
||||
|
||||
/**
 * Classify a metric name: counter-style suffixes (_total, _count, _passed,
 * _failed, _skipped) become OTLP sums (monotonic, cumulative temporality);
 * everything else becomes a gauge.
 */
function isSumMetric(name) {
  const counterSuffixes = ['_total', '_count', '_passed', '_failed', '_skipped'];
  return counterSuffixes.some((suffix) => name.endsWith(suffix));
}
|
||||
|
||||
/**
 * Transform voyage JSONL stats records into an OTLP/JSON v1.0 metrics payload
 * (POST body for /v1/metrics). Does not mutate `records`. Note: a record
 * missing `ts` falls back to the current wall-clock time — the one
 * non-deterministic path.
 *
 * Per OTLP metrics.proto: aggregationTemporality is an INTEGER enum value
 * (never the string name) and timeUnixNano a decimal string (uint64).
 *
 * @param {Array<object>} records Allowlist-redacted records (caller responsibility).
 * @param {{help?: object}} [opts] Optional metric-name → description map.
 * @returns {object} OTLP-shaped payload: resourceMetrics → scopeMetrics → metrics.
 */
export function transformToOtlpJson(records, opts = {}) {
  const helpMap = opts.help || {};

  if (!Array.isArray(records) || records.length === 0) {
    return buildEnvelope([]);
  }

  // Group all data points by metric name (voyage.<schema>.<field>).
  const metricsMap = new Map();

  for (const record of records) {
    // Fix: skip null / non-object entries instead of crashing on record.ts
    // below (the _schema_id guard already anticipated non-object records).
    if (!record || typeof record !== 'object') continue;

    const schemaId = typeof record._schema_id === 'string' ? record._schema_id : 'unknown';
    const ts = record.ts || new Date().toISOString();
    const { labels, metrics } = partitionRecord(record);
    const allLabels = { ...labels, _schema_id: schemaId };

    for (const [field, value] of Object.entries(metrics)) {
      const name = `${VOYAGE_SCOPE_NAME}.${schemaId}.${field}`;
      if (!metricsMap.has(name)) {
        metricsMap.set(name, {
          name,
          description: helpMap[name] || `voyage stats — ${schemaId}.${field}`,
          // Unit heuristic from the field name; '1' (dimensionless) otherwise.
          unit: /_ms$|_duration/.test(field) ? 'ms' : (/_seconds$/.test(field) ? 's' : '1'),
          dataPoints: [],
          isSum: isSumMetric(name),
        });
      }
      metricsMap.get(name).dataPoints.push(dataPoint(value, ts, allLabels));
    }
  }

  // Sort metric names for deterministic output.
  const otlpMetrics = [...metricsMap.keys()].sort().map((name) => {
    const m = metricsMap.get(name);
    if (m.isSum) {
      return {
        name: m.name,
        description: m.description,
        unit: m.unit,
        sum: {
          dataPoints: m.dataPoints,
          // CRITICAL: integer enum value (2), never the string "CUMULATIVE".
          aggregationTemporality: AGG_TEMPORALITY_CUMULATIVE,
          isMonotonic: true,
        },
      };
    }
    return {
      name: m.name,
      description: m.description,
      unit: m.unit,
      gauge: { dataPoints: m.dataPoints },
    };
  });

  return buildEnvelope(otlpMetrics);
}

/**
 * Wrap a metrics array in the OTLP resourceMetrics → scopeMetrics envelope.
 * Shared by the empty-input and populated paths so both report identical
 * resource attributes (the empty path previously omitted service.version).
 */
function buildEnvelope(metrics) {
  return {
    resourceMetrics: [{
      resource: {
        attributes: [
          attribute('service.name', VOYAGE_SCOPE_NAME),
          attribute('service.version', VOYAGE_SCOPE_VERSION),
        ],
      },
      scopeMetrics: [{
        scope: { name: VOYAGE_SCOPE_NAME, version: VOYAGE_SCOPE_VERSION },
        metrics,
      }],
    }],
  };
}
|
||||
|
||||
// Enum constants are re-exported so tests can drift-pin the integer values
// (SC #13: they must stay integers, never strings).
export {
  AGG_TEMPORALITY_UNSPECIFIED,
  AGG_TEMPORALITY_DELTA,
  AGG_TEMPORALITY_CUMULATIVE,
  DATA_POINT_FLAGS_NONE,
  DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK,
};
|
||||
110
plugins/voyage/tests/hooks/otel-export-otlp.test.mjs
Normal file
110
plugins/voyage/tests/hooks/otel-export-otlp.test.mjs
Normal file
|
|
@ -0,0 +1,110 @@
|
|||
// tests/hooks/otel-export-otlp.test.mjs
|
||||
// SC #13: lib/exporters/otlp-format.mjs returns OTLP/JSON v1.0 metrics payload
|
||||
// with INTEGER (not string) enum constants and timeUnixNano as decimal STRING
|
||||
// (JS precision-loss mitigation per research/01 + risk-assessor CRITICAL 2).
|
||||
|
||||
import { test } from 'node:test';
|
||||
import { strict as assert } from 'node:assert';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { join, dirname } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import {
|
||||
transformToOtlpJson,
|
||||
AGG_TEMPORALITY_CUMULATIVE,
|
||||
AGG_TEMPORALITY_DELTA,
|
||||
AGG_TEMPORALITY_UNSPECIFIED,
|
||||
} from '../../lib/exporters/otlp-format.mjs';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
const FIXTURES = join(__dirname, '..', 'fixtures');
|
||||
|
||||
/**
 * Read a JSONL fixture file and parse each non-empty line into an object.
 */
function loadJsonl(name) {
  const lines = readFileSync(join(FIXTURES, name), 'utf-8').trim().split('\n');
  const records = [];
  for (const line of lines) {
    if (line) records.push(JSON.parse(line));
  }
  return records;
}
|
||||
|
||||
test('SC #13: aggregationTemporality is INTEGER (typeof === "number"), not string', () => {
  const records = loadJsonl('stats-sample.jsonl');
  const payload = transformToOtlpJson(records);
  // Find a sum metric (steps_passed is a counter).
  const metrics = payload.resourceMetrics[0].scopeMetrics[0].metrics;
  const sumMetric = metrics.find(m => 'sum' in m);
  assert.ok(sumMetric, `expected at least one sum-metric in payload, got ${metrics.length} metrics`);
  // CRITICAL: this assertion is the heart of SC #13 — typeof MUST be 'number'.
  assert.equal(typeof sumMetric.sum.aggregationTemporality, 'number',
    `aggregationTemporality must be INTEGER (typeof number), got ${typeof sumMetric.sum.aggregationTemporality}`);
  assert.equal(sumMetric.sum.aggregationTemporality, AGG_TEMPORALITY_CUMULATIVE);
  assert.equal(sumMetric.sum.aggregationTemporality, 2);
});

test('SC #13: enum constants exported as integer literals (drift-pin)', () => {
  assert.equal(typeof AGG_TEMPORALITY_UNSPECIFIED, 'number');
  assert.equal(AGG_TEMPORALITY_UNSPECIFIED, 0);
  assert.equal(typeof AGG_TEMPORALITY_DELTA, 'number');
  assert.equal(AGG_TEMPORALITY_DELTA, 1);
  assert.equal(typeof AGG_TEMPORALITY_CUMULATIVE, 'number');
  assert.equal(AGG_TEMPORALITY_CUMULATIVE, 2);
});

test('SC #13: timeUnixNano is decimal STRING (typeof === "string"), JS precision-loss mitigation', () => {
  const records = loadJsonl('stats-sample.jsonl');
  const payload = transformToOtlpJson(records);
  const metrics = payload.resourceMetrics[0].scopeMetrics[0].metrics;
  // Pick first metric with a data point.
  const m = metrics.find(x => (x.sum?.dataPoints?.length || x.gauge?.dataPoints?.length) > 0);
  // Fix: guard before dereferencing so a missing metric fails with an
  // assertion message instead of an opaque TypeError.
  assert.ok(m, 'expected at least one metric with a data point');
  const dp = (m.sum || m.gauge).dataPoints[0];
  assert.equal(typeof dp.timeUnixNano, 'string',
    `timeUnixNano must be decimal STRING, got ${typeof dp.timeUnixNano}: ${dp.timeUnixNano}`);
  assert.equal(typeof dp.startTimeUnixNano, 'string');
  // Should be a valid decimal-digit string.
  assert.match(dp.timeUnixNano, /^\d+$/);
});

test('SC #13: structural shape — resourceMetrics[].scopeMetrics[].metrics[] hierarchy', () => {
  const records = loadJsonl('stats-sample.jsonl');
  const payload = transformToOtlpJson(records);
  assert.ok(Array.isArray(payload.resourceMetrics));
  assert.ok(payload.resourceMetrics.length >= 1);
  assert.ok(payload.resourceMetrics[0].resource);
  assert.ok(Array.isArray(payload.resourceMetrics[0].scopeMetrics));
  assert.ok(payload.resourceMetrics[0].scopeMetrics[0].scope);
  assert.equal(payload.resourceMetrics[0].scopeMetrics[0].scope.name, 'voyage');
  assert.ok(Array.isArray(payload.resourceMetrics[0].scopeMetrics[0].metrics));
});

test('Empty input: returns valid OTLP envelope with empty metrics array', () => {
  const payload = transformToOtlpJson([]);
  assert.ok(Array.isArray(payload.resourceMetrics));
  assert.equal(payload.resourceMetrics[0].scopeMetrics[0].metrics.length, 0);
});

test('isSum heuristic: counter-named metrics get sum + isMonotonic; others get gauge', () => {
  const records = [
    { _schema_id: 'test', ts: '2026-05-09T08:00:00.000Z', steps_total: 10 }, // counter
    { _schema_id: 'test', ts: '2026-05-09T08:00:00.000Z', cpu_pct: 42.5 }, // gauge
  ];
  const payload = transformToOtlpJson(records);
  const metrics = payload.resourceMetrics[0].scopeMetrics[0].metrics;
  const totalMetric = metrics.find(m => m.name.endsWith('steps_total'));
  const cpuMetric = metrics.find(m => m.name.endsWith('cpu_pct'));
  // Fix: guard the lookups so a missing metric reports an assertion failure
  // rather than a TypeError on the dereference below.
  assert.ok(totalMetric, 'steps_total metric missing from payload');
  assert.ok(cpuMetric, 'cpu_pct metric missing from payload');
  assert.ok(totalMetric.sum, 'counter should have sum');
  assert.equal(totalMetric.sum.isMonotonic, true);
  assert.equal(typeof totalMetric.sum.aggregationTemporality, 'number');
  assert.ok(cpuMetric.gauge, 'non-counter should have gauge');
  assert.ok(!cpuMetric.sum, 'gauge should not have sum');
});

test('Allowlist redacted: callers strip command_excerpt before passing — verify nothing leaks', () => {
  const record = {
    _schema_id: 'post_bash_stats',
    ts: '2026-05-09T08:00:00.000Z',
    duration_ms: 152,
    success: true,
  };
  const payload = transformToOtlpJson([record]);
  const json = JSON.stringify(payload);
  assert.ok(!json.includes('command_excerpt'));
  assert.ok(!json.includes('session_id'));
  // Should contain duration_ms metric.
  assert.match(json, /post_bash_stats\.duration_ms/);
});
|
||||
Loading…
Add table
Add a link
Reference in a new issue