claude-code

This commit is contained in:
ashutoshpythoncs@gmail.com
2026-03-31 18:58:05 +05:30
parent a2a44a5841
commit b564857c0b
2148 changed files with 564518 additions and 2 deletions

View File

@@ -0,0 +1,176 @@
/**
* Core Web Vitals + custom chat performance metrics.
*
* Observed metrics are forwarded to an analytics sink (no-op by default;
* swap in your analytics provider via `setMetricSink`).
*/
/**
 * Shape of a single performance measurement forwarded to the sink.
 */
export interface PerformanceMetric {
  /** Metric identifier, e.g. "LCP" or "scroll_fps". */
  name: string;
  /** Measured value; milliseconds for timings, unitless for CLS/FPS. */
  value: number;
  /** Web-vitals style rating; omitted for custom metrics without thresholds. */
  rating?: "good" | "needs-improvement" | "poor";
  /** Additional context (e.g. conversationId, messageCount) */
  meta?: Record<string, unknown>;
}

type MetricSink = (metric: PerformanceMetric) => void;

// No-op by default so reporting is always safe to call.
let sink: MetricSink = () => {};

/** Register a custom analytics sink (e.g. PostHog, Datadog, console). */
export function setMetricSink(fn: MetricSink): void {
  sink = fn;
}

/**
 * Forward a metric to the registered sink, logging it in development.
 *
 * Guards the `process` access so the module also works in browsers where
 * no bundler injects `process.env` (the bare access would throw).
 */
function report(metric: PerformanceMetric): void {
  if (typeof process !== "undefined" && process.env?.NODE_ENV === "development") {
    // eslint-disable-next-line no-console
    console.debug("[perf]", metric.name, metric.value.toFixed(1), metric.rating ?? "");
  }
  sink(metric);
}
// ─── Core Web Vitals ────────────────────────────────────────────────────────
// Good / needs-improvement boundaries per web.dev Core Web Vitals guidance.
// Hoisted to module scope so the table is not rebuilt on every call.
const VITAL_THRESHOLDS: Record<string, readonly [number, number]> = {
  LCP: [2500, 4000],
  FID: [100, 300],
  CLS: [0.1, 0.25],
  INP: [200, 500],
  TTFB: [800, 1800],
  FCP: [1800, 3000],
};

/**
 * Rate a web-vital value against its thresholds.
 *
 * @param name  Metric name (key into VITAL_THRESHOLDS).
 * @param value Observed value (ms for timings, unitless for CLS).
 * @returns "good" when value <= lower bound, "needs-improvement" up to the
 *   upper bound, otherwise "poor". Unknown names fall back to [0, Infinity],
 *   so they rate "good" only at value <= 0 and never "poor".
 */
function rateVital(name: string, value: number): PerformanceMetric["rating"] {
  const [good, poor] = VITAL_THRESHOLDS[name] ?? [0, Infinity];
  if (value <= good) return "good";
  if (value <= poor) return "needs-improvement";
  return "poor";
}
/**
 * Attaches PerformanceObservers for LCP, FID, CLS and TTFB, forwarding each
 * observation through `report`. No-ops outside the browser (SSR) or where
 * PerformanceObserver is unavailable. Each observer is wrapped in its own
 * try/catch because browsers throw on unsupported entry types.
 */
export function observeWebVitals(): void {
  if (typeof window === "undefined" || !("PerformanceObserver" in window)) return;
  // LCP — the last entry in a batch is the current largest-contentful-paint
  // candidate; earlier entries are superseded.
  try {
    const lcpObs = new PerformanceObserver((list) => {
      const entries = list.getEntries();
      // NOTE(review): LCP entries are LargestContentfulPaint, not
      // PerformancePaintTiming; the cast works because both expose startTime.
      const last = entries[entries.length - 1] as PerformancePaintTiming;
      const value = last.startTime;
      report({ name: "LCP", value, rating: rateVital("LCP", value) });
    });
    lcpObs.observe({ type: "largest-contentful-paint", buffered: true });
  } catch {}
  // FID — input delay of the first interaction (processingStart - startTime).
  // NOTE(review): despite the original "FID / INP" heading, only FID is
  // reported here; INP would need an "event" observer with durationThreshold.
  try {
    const fidObs = new PerformanceObserver((list) => {
      for (const entry of list.getEntries()) {
        const e = entry as PerformanceEventTiming;
        const value = e.processingStart - e.startTime;
        report({ name: "FID", value, rating: rateVital("FID", value) });
      }
    });
    fidObs.observe({ type: "first-input", buffered: true });
  } catch {}
  // CLS — session-window accumulation: shifts within 1s of the previous shift
  // join the current session; CLS is the max session value seen so far.
  // NOTE(review): the standard CLS definition also caps a session at 5s total;
  // that cap is not implemented here — confirm this is intentional.
  try {
    let clsValue = 0;        // running max across session windows
    let clsSessionGap = 0;   // startTime of the most recent shift
    let clsSessionValue = 0; // accumulated value of the current session
    const clsObs = new PerformanceObserver((list) => {
      for (const entry of list.getEntries()) {
        const e = entry as LayoutShift;
        // Shifts right after user input are expected and excluded per spec.
        if (!e.hadRecentInput) {
          const now = e.startTime;
          // A >1s gap (or the very first shift) starts a new session window.
          if (now - clsSessionGap > 1000 || clsValue === 0) {
            clsSessionValue = e.value;
          } else {
            clsSessionValue += e.value;
          }
          clsSessionGap = now;
          clsValue = Math.max(clsValue, clsSessionValue);
          // Reported on every shift, so the sink sees a monotone series.
          report({ name: "CLS", value: clsValue, rating: rateVital("CLS", clsValue) });
        }
      }
    });
    clsObs.observe({ type: "layout-shift", buffered: true });
  } catch {}
  // TTFB
  try {
    const navObs = new PerformanceObserver((list) => {
      for (const entry of list.getEntries()) {
        const nav = entry as PerformanceNavigationTiming;
        // NOTE(review): responseStart - requestStart measures server response
        // time only; the web-vitals TTFB is responseStart relative to
        // navigation start (includes DNS/TCP/TLS) — confirm which is intended.
        const value = nav.responseStart - nav.requestStart;
        report({ name: "TTFB", value, rating: rateVital("TTFB", value) });
      }
    });
    navObs.observe({ type: "navigation", buffered: true });
  } catch {}
}
// ─── Custom Chat Metrics ─────────────────────────────────────────────────────
/** Call when the chat input becomes interactive. No-op without the Performance API. */
export function markTimeToInteractive(): void {
  if (typeof performance !== "undefined") {
    report({ name: "time_to_interactive", value: performance.now() });
  }
}
/** Call when the first message bubble finishes rendering. No-op without the Performance API. */
export function markFirstMessageRender(): void {
  if (typeof performance !== "undefined") {
    report({ name: "first_message_render", value: performance.now() });
  }
}
/**
 * Measures streaming token latency: time from when the server sends the
 * first chunk to when it appears in the DOM.
 *
 * Usage:
 *   const end = startStreamingLatencyMeasurement();
 *   // … after DOM update …
 *   end();
 *
 * Returns a no-op in environments without the Performance API (SSR),
 * consistent with the other mark* helpers in this module.
 */
export function startStreamingLatencyMeasurement(): () => void {
  // Guard matches markTimeToInteractive / markFirstMessageRender so this
  // helper is also safe to call during server-side rendering.
  if (typeof performance === "undefined") return () => {};
  const start = performance.now();
  return () => {
    const value = performance.now() - start;
    report({ name: "streaming_token_latency_ms", value });
  };
}
/**
 * Monitor scroll FPS during user scrolling. Returns a cleanup fn.
 *
 * Counts rAF frames while a scroll burst is active and reports frames/second
 * once at least 1s of measured time has elapsed.
 */
export function monitorScrollFps(element: HTMLElement): () => void {
  let frameCount = 0;
  let lastTime = performance.now();
  let rafId: number;
  let scrolling = false;
  const onScroll = () => {
    // Reset the measurement window when a new scroll burst begins; without
    // this, idle time between bursts counts as elapsed time and the first
    // report after a gap is wrongly rated "poor" (few frames / huge window).
    if (!scrolling) {
      frameCount = 0;
      lastTime = performance.now();
    }
    scrolling = true;
  };
  const loop = () => {
    rafId = requestAnimationFrame(loop);
    if (!scrolling) return; // only measure while the user is scrolling
    frameCount++;
    const now = performance.now();
    if (now - lastTime >= 1000) {
      const fps = (frameCount / (now - lastTime)) * 1000;
      report({
        name: "scroll_fps",
        value: fps,
        rating: fps >= 55 ? "good" : fps >= 30 ? "needs-improvement" : "poor",
      });
      frameCount = 0;
      lastTime = now;
      scrolling = false; // wait for the next scroll event before measuring again
    }
  };
  element.addEventListener("scroll", onScroll, { passive: true });
  rafId = requestAnimationFrame(loop);
  return () => {
    cancelAnimationFrame(rafId);
    element.removeEventListener("scroll", onScroll);
  };
}

View File

@@ -0,0 +1,66 @@
/**
 * Coalesces streaming token chunks and flushes them at most once per
 * animation frame, so the UI re-renders per frame instead of per chunk.
 * If the last flush is more than `maxDelay` ms old when a chunk arrives,
 * the chunk is flushed synchronously to bound latency.
 */
export class StreamingOptimizer {
  private buffer = "";
  private rafId: number | null = null;
  private onFlush: (text: string) => void;
  private lastFlushTime = 0;
  /** Max ms to wait before forcing a flush regardless of rAF timing */
  private readonly maxDelay: number;

  constructor(onFlush: (text: string) => void, maxDelay = 50) {
    this.onFlush = onFlush;
    this.maxDelay = maxDelay;
  }

  /** Append a chunk; flushes now if overdue, otherwise on the next frame. */
  push(chunk: string): void {
    this.buffer += chunk;
    if (this.rafId !== null) return; // a frame flush is already pending
    const overdue = performance.now() - this.lastFlushTime >= this.maxDelay;
    if (overdue) {
      // Flush is overdue — do it synchronously to avoid latency buildup.
      this.flush();
      return;
    }
    this.rafId = requestAnimationFrame(() => {
      this.rafId = null;
      this.flush();
    });
  }

  /** Deliver all buffered text to the callback; no-op when empty. */
  flush(): void {
    if (this.buffer === "") return;
    const pending = this.buffer;
    this.buffer = "";
    this.lastFlushTime = performance.now();
    this.onFlush(pending);
  }

  /** Cancel any scheduled frame and deliver whatever is still buffered. */
  destroy(): void {
    if (this.rafId !== null) {
      cancelAnimationFrame(this.rafId);
      this.rafId = null;
    }
    this.flush();
  }
}
/**
 * Factory for a StreamingOptimizer bound to the given flush callback.
 *
 * NOTE(review): earlier docs described a "callback ref" indirection, but
 * this is a plain constructor wrapper — the callback passed here is fixed
 * for the optimizer's lifetime.
 */
export function createStreamingOptimizer(
  onFlush: (accumulated: string) => void,
  maxDelay = 50
): StreamingOptimizer {
  const optimizer = new StreamingOptimizer(onFlush, maxDelay);
  return optimizer;
}

View File

@@ -0,0 +1,115 @@
/**
* Lightweight web worker pool. Keeps a fixed number of workers alive
* and queues tasks when all workers are busy.
*/
/** A queued unit of work plus the promise callbacks that settle it. */
interface WorkerTask<T> {
  /** Message posted to the worker via postMessage. */
  payload: unknown;
  /** Resolves the caller's promise with the worker's response data. */
  resolve: (value: T) => void;
  /** Rejects the caller's promise when the worker errors. */
  reject: (reason: unknown) => void;
  /** Optional transferable objects handed off with the payload. */
  transferables?: Transferable[];
}

/** One pooled worker and whether it is currently processing a task. */
interface WorkerSlot {
  worker: Worker;
  busy: boolean;
}
/**
 * Fixed-size pool of web workers. Workers are created lazily on first use,
 * reused across tasks, and tasks queue when every worker is busy.
 *
 * Assumes a request/response protocol: each postMessage to a worker
 * produces exactly one "message" (or "error") event back.
 */
export class WorkerPool<T = unknown> {
  private slots: WorkerSlot[] = [];
  private queue: WorkerTask<T>[] = [];
  private readonly size: number;
  private readonly workerFactory: () => Worker;

  /**
   * @param workerFactory Creates a fresh Worker; invoked lazily up to `size` times.
   * @param size Desired pool size. Defaults to `navigator.hardwareConcurrency`
   *   (or 2 where `navigator` is unavailable, e.g. SSR/tests — the bare
   *   access would throw there) and is clamped to the range [1, 4].
   */
  constructor(
    workerFactory: () => Worker,
    size = typeof navigator !== "undefined" ? navigator.hardwareConcurrency ?? 2 : 2
  ) {
    this.workerFactory = workerFactory;
    // Clamp to [1, 4]: at least one worker (a non-positive size would leave
    // every task queued forever), at most 4 to avoid too many threads.
    this.size = Math.min(Math.max(1, size), 4);
  }

  private createSlot(): WorkerSlot {
    const worker = this.workerFactory();
    const slot: WorkerSlot = { worker, busy: false };
    return slot;
  }

  private getFreeSlot(): WorkerSlot | null {
    return this.slots.find((s) => !s.busy) ?? null;
  }

  /** Lazily grow the pool up to its configured size. */
  private ensureSlots(): void {
    while (this.slots.length < this.size) {
      this.slots.push(this.createSlot());
    }
  }

  /**
   * Run one task on the pool.
   * @param payload Message posted to the worker.
   * @param transferables Optional transferables moved (not copied) to the worker.
   * @returns Resolves with the worker's response data; rejects on worker error.
   */
  run(payload: unknown, transferables?: Transferable[]): Promise<T> {
    return new Promise<T>((resolve, reject) => {
      this.ensureSlots();
      const task: WorkerTask<T> = { payload, resolve, reject, transferables };
      const slot = this.getFreeSlot();
      if (slot) {
        this.dispatch(slot, task);
      } else {
        this.queue.push(task);
      }
    });
  }

  /** Post a task to a worker and settle its promise on the first response. */
  private dispatch(slot: WorkerSlot, task: WorkerTask<T>): void {
    slot.busy = true;
    // One-shot listeners: each removes both so a task settles exactly once.
    const handleMessage = (e: MessageEvent) => {
      slot.worker.removeEventListener("message", handleMessage);
      slot.worker.removeEventListener("error", handleError);
      slot.busy = false;
      task.resolve(e.data as T);
      this.dequeue();
    };
    const handleError = (e: ErrorEvent) => {
      slot.worker.removeEventListener("message", handleMessage);
      slot.worker.removeEventListener("error", handleError);
      slot.busy = false;
      task.reject(new Error(e.message));
      this.dequeue();
    };
    slot.worker.addEventListener("message", handleMessage);
    slot.worker.addEventListener("error", handleError);
    if (task.transferables?.length) {
      slot.worker.postMessage(task.payload, task.transferables);
    } else {
      slot.worker.postMessage(task.payload);
    }
  }

  /** Start the next queued task when a worker is free. */
  private dequeue(): void {
    // Confirm a free slot BEFORE shifting: the previous order shifted the
    // task first and silently dropped it if no slot was free.
    const slot = this.getFreeSlot();
    if (!slot || this.queue.length === 0) return;
    const task = this.queue.shift()!;
    this.dispatch(slot, task);
  }

  /**
   * Terminate all workers and clear the pool.
   * NOTE(review): queued tasks are dropped and their promises never settle —
   * callers awaiting them will hang; consider rejecting them instead.
   */
  terminate(): void {
    for (const slot of this.slots) {
      slot.worker.terminate();
    }
    this.slots = [];
    this.queue = [];
  }
}
/**
 * Singleton pools lazily initialized per worker module URL.
 * Avoids spawning duplicate workers when multiple components
 * import the same pool.
 */
const pools = new Map<string, WorkerPool>();

/** Get (or lazily create) the shared pool registered under `key`. */
export function getWorkerPool<T>(key: string, factory: () => Worker): WorkerPool<T> {
  let pool = pools.get(key);
  if (pool === undefined) {
    pool = new WorkerPool<T>(factory);
    pools.set(key, pool);
  }
  return pool as WorkerPool<T>;
}