Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
110 changes: 110 additions & 0 deletions apps/api/src/index.test.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,21 @@
import { describe, it, expect } from "vitest";
import type { InteractionStore, StoredInteraction } from "@learnpro/shared";
import type { SandboxProvider, SandboxRunRequest, SandboxRunResponse } from "@learnpro/sandbox";
import { buildServer } from "./index.js";

class FakeInteractionStore implements InteractionStore {
  /** Every batch handed to recordBatch, in arrival order (one inner array per call). */
  public batches: StoredInteraction[][] = [];
  /** When true, the next recordBatch call throws once, then the flag auto-resets. */
  public failNext = false;

  async recordBatch(events: StoredInteraction[]): Promise<void> {
    if (!this.failNext) {
      this.batches.push(events);
      return;
    }
    // One-shot failure: reset before throwing so a retry in the same test succeeds.
    this.failNext = false;
    throw new Error("simulated DB outage");
  }
}

class FakeSandbox implements SandboxProvider {
readonly name = "fake-sandbox";
public lastReq: SandboxRunRequest | null = null;
Expand Down Expand Up @@ -116,4 +130,100 @@ describe("apps/api", () => {
expect(sandbox.lastReq?.language).toBe("typescript");
await app.close();
});

// Contract tests for the batched telemetry ingestion route. Each test builds a fresh
// Fastify app via buildServer() with fakes injected, drives it through app.inject()
// (in-process, no real network), and closes the app to avoid leaking handles.
describe("POST /v1/interactions (STORY-055)", () => {
it("forwards a valid batch to the store and replies 202 with accepted count", async () => {
const store = new FakeInteractionStore();
const app = buildServer({ sandbox: new FakeSandbox(), interactionStore: store });
const res = await app.inject({
method: "POST",
url: "/v1/interactions",
payload: {
events: [
{ type: "cursor_focus", payload: { line_start: 1, line_end: 2, duration_ms: 250 } },
{ type: "submit", payload: { passed: true } },
],
},
});
// 202 Accepted (not 200): the route acknowledges receipt; durability is the store's job.
expect(res.statusCode).toBe(202);
expect(res.json()).toEqual({ accepted: 2 });
// Both events must land in a single recordBatch call, preserving order.
expect(store.batches).toHaveLength(1);
const batch = store.batches[0]!;
expect(batch.map((e) => e.type)).toEqual(["cursor_focus", "submit"]);
// Events without a client `t` are stamped server-side, so every stored event has a Date.
expect(batch.every((e) => e.t instanceof Date)).toBe(true);
expect(batch.every((e) => e.user_id === null)).toBe(true); // anonymous until STORY-005
await app.close();
});

it("preserves a client-supplied `t` (ISO string → Date)", async () => {
const store = new FakeInteractionStore();
const app = buildServer({ sandbox: new FakeSandbox(), interactionStore: store });
// Round-trips through new Date(t); toISOString() must reproduce the input exactly.
const t = "2026-04-26T12:34:56.000Z";
const res = await app.inject({
method: "POST",
url: "/v1/interactions",
payload: { events: [{ type: "submit", payload: { passed: false }, t }] },
});
expect(res.statusCode).toBe(202);
const batch = store.batches[0]!;
expect(batch[0]!.t.toISOString()).toBe(t);
await app.close();
});

it("rejects a malformed payload with 400 and the Zod issues", async () => {
const store = new FakeInteractionStore();
const app = buildServer({ sandbox: new FakeSandbox(), interactionStore: store });
// `rung` is not a valid cursor_focus payload field, so schema validation must fail.
const res = await app.inject({
method: "POST",
url: "/v1/interactions",
payload: { events: [{ type: "cursor_focus", payload: { rung: 7 } }] },
});
expect(res.statusCode).toBe(400);
const body = res.json() as { error: string; issues: unknown[] };
expect(body.error).toBe("invalid_request");
expect(Array.isArray(body.issues)).toBe(true);
// A rejected batch must never reach the store.
expect(store.batches).toHaveLength(0);
await app.close();
});

it("rejects an empty batch with 400 (don't pay a round-trip for nothing)", async () => {
const app = buildServer({ sandbox: new FakeSandbox() });
const res = await app.inject({
method: "POST",
url: "/v1/interactions",
payload: { events: [] },
});
expect(res.statusCode).toBe(400);
await app.close();
});

it("returns 503 (not 500) when the store throws — client retries, no internal-error leak", async () => {
const store = new FakeInteractionStore();
// Arm the one-shot failure so recordBatch throws for this request only.
store.failNext = true;
const app = buildServer({ sandbox: new FakeSandbox(), interactionStore: store });
const res = await app.inject({
method: "POST",
url: "/v1/interactions",
payload: { events: [{ type: "submit", payload: { passed: true } }] },
});
expect(res.statusCode).toBe(503);
// Body is a fixed, non-leaky message — the thrown error's text must not surface.
expect(res.json()).toEqual({
error: "interactions_unavailable",
message: "telemetry store rejected the batch",
});
await app.close();
});

it("default store (none injected) accepts events without crashing — Noop drop", async () => {
// No interactionStore option: buildServer falls back to NoopInteractionStore.
const app = buildServer({ sandbox: new FakeSandbox() });
const res = await app.inject({
method: "POST",
url: "/v1/interactions",
payload: { events: [{ type: "submit", payload: { passed: true } }] },
});
expect(res.statusCode).toBe(202);
expect(res.json()).toEqual({ accepted: 1 });
await app.close();
});
});
});
46 changes: 45 additions & 1 deletion apps/api/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,11 @@
import Fastify from "fastify";
import { healthPayload } from "@learnpro/shared";
import {
healthPayload,
InteractionsBatchSchema,
type InteractionEvent,
type InteractionStore,
type StoredInteraction,
} from "@learnpro/shared";
import {
buildPolicyRegistry,
loadPolicyConfigFromEnv,
Expand All @@ -26,6 +32,16 @@ export interface BuildServerOptions {
policies?: PolicyRegistry;
llm?: LLMProvider;
sandbox?: SandboxProvider;
interactionStore?: InteractionStore;
}

// Default impl when no store is provided — drops events on the floor. Useful for tests and
// for the dev playground when no DB is configured. The DB-backed `DrizzleInteractionStore`
// gets wired in once apps/api gets a DB client (post-STORY-005).
class NoopInteractionStore implements InteractionStore {
  /** Accepts any batch and discards it — no storage, no side effects, never throws. */
  async recordBatch(): Promise<void> {
    /* deliberately empty */
  }
}

function defaultLLM(): LLMProvider {
Expand Down Expand Up @@ -67,6 +83,7 @@ export function buildServer(opts: BuildServerOptions = {}) {
opts.policies ?? buildPolicyRegistry({ config: loadPolicyConfigFromEnv(process.env) });
const llm = opts.llm ?? defaultLLM();
const sandbox = opts.sandbox ?? defaultSandbox();
const interactionStore = opts.interactionStore ?? new NoopInteractionStore();

app.get("/health", async () => healthPayload({ service: "api" }));

Expand Down Expand Up @@ -102,6 +119,33 @@ export function buildServer(opts: BuildServerOptions = {}) {
}
});

// STORY-055 — batched ingestion of rich interaction telemetry (cursor focus / edits / reverts /
// run / submit / hint / autonomy decisions). Auth attribution lands with STORY-005; until then
// the route accepts anonymous events (`user_id` null) so the playground can ship telemetry today.
app.post("/v1/interactions", async (req, reply) => {
const parsed = InteractionsBatchSchema.safeParse(req.body);
if (!parsed.success) {
return reply.code(400).send({ error: "invalid_request", issues: parsed.error.issues });
}
const now = new Date();
const stored: StoredInteraction[] = parsed.data.events.map((e: InteractionEvent) => ({
type: e.type,
payload: e.payload,
t: e.t ? new Date(e.t) : now,
user_id: null,
episode_id: e.episode_id ?? null,
}));
try {
await interactionStore.recordBatch(stored);
return reply.code(202).send({ accepted: stored.length });
} catch (err) {
req.log.error({ err }, "interaction store error");
return reply
.code(503)
.send({ error: "interactions_unavailable", message: "telemetry store rejected the batch" });
}
});

return app;
}

Expand Down
113 changes: 113 additions & 0 deletions apps/web/src/app/api/interactions/route.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
import type { InteractionEvent } from "@learnpro/shared";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { POST } from "./route";

/** Builds a JSON POST Request against the interactions route for handler-level tests. */
function postRequest(body: unknown): Request {
  const init: RequestInit = {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify(body),
  };
  return new Request("http://localhost/api/interactions", init);
}

// Tests for the Next.js proxy handler. The upstream Fastify API is simulated by
// replacing globalThis.fetch with a vi.fn(); the hooks below pin the env var the
// handler reads and restore both fetch and env after every test.
describe("POST /api/interactions (Next.js Route Handler)", () => {
// Captured once so afterEach can restore the real fetch after each mock.
const realFetch = globalThis.fetch;

beforeEach(() => {
process.env["LEARNPRO_API_URL"] = "http://api.test";
});

afterEach(() => {
globalThis.fetch = realFetch;
delete process.env["LEARNPRO_API_URL"];
});

it("forwards a valid batch to the API and pipes the upstream 202 response back", async () => {
const fakeFetch = vi.fn().mockResolvedValue(
new Response(JSON.stringify({ accepted: 2 }), {
status: 202,
headers: { "content-type": "application/json" },
}),
);
globalThis.fetch = fakeFetch as unknown as typeof fetch;

const events: InteractionEvent[] = [
{ type: "cursor_focus", payload: { line_start: 1, line_end: 2, duration_ms: 100 } },
{ type: "submit", payload: { passed: true } },
];
const res = await POST(postRequest({ events }));

// Upstream status and body must pass through unchanged.
expect(res.status).toBe(202);
const json = (await res.json()) as { accepted: number };
expect(json.accepted).toBe(2);
// The proxy must target the env-configured API origin.
expect(fakeFetch).toHaveBeenCalledWith(
"http://api.test/v1/interactions",
expect.objectContaining({ method: "POST" }),
);
});

it("returns 400 with error=invalid_json when the body is not JSON", async () => {
const req = new Request("http://localhost/api/interactions", {
method: "POST",
headers: { "content-type": "application/json" },
body: "not json",
});
const res = await POST(req);
expect(res.status).toBe(400);
const json = (await res.json()) as { error: string };
expect(json.error).toBe("invalid_json");
});

it("returns 400 with error=invalid_request for an empty batch", async () => {
// Schema rejection happens locally — no fetch mock needed, upstream is never called.
const res = await POST(postRequest({ events: [] }));
expect(res.status).toBe(400);
const json = (await res.json()) as { error: string };
expect(json.error).toBe("invalid_request");
});

it("returns 502 when the upstream fetch throws", async () => {
globalThis.fetch = vi
.fn()
.mockRejectedValue(new Error("connect ECONNREFUSED")) as unknown as typeof fetch;

const res = await POST(
postRequest({ events: [{ type: "submit", payload: { passed: true } }] }),
);
// Network failure maps to 502 Bad Gateway, distinct from upstream's own error codes.
expect(res.status).toBe(502);
const json = (await res.json()) as { error: string };
expect(json.error).toBe("api_unreachable");
});

it("propagates upstream non-2xx status codes through to the client", async () => {
globalThis.fetch = vi.fn().mockResolvedValue(
new Response(JSON.stringify({ error: "interactions_unavailable", message: "store down" }), {
status: 503,
headers: { "content-type": "application/json" },
}),
) as unknown as typeof fetch;

const res = await POST(
postRequest({ events: [{ type: "submit", payload: { passed: true } }] }),
);
// The proxy is transparent: upstream's 503 and error body surface verbatim.
expect(res.status).toBe(503);
const json = (await res.json()) as { error: string };
expect(json.error).toBe("interactions_unavailable");
});

it("defaults to http://localhost:4000 when LEARNPRO_API_URL is unset", async () => {
// Undo the beforeEach value so the handler's DEFAULT_API_URL fallback is exercised.
delete process.env["LEARNPRO_API_URL"];
const fakeFetch = vi.fn().mockResolvedValue(
new Response(JSON.stringify({ accepted: 1 }), {
status: 202,
headers: { "content-type": "application/json" },
}),
);
globalThis.fetch = fakeFetch as unknown as typeof fetch;

await POST(postRequest({ events: [{ type: "submit", payload: { passed: true } }] }));
expect(fakeFetch).toHaveBeenCalledWith(
"http://localhost:4000/v1/interactions",
expect.any(Object),
);
});
});
53 changes: 53 additions & 0 deletions apps/web/src/app/api/interactions/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import { InteractionsBatchSchema } from "@learnpro/shared";
import { NextResponse } from "next/server";

// Next.js route-segment config: pin this handler to the Node.js runtime.
export const runtime = "nodejs";

// Fallback Fastify API origin used when LEARNPRO_API_URL is not set.
const DEFAULT_API_URL = "http://localhost:4000";

// Browser → Next.js → Fastify proxy. Same shape as the /api/sandbox/run handler:
// validate the body with Zod here so a malformed batch doesn't even leave the box,
// then forward the (now trusted) JSON upstream and pipe the response back.
export async function POST(req: Request) {
  // Step 1: the body must be parseable JSON at all.
  let parsedBody: unknown;
  try {
    parsedBody = await req.json();
  } catch {
    return NextResponse.json(
      { error: "invalid_json", message: "Request body must be valid JSON." },
      { status: 400 },
    );
  }

  // Step 2: the JSON must match the shared batch schema before anything leaves the box.
  const validation = InteractionsBatchSchema.safeParse(parsedBody);
  if (!validation.success) {
    return NextResponse.json(
      { error: "invalid_request", issues: validation.error.issues },
      { status: 400 },
    );
  }

  // Step 3: forward the validated payload to the Fastify API.
  const apiBase = process.env["LEARNPRO_API_URL"] ?? DEFAULT_API_URL;
  let upstream: Response;
  try {
    upstream = await fetch(`${apiBase}/v1/interactions`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify(validation.data),
    });
  } catch (err) {
    // Network-level failure (API down/unreachable) → 502 with the underlying message.
    const message = err instanceof Error ? err.message : String(err);
    return NextResponse.json({ error: "api_unreachable", message }, { status: 502 });
  }

  // Step 4: pipe the upstream response back transparently (status + body + content type).
  const responseBody = await upstream.text();
  const contentType = upstream.headers.get("content-type") ?? "application/json";
  return new NextResponse(responseBody, {
    status: upstream.status,
    headers: { "content-type": contentType },
  });
}
Loading
Loading