Tool UI is a React component framework for conversation‑native UIs inside AI chats.
Tools return JSON; Tool UI renders it as inline, narrated, referenceable surfaces.
Radix/shadcn (primitives) → Tool UI (conversation‑native components & schema) → AI SDK / LangGraph / etc. (LLM orchestration)
See the UI Guidelines for the full philosophy. Quick summary:
- Surface roles: information, decision, control, state, composite
- Surface lifecycle: invocation → output‑pending → interactive → committing → receipt → errored

The assistant calls a tool, the tool returns JSON matching a schema, and the UI renders inline.
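To make those lifecycle stages concrete, here is one way you might model them in your own code. This is an illustrative sketch only; Tool UI does not export this type, and the extra fields are assumptions.

```ts
// Illustrative sketch: the stage names come from the summary above;
// the `at` and `message` fields are assumptions for the example.
type SurfaceStage =
  | { stage: "invocation" }                // the assistant has called the tool
  | { stage: "output-pending" }            // waiting for the tool's JSON output
  | { stage: "interactive" }               // rendered inline, accepting user input
  | { stage: "committing" }                // a side effect is in flight
  | { stage: "receipt"; at: string }       // durable proof of what happened
  | { stage: "errored"; message: string }; // the tool call or side effect failed
```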
Assistant calls tool → JSON output → `<Component {...props} />` → User interacts → Your app handles effects

**Server side:** Define a tool that returns schema-validated JSON.
What this demonstrates:
- `serializableMediaCardSchema` from `@tool-ui/media-card` ensures type-safe output
- A timestamp (`asOf`) for freshness tracking

```ts
import { streamText, tool } from "ai";
import { openai } from "@ai-sdk/openai";
import { z } from "zod";
import { serializableMediaCardSchema } from "@tool-ui/media-card";

export async function POST(req: Request) {
  const { messages } = await req.json();

  const result = streamText({
    model: openai("gpt-4o"),
    messages,
    tools: {
      previewLink: tool({
        description: "Show a preview card for a URL",
        inputSchema: z.object({ url: z.string().url() }),
        outputSchema: serializableMediaCardSchema,
        async execute({ url }) {
          return {
            id: "link-1",
            kind: "link",
            href: url,
            title: "Example Site",
            description: "A description of the linked content",
            thumb: "https://example.com/image.jpg",
            asOf: new Date().toISOString(),
            actions: [{ id: "open", label: "Open", sentence: "Open the link" }],
          };
        },
      }),
    },
  });

  return result.toUIMessageStreamResponse();
}
```
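Because `serializableMediaCardSchema` is passed as a Zod `outputSchema` above, you can also reuse it to validate payloads you assemble by hand. A minimal sketch, assuming the schema exposes Zod's `safeParse`:

```ts
// Sketch: guard hand-built payloads with the same schema the tool declares.
// Assumes serializableMediaCardSchema is a Zod schema, as its use as a Zod
// outputSchema above suggests.
import { serializableMediaCardSchema } from "@tool-ui/media-card";

export function toMediaCardPayload(candidate: unknown) {
  const parsed = serializableMediaCardSchema.safeParse(candidate);
  if (!parsed.success) {
    // Fail early instead of streaming a malformed card to the client.
    throw new Error(`Invalid media card payload: ${parsed.error.message}`);
  }
  return parsed.data;
}
```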
**Client side:** Register the component and let assistant-ui handle rendering.

What this demonstrates:
- `makeAssistantToolUI` connects the tool name to a React component
- The `result` object is automatically typed and validated against your schema
- Rendering `<PreviewLinkUI />` registers it; no manual plumbing required

```tsx
"use client";

import { AssistantRuntimeProvider } from "@assistant-ui/react";
import {
  useChatRuntime,
  AssistantChatTransport,
  makeAssistantToolUI,
} from "@assistant-ui/react-ai-sdk";
import { MediaCard } from "@tool-ui/media-card";

const PreviewLinkUI = makeAssistantToolUI({
  toolName: "previewLink",
  render: ({ result }) => <MediaCard {...result} maxWidth="420px" />,
});

export default function App() {
  const runtime = useChatRuntime({
    transport: new AssistantChatTransport({ api: "/api/chat" }),
  });

  return (
    <AssistantRuntimeProvider runtime={runtime}>
      <PreviewLinkUI />
      {/* your <Thread /> component here */}
    </AssistantRuntimeProvider>
  );
}
```
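While the tool call is still streaming there may be no `result` yet. One way to handle that output‑pending moment is sketched below; the only assumption about the render props is that `result` is undefined until the tool finishes, and `PendingCard` is a stand-in component defined here, not part of Tool UI.

```tsx
"use client";

// Sketch: render a placeholder until the tool's JSON output arrives.
// PendingCard is a local stand-in, not a Tool UI export.
import { makeAssistantToolUI } from "@assistant-ui/react-ai-sdk";
import { MediaCard } from "@tool-ui/media-card";

const PendingCard = () => <div aria-busy="true">Fetching preview…</div>;

export const PreviewLinkUI = makeAssistantToolUI({
  toolName: "previewLink",
  render: ({ result }) =>
    result ? <MediaCard {...result} maxWidth="420px" /> : <PendingCard />,
});
```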
Every Tool UI surface is addressable and reconstructable. Here's the common schema structure:

What this shows:
- `id` makes the surface referenceable ("the second card")
- `role` declares the primary purpose (information, decision, control, state, or composite)
- `asOf` enables temporal honesty and staleness detection
- `actions` with `sentence` fields enable natural language interaction
- `receipt` provides durable proof of side effects with timestamps and identifiers

```ts
{
  id: string;              // stable across turns
  role: "information" | "decision" | "control" | "state" | "composite";
  asOf?: string;           // ISO time for freshness
  actions?: Array<{ id: string; label: string; sentence: string }>;
  // optional receipt after side effects:
  receipt?: {
    outcome: "success" | "partial" | "failed" | "cancelled";
    summary: string;
    identifiers?: Record<string, string>;
    at: string;            // ISO timestamp
  };
}
```
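For instance, after a committed side effect a surface might carry a receipt like the following. The field names follow the structure above; the concrete values are invented for the example.

```ts
// Illustrative payload only: field names match the schema structure above;
// the IDs, timestamps, and summary text are made up.
const confirmedBooking = {
  id: "booking-42",
  role: "decision",
  asOf: "2025-05-01T09:30:00Z",
  receipt: {
    outcome: "success",
    summary: "Reserved a table for two at 19:00",
    identifiers: { confirmationNumber: "R-88214" },
    at: "2025-05-01T09:30:05Z",
  },
};
```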