# Chat Example

This example shows how to build a streaming chat UI using BYOK API and MobX.

## Setup
import { ByokClient } from "@byokapi/client"
import { streamText } from "ai"
import { makeAutoObservable, runInAction } from "mobx"
class ChatStore {
client: ByokClient
messages: Array<{ role: "user" | "assistant"; content: string }> = []
streaming = false
constructor() {
this.client = new ByokClient({
bridgeUrl: "http://localhost:8881/bridge",
appName: "Chat Demo",
})
makeAutoObservable(this)
}
async init() {
await this.client.connect()
await this.client.requestGrant({ capabilities: ["language"] })
}
async send(userMessage: string) {
this.messages.push({ role: "user", content: userMessage })
this.messages.push({ role: "assistant", content: "" })
this.streaming = true
const provider = this.client.getProvider()
const { textStream } = streamText({
model: provider("gpt-4o"),
messages: this.messages.slice(0, -1).map((m) => ({
role: m.role,
content: m.content,
})),
})
for await (const chunk of textStream) {
runInAction(() => {
const last = this.messages[this.messages.length - 1]
if (last) last.content += chunk
})
}
runInAction(() => {
this.streaming = false
})
}
}React component
import { useState } from "react"
import { observer } from "mobx-react-lite"
const Chat = observer(({ store }: { store: ChatStore }) => {
const [input, setInput] = useState("")
return (
<div>
{store.messages.map((msg, i) => (
<div key={i} className={msg.role === "user" ? "text-right" : ""}>
<strong>{msg.role}:</strong> {msg.content}
</div>
))}
<form onSubmit={(e) => {
e.preventDefault()
store.send(input)
setInput("")
}}>
<input
value={input}
onChange={(e) => setInput(e.target.value)}
disabled={store.streaming}
/>
<button type="submit">Send</button>
</form>
</div>
)
})Key points
- MobX for state — the store is observable, so React re-renders automatically on message updates
- Streaming — `streamText` returns an async iterable of chunks; we append each chunk to the last message
- `runInAction` — required for MobX strict mode when updating state inside async functions
- AI SDK v6 — the provider function returns a standard `LanguageModelV3`, so `streamText` works exactly like with any other provider