by Ahmed Megahd
We use @Sse()-style streaming to push tokens from OpenAI and chunks from Gemini over Server-Sent Events. On the client, EventSource() consumes the stream and auto-reconnects, honouring the SSE retry() interval. OpenAI delivers incremental text in each chunk's .delta.content, while each of Gemini's streamPart chunks carries its own text.
.@Get('ai/stream')
async stream(@Query() { provider:p, q }, @Res() res) {
res.writeHead(200, { 'Content-Type':'text/event-stream','X-Accel-Buffering':'no' });
if (p==='openai')
(await openai.chat.completions.create({model:'gpt-3.5-turbo',stream:true,
messages:[{role:'user',content:q}]}))
.on('data', c => res.write(`data:${c.choices[0]?.delta?.content}\n\n`));
if (p==='gemini')
for await (const part of gemini.generateContentStream({prompt:q}))
res.write(`data:${part.text}\n\n`);
}
/**
 * React hook that streams an AI completion for `prompt` via SSE and returns
 * the text accumulated so far.
 *
 * @param {string} prompt - user prompt; falsy prompt opens no connection.
 * @param {string} [provider='openai'] - backend selector ('openai' | 'gemini').
 * @returns {string} the text received so far (grows as chunks arrive).
 */
export function useAiWriter(prompt, provider = 'openai') {
  const [txt, setTxt] = useState('');
  useEffect(() => {
    if (!prompt) return;
    // Reset the buffer for a new prompt/provider; without this the new
    // answer is appended onto the previous one.
    setTxt('');
    const es = new EventSource(
      `/ai/stream?provider=${provider}&q=${encodeURIComponent(prompt)}`
    );
    es.onmessage = (e) => setTxt((t) => t + e.data);
    // The server ends the response when the stream is done, which fires
    // onerror on the client — close instead of letting EventSource reconnect.
    es.onerror = () => es.close();
    return () => es.close();
  }, [prompt, provider]);
  return txt;
}
function Typewriter({ streamText, speed = 20 }) {
const [display,setDisplay] = useState('');
useEffect(() => {
let i=0, id=setInterval(()=>{ setDisplay(p=>p+streamText[i++]||''); if(i>=streamText.length) clearInterval(id); }, speed);
return () => clearInterval(id);
}, [streamText, speed]);
return <pre className="whitespace-pre-wrap leading-6">{display}</pre>;
}