feat: Add live streaming to Chat page
- Replace blocking execute_agent_task() with live subprocess streaming
- Use Popen() to read opencode output line-by-line in real time
- Send 'chunk' events to the frontend as the agent thinks
- Frontend appends chunks incrementally for a live response
- Matches the Orchestrator's streaming UX
- No more waiting for the complete response before seeing output
This commit is contained in:
parent
d620f496a8
commit
e2a853ffde
3 changed files with 408 additions and 7 deletions
|
|
@ -178,6 +178,11 @@ function sendChat(event) {
|
|||
agentBadge.textContent = currentAgent;
|
||||
} else if (data.type === 'processing') {
|
||||
chatStatus.innerHTML = `<span style="color:var(--info);">${data.message}</span>`;
|
||||
} else if (data.type === 'chunk') {
|
||||
// Live chunk received - append to response
|
||||
currentResponse += data.text;
|
||||
responseText.textContent = currentResponse;
|
||||
chatContainer.scrollTop = chatContainer.scrollHeight;
|
||||
} else if (data.type === 'response') {
|
||||
currentResponse = data.text;
|
||||
responseText.textContent = currentResponse;
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue