fix(studio): improve chat context, thinking tags, streaming, and tool results
All checks were successful
Beta Release / beta (push) Successful in 39s

- Fix cleanThinkingTags to use proper regex instead of naive ReplaceAll
- Send conversation history (last 20 messages + summary) to AI instead of single message
- Store tool results alongside tool calls so history shows complete execution info
- Stream words instead of characters for smoother SSE rendering
- Add stop button to cancel in-progress AI requests (AbortController)
- Fix markdown rendering: add h2 support, use div for bullets
- Add i18n keys for cancel/stop (EN + FR)

💘 Generated with Crush

Assisted-by: GLM-5.1 via Crush <crush@charm.land>
This commit is contained in:
Augustin
2026-04-22 22:37:45 +02:00
parent 2e50366cd8
commit 65804aae4e
7 changed files with 149 additions and 26 deletions

View File

@@ -4,12 +4,15 @@ import (
"context"
"encoding/json"
"net/http"
"regexp"
"strings"
"github.com/muyue/muyue/internal/agent"
"github.com/muyue/muyue/internal/orchestrator"
)
var thinkingTagRegex = regexp.MustCompile(`(?s)<[Tt]hink[^>]*>.*?</[Tt]hink>`)
const maxToolIterations = 15
func (s *Server) handleChat(w http.ResponseWriter, r *http.Request) {
@@ -68,12 +71,11 @@ func (s *Server) handleStreamChat(w http.ResponseWriter, orb *orchestrator.Orche
}
ctx := context.Background()
messages := []orchestrator.Message{
{Role: "user", Content: userMessage},
}
messages := s.buildContextMessages(userMessage)
var finalContent string
var allToolCalls []map[string]interface{}
var allToolResults []map[string]interface{}
for i := 0; i < maxToolIterations; i++ {
resp, err := orb.SendWithTools(messages)
@@ -86,8 +88,13 @@ func (s *Server) handleStreamChat(w http.ResponseWriter, orb *orchestrator.Orche
content := cleanThinkingTags(choice.Message.Content)
if content != "" {
for _, ch := range strings.Split(content, "") {
writeSSE(map[string]interface{}{"content": ch})
words := strings.Fields(content)
for i, w := range words {
chunk := w
if i < len(words)-1 {
chunk += " "
}
writeSSE(map[string]interface{}{"content": chunk})
}
finalContent = content
}
@@ -133,6 +140,14 @@ func (s *Server) handleStreamChat(w http.ResponseWriter, orb *orchestrator.Orche
}
writeSSE(map[string]interface{}{"tool_result": resultData})
allToolResults = append(allToolResults, map[string]interface{}{
"tool_call_id": tc.ID,
"name": tc.Function.Name,
"args": tc.Function.Arguments,
"result": result.Content,
"is_error": result.IsError,
})
messages = append(messages, orchestrator.Message{
Role: "tool",
Content: result.Content,
@@ -146,7 +161,11 @@ func (s *Server) handleStreamChat(w http.ResponseWriter, orb *orchestrator.Orche
storeContent := finalContent
if len(allToolCalls) > 0 {
storeObj := map[string]interface{}{"content": storeContent, "tool_calls": allToolCalls}
storeObj := map[string]interface{}{
"content": storeContent,
"tool_calls": allToolCalls,
"tool_results": allToolResults,
}
storeJSON, _ := json.Marshal(storeObj)
storeContent = string(storeJSON)
}
@@ -157,9 +176,7 @@ func (s *Server) handleStreamChat(w http.ResponseWriter, orb *orchestrator.Orche
func (s *Server) handleNonStreamChat(w http.ResponseWriter, orb *orchestrator.Orchestrator, userMessage string) {
ctx := context.Background()
messages := []orchestrator.Message{
{Role: "user", Content: userMessage},
}
messages := s.buildContextMessages(userMessage)
var finalContent string
@@ -223,7 +240,59 @@ func (s *Server) handleNonStreamChat(w http.ResponseWriter, orb *orchestrator.Or
}
// cleanThinkingTags strips <think>...</think> reasoning blocks from model
// output and trims surrounding whitespace. Matching is delegated to the
// package-level thinkingTagRegex, which is case-insensitive on the tag name
// and spans newlines (its (?s) flag makes `.` match `\n`), unlike the old
// naive strings.ReplaceAll approach that only removed the literal "<think"
// prefix and left tag bodies behind.
func cleanThinkingTags(content string) string {
	return strings.TrimSpace(thinkingTagRegex.ReplaceAllString(content, ""))
}
// contextWindowMessages caps how many of the most recent stored messages are
// replayed to the model with each new request.
const contextWindowMessages = 20

// buildContextMessages assembles the message list sent to the model: an
// optional system message carrying the stored conversation summary, the last
// contextWindowMessages entries of stored history, and finally the new user
// message.
//
// Assistant turns that invoked tools are persisted as a JSON envelope
// ({"content": ..., "tool_calls": [...]}); when that envelope parses and has
// a non-empty "content" field, only the plain content is forwarded. Stored
// system messages are never replayed — the summary is the single system
// message sent.
func (s *Server) buildContextMessages(userMessage string) []orchestrator.Message {
	history := s.convStore.Get()
	if n := len(history); n > contextWindowMessages {
		history = history[n-contextWindowMessages:]
	}

	// +2 leaves room for the summary system message and the new user message.
	messages := make([]orchestrator.Message, 0, len(history)+2)

	if summary := s.convStore.GetSummary(); summary != "" {
		messages = append(messages, orchestrator.Message{
			Role:    "system",
			Content: "Résumé de la conversation précédente:\n" + summary,
		})
	}

	for _, m := range history {
		// Skip stored system messages before doing any content work.
		if m.Role == "system" {
			continue
		}
		content := m.Content
		if m.Role == "assistant" {
			// Unwrap the persisted JSON envelope to its displayable
			// content when possible; otherwise keep the raw stored text.
			var parsed struct {
				Content   string `json:"content"`
				ToolCalls []struct {
					ToolCallID string `json:"tool_call_id"`
					Name       string `json:"name"`
					Args       string `json:"args"`
				} `json:"tool_calls"`
			}
			if err := json.Unmarshal([]byte(content), &parsed); err == nil && parsed.Content != "" {
				content = parsed.Content
			}
		}
		messages = append(messages, orchestrator.Message{
			Role:    m.Role,
			Content: content,
		})
	}

	return append(messages, orchestrator.Message{
		Role:    "user",
		Content: userMessage,
	})
}
func (s *Server) autoSummarize() {

View File

@@ -136,8 +136,13 @@ func (s *Server) handleShellChatStream(w http.ResponseWriter, orb *orchestrator.
content := cleanThinkingTags(choice.Message.Content)
if content != "" {
for _, ch := range strings.Split(content, "") {
writeSSE(map[string]interface{}{"content": ch})
words := strings.Fields(content)
for i, w := range words {
chunk := w
if i < len(words)-1 {
chunk += " "
}
writeSSE(map[string]interface{}{"content": chunk})
}
finalContent = content
}