From e927de1821f06015703f3f8a0ca434ddbbdfd0da Mon Sep 17 00:00:00 2001
From: Jonas Francisco
Date: Thu, 13 Nov 2025 13:54:26 -0300
Subject: [PATCH] fix(ia): adicionei um componente Voice Visualization

---
 .../features/pacientes/chat-widget.tsx | 95 ++++++++++++++++++-
 susconecta/package.json                |  1 +
 susconecta/pnpm-lock.yaml              |  8 ++
 3 files changed, 99 insertions(+), 5 deletions(-)

diff --git a/susconecta/components/features/pacientes/chat-widget.tsx b/susconecta/components/features/pacientes/chat-widget.tsx
index 99e8ee9..a270f5d 100644
--- a/susconecta/components/features/pacientes/chat-widget.tsx
+++ b/susconecta/components/features/pacientes/chat-widget.tsx
@@ -1,9 +1,12 @@
+
+
 "use client";
 
 import { useEffect, useMemo, useState } from "react";
-import { ArrowLeft, Sparkles } from "lucide-react";
+import { ArrowLeft, Mic, MicOff, Sparkles } from "lucide-react";
 import { Button } from "@/components/ui/button";
 import { AIAssistantInterface } from "@/components/ZoeIA/ai-assistant-interface";
+import { VoicePoweredOrb } from "@/components/ZoeIA/voice-powered-orb";
 
 interface HistoryEntry {
   id: string;
@@ -13,10 +16,13 @@ interface HistoryEntry {
 
 export function ChatWidget() {
   const [assistantOpen, setAssistantOpen] = useState(false);
+  const [realtimeOpen, setRealtimeOpen] = useState(false);
+  const [isRecording, setIsRecording] = useState(false);
+  const [voiceDetected, setVoiceDetected] = useState(false);
   const [history, setHistory] = useState<HistoryEntry[]>([]);
 
   useEffect(() => {
-    if (!assistantOpen) return;
+    if (!assistantOpen && !realtimeOpen) return;
 
     const original = document.body.style.overflow;
     document.body.style.overflow = "hidden";
@@ -24,7 +30,7 @@ export function ChatWidget() {
     return () => {
       document.body.style.overflow = original;
     };
-  }, [assistantOpen]);
+  }, [assistantOpen, realtimeOpen]);
 
   const gradientRing = useMemo(
     () => (
@@ -39,14 +45,33 @@ export function ChatWidget() {
   const openAssistant = () => setAssistantOpen(true);
   const closeAssistant = () => setAssistantOpen(false);
 
+  const openRealtime = () => setRealtimeOpen(true);
+  const closeRealtime = () => {
+    setRealtimeOpen(false);
+    setAssistantOpen(true);
+    setIsRecording(false);
+    setVoiceDetected(false);
+  };
+
+  const toggleRecording = () => {
+    setIsRecording((prev) => {
+      const next = !prev;
+      if (!next) {
+        setVoiceDetected(false);
+      }
+      return next;
+    });
+  };
+
   const handleOpenDocuments = () => {
     console.log("[ChatWidget] Abrindo fluxo de documentos");
     closeAssistant();
   };
 
   const handleOpenChat = () => {
-    console.log("[ChatWidget] Encaminhando para chat humano");
-    closeAssistant();
+    console.log("[ChatWidget] Encaminhando para chat em tempo real");
+    setAssistantOpen(false);
+    openRealtime();
   };
 
   const handleAddHistory = (entry: HistoryEntry) => {
@@ -87,6 +112,66 @@ export function ChatWidget() {
       )}
 
+      {realtimeOpen && (
+        <div className="fixed inset-0 z-50 flex flex-col bg-background">
+          <div className="flex items-center border-b p-4">
+            <Button
+              variant="ghost"
+              size="icon"
+              onClick={closeRealtime}
+              aria-label="Voltar para a assistente"
+            >
+              <ArrowLeft className="h-5 w-5" />
+            </Button>
+          </div>
+
+          <div className="flex flex-1 flex-col items-center justify-center gap-6 p-6">
+            <div className="relative flex items-center justify-center">
+              <VoicePoweredOrb
+                isRecording={isRecording}
+                onVoiceDetected={setVoiceDetected}
+              />
+              {voiceDetected && (
+                <span className="absolute -bottom-8 rounded-full bg-primary/10 px-3 py-1 text-xs text-primary">
+                  Ouvindo…
+                </span>
+              )}
+            </div>
+
+            <p className="max-w-md text-center text-sm text-muted-foreground">
+              Ative a captura para falar com a equipe em tempo real. Assim que
+              sua voz for detectada, a Zoe sinaliza visualmente e encaminha o
+              atendimento.
+            </p>
+          </div>
+
+          <div className="flex items-center justify-center border-t p-4">
+            <Button
+              variant={isRecording ? "destructive" : "default"}
+              className="gap-2"
+              onClick={toggleRecording}
+            >
+              {isRecording ? (
+                <MicOff className="h-4 w-4" />
+              ) : (
+                <Mic className="h-4 w-4" />
+              )}
+              {isRecording ? "Parar captura" : "Iniciar captura"}
+            </Button>
+          </div>
+        </div>
+      )}
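
Note for reviewers: the `@/components/ZoeIA/voice-powered-orb` module that the widget now imports does not appear in this patch's diffstat (only chat-widget.tsx, package.json and pnpm-lock.yaml are touched), so its API is not visible here, and the one added package.json dependency is likewise not shown. As a point of reference only, below is a minimal sketch of how such an orb could be wired, assuming it accepts an `isRecording` flag plus an optional `onVoiceDetected` callback and drives its animation from the microphone level via the Web Audio API. The prop names, the RMS threshold and the styling are assumptions, not the implementation shipped in this commit.

"use client";

// Sketch only — assumed API for a voice-powered orb; not the code shipped in this commit.
import { useEffect, useRef, useState } from "react";

interface VoicePoweredOrbProps {
  /** Whether the widget has asked the orb to capture the microphone. */
  isRecording: boolean;
  /** Assumed callback: reports whether speech is currently being picked up. */
  onVoiceDetected?: (detected: boolean) => void;
}

export function VoicePoweredOrb({ isRecording, onVoiceDetected }: VoicePoweredOrbProps) {
  const [level, setLevel] = useState(0);
  const rafRef = useRef(0);

  useEffect(() => {
    if (!isRecording) {
      setLevel(0);
      onVoiceDetected?.(false);
      return;
    }

    let stream: MediaStream | null = null;
    let audioContext: AudioContext | null = null;
    let cancelled = false;

    const start = async () => {
      // Capture the microphone and route it into an AnalyserNode.
      stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      if (cancelled) {
        stream.getTracks().forEach((track) => track.stop());
        return;
      }
      audioContext = new AudioContext();
      const analyser = audioContext.createAnalyser();
      analyser.fftSize = 2048;
      audioContext.createMediaStreamSource(stream).connect(analyser);

      const samples = new Uint8Array(analyser.fftSize);
      const tick = () => {
        analyser.getByteTimeDomainData(samples);
        // RMS of the waveform, normalised to roughly 0..1.
        let sum = 0;
        for (let i = 0; i < samples.length; i++) {
          const v = (samples[i] - 128) / 128;
          sum += v * v;
        }
        const rms = Math.sqrt(sum / samples.length);
        setLevel(rms);
        // Arbitrary speech threshold; would need tuning against real input.
        onVoiceDetected?.(rms > 0.05);
        rafRef.current = requestAnimationFrame(tick);
      };
      tick();
    };

    start().catch((err) => console.error("[VoicePoweredOrb] mic error", err));

    return () => {
      cancelled = true;
      cancelAnimationFrame(rafRef.current);
      stream?.getTracks().forEach((track) => track.stop());
      void audioContext?.close();
    };
  }, [isRecording, onVoiceDetected]);

  // The orb itself: a gradient circle that pulses with the measured level.
  return (
    <div
      aria-hidden
      className="h-40 w-40 rounded-full bg-gradient-to-br from-sky-400 to-violet-500 transition-transform"
      style={{ transform: `scale(${1 + level * 1.5})` }}
    />
  );
}

Under those assumptions, the chat widget above would render the orb as <VoicePoweredOrb isRecording={isRecording} onVoiceDetected={setVoiceDetected} />, with the callback feeding the voiceDetected state that toggles the "Ouvindo…" badge in the realtime overlay.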