diff --git a/susconecta/app/(main-routes)/pacientes/layout.tsx b/susconecta/app/(main-routes)/pacientes/layout.tsx
new file mode 100644
index 0000000..4e9bd5c
--- /dev/null
+++ b/susconecta/app/(main-routes)/pacientes/layout.tsx
@@ -0,0 +1,11 @@
+import type { ReactNode } from "react";
+import { ChatWidget } from "@/components/features/pacientes/chat-widget";
+
+export default function PacientesLayout({ children }: { children: ReactNode }) {
+  return (
+    <>
+      {children}
+      <ChatWidget />
+    </>
+  );
+}
diff --git a/susconecta/app/paciente/layout.tsx b/susconecta/app/paciente/layout.tsx
new file mode 100644
index 0000000..1ff2978
--- /dev/null
+++ b/susconecta/app/paciente/layout.tsx
@@ -0,0 +1,11 @@
+import type { ReactNode } from "react";
+import { ChatWidget } from "@/components/features/pacientes/chat-widget";
+
+export default function PacienteLayout({ children }: { children: ReactNode }) {
+  return (
+    <>
+      {children}
+      <ChatWidget />
+    </>
+  );
+}
diff --git a/susconecta/components/ZoeIA/ai-assistant-interface.tsx b/susconecta/components/ZoeIA/ai-assistant-interface.tsx
new file mode 100644
index 0000000..b99d520
--- /dev/null
+++ b/susconecta/components/ZoeIA/ai-assistant-interface.tsx
@@ -0,0 +1,512 @@
+"use client";
+
+import type React from "react";
+
+import { useState, useRef } from "react";
+import {
+  Search,
+  Mic,
+  ArrowUp,
+  Plus,
+  FileText,
+  Code,
+  BookOpen,
+  PenTool,
+  BrainCircuit,
+  Sparkles,
+} from "lucide-react";
+import { motion, AnimatePresence } from "framer-motion";
+
+export function AIAssistantInterface() {
+  const [inputValue, setInputValue] = useState("");
+  const [searchEnabled, setSearchEnabled] = useState(false);
+  const [deepResearchEnabled, setDeepResearchEnabled] = useState(false);
+  const [reasonEnabled, setReasonEnabled] = useState(false);
+  const [uploadedFiles, setUploadedFiles] = useState<string[]>([]);
+  const [showUploadAnimation, setShowUploadAnimation] = useState(false);
+  const [activeCommandCategory, setActiveCommandCategory] = useState<
+    string | null
+  >(null);
+  const inputRef = useRef<HTMLInputElement>(null);
+
+  const commandSuggestions = {
+    learn: [
+      "Explain the Big Bang theory",
+      "How does photosynthesis work?",
+      "What are black holes?",
+      "Explain quantum computing",
+      "How does the human brain work?",
+    ],
+    code: [
+      "Create a React component for a todo list",
+      "Write a Python function to sort a list",
+      "How to implement authentication in Next.js",
+      "Explain async/await in JavaScript",
+      "Create a CSS animation for a button",
+    ],
+    write: [
+      "Write a professional email to a client",
+      "Create a product description for a smartphone",
+      "Draft a blog post about AI",
+      "Write a creative story about space exploration",
+      "Create a social media post about sustainability",
+    ],
+  };
+
+  const handleUploadFile = () => {
+    setShowUploadAnimation(true);
+
+    // Simulate file upload with timeout
+    setTimeout(() => {
+      const newFile = `Document.pdf`;
+      setUploadedFiles((prev) => [...prev, newFile]);
+      setShowUploadAnimation(false);
+    }, 1500);
+  };
+
+  const handleCommandSelect = (command: string) => {
+    setInputValue(command);
+    setActiveCommandCategory(null);
+
+    if (inputRef.current) {
+      inputRef.current.focus();
+    }
+  };
+
+  const handleSendMessage = () => {
+    if (inputValue.trim()) {
+      console.log("Sending message:", inputValue);
+      setInputValue("");
+    }
+  };
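+
+  // NOTE: handleSendMessage is a stub for now. It only logs the message and
+  // clears the input; wiring it to a real chat backend is out of scope for
+  // this PR (no such endpoint exists here).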
+
+  return (
+    <div className="min-h-screen w-full flex flex-col items-center justify-center bg-white p-6">
+      <div className="w-full max-w-2xl flex flex-col items-center">
+        {/* Logo with animated gradient */}
+        <div className="relative mb-8 flex h-20 w-20 items-center justify-center">
+          <div className="absolute inset-0 animate-pulse rounded-full bg-gradient-to-tr from-purple-500 via-sky-400 to-emerald-400 opacity-70 blur-md" />
+          <Sparkles className="relative h-9 w-9 text-white" />
+        </div>
+
+        {/* Welcome message */}
+        <div className="mb-10 text-center">
+          <h1 className="mb-2 text-3xl font-medium text-gray-800">
+            Ready to assist you
+          </h1>
+          <p className="text-gray-500">
+            Ask me anything or try one of the suggestions below
+          </p>
+        </div>
+
+        {/* Input area with integrated functions and file upload */}
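+        {/* The input below is fully controlled; the Search / Deep Research /
+            Reason chips only flip local state and are not sent anywhere yet. */}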
+        <div className="w-full rounded-2xl border border-gray-200 bg-white shadow-sm">
+          <div className="p-4">
+            <input
+              ref={inputRef}
+              type="text"
+              placeholder="Ask me anything..."
+              value={inputValue}
+              onChange={(e) => setInputValue(e.target.value)}
+              className="w-full text-gray-700 text-base outline-none placeholder:text-gray-400"
+            />
+          </div>
+
+          {/* Uploaded files */}
+          {uploadedFiles.length > 0 && (
+            <div className="px-4 pb-3">
+              <div className="flex flex-wrap gap-2">
+                {uploadedFiles.map((file, index) => (
+                  <div
+                    key={index}
+                    className="flex items-center gap-2 rounded-lg border border-gray-200 bg-gray-50 px-3 py-1.5"
+                  >
+                    <FileText className="h-4 w-4 text-gray-500" />
+                    <span className="text-sm text-gray-700">{file}</span>
+                  </div>
+                ))}
+              </div>
+            </div>
+          )}
+
+          {/* Search, Deep Research, Reason functions and actions */}
+          <div className="flex items-center justify-between border-t border-gray-100 px-4 py-3">
+            <div className="flex items-center gap-2">
+              <button
+                onClick={() => setSearchEnabled(!searchEnabled)}
+                className={`flex items-center gap-1.5 rounded-full px-3 py-1.5 text-sm transition-colors ${
+                  searchEnabled ? "bg-blue-50 text-blue-600" : "text-gray-500 hover:bg-gray-50"
+                }`}
+              >
+                <Search className="h-4 w-4" />
+                Search
+              </button>
+              <button
+                onClick={() => setDeepResearchEnabled(!deepResearchEnabled)}
+                className={`flex items-center gap-1.5 rounded-full px-3 py-1.5 text-sm transition-colors ${
+                  deepResearchEnabled ? "bg-blue-50 text-blue-600" : "text-gray-500 hover:bg-gray-50"
+                }`}
+              >
+                <BrainCircuit className="h-4 w-4" />
+                Deep Research
+              </button>
+              <button
+                onClick={() => setReasonEnabled(!reasonEnabled)}
+                className={`flex items-center gap-1.5 rounded-full px-3 py-1.5 text-sm transition-colors ${
+                  reasonEnabled ? "bg-blue-50 text-blue-600" : "text-gray-500 hover:bg-gray-50"
+                }`}
+              >
+                <Sparkles className="h-4 w-4" />
+                Reason
+              </button>
+            </div>
+            <div className="flex items-center gap-2">
+              {/* Mic button is visual only here; voice capture lives in VoicePoweredOrb */}
+              <button className="rounded-full p-2 text-gray-500 hover:bg-gray-50">
+                <Mic className="h-4 w-4" />
+              </button>
+              <button
+                onClick={handleSendMessage}
+                disabled={!inputValue.trim()}
+                className="rounded-full bg-blue-600 p-2 text-white disabled:opacity-40"
+              >
+                <ArrowUp className="h-4 w-4" />
+              </button>
+            </div>
+          </div>
+
+          {/* Upload files */}
+          <div className="border-t border-gray-100 px-4 py-2">
+            <button
+              onClick={handleUploadFile}
+              disabled={showUploadAnimation}
+              className="flex items-center gap-1.5 text-sm text-gray-500 hover:text-gray-700 disabled:opacity-50"
+            >
+              <Plus className="h-4 w-4" />
+              {showUploadAnimation ? "Uploading…" : "Attach a file"}
+            </button>
+          </div>
+        </div>
+
+        {/* Command categories */}
+        <div className="mt-8 flex gap-4">
+          <CommandButton
+            icon={<BookOpen className="h-5 w-5" />}
+            label="Learn"
+            isActive={activeCommandCategory === "learn"}
+            onClick={() =>
+              setActiveCommandCategory(
+                activeCommandCategory === "learn" ? null : "learn"
+              )
+            }
+          />
+          <CommandButton
+            icon={<Code className="h-5 w-5" />}
+            label="Code"
+            isActive={activeCommandCategory === "code"}
+            onClick={() =>
+              setActiveCommandCategory(
+                activeCommandCategory === "code" ? null : "code"
+              )
+            }
+          />
+          <CommandButton
+            icon={<PenTool className="h-5 w-5" />}
+            label="Write"
+            isActive={activeCommandCategory === "write"}
+            onClick={() =>
+              setActiveCommandCategory(
+                activeCommandCategory === "write" ? null : "write"
+              )
+            }
+          />
+        </div>
+
+        {/* Command suggestions */}
+        <AnimatePresence>
+          {activeCommandCategory && (
+            <motion.div
+              initial={{ opacity: 0, height: 0 }}
+              animate={{ opacity: 1, height: "auto" }}
+              exit={{ opacity: 0, height: 0 }}
+              className="mt-4 w-full overflow-hidden"
+            >
+              <div className="rounded-xl border border-gray-200 bg-white shadow-sm">
+                <div className="border-b border-gray-100 p-3">
+                  <h3 className="text-sm font-medium text-gray-700">
+                    {activeCommandCategory === "learn"
+                      ? "Learning suggestions"
+                      : activeCommandCategory === "code"
+                        ? "Coding suggestions"
+                        : "Writing suggestions"}
+                  </h3>
+                </div>
+                <div className="divide-y divide-gray-100">
+                  {commandSuggestions[
+                    activeCommandCategory as keyof typeof commandSuggestions
+                  ].map((suggestion, index) => (
+                    <motion.div
+                      key={index}
+                      initial={{ opacity: 0 }}
+                      animate={{ opacity: 1 }}
+                      transition={{ delay: index * 0.03 }}
+                      onClick={() => handleCommandSelect(suggestion)}
+                      className="p-3 hover:bg-gray-50 cursor-pointer transition-colors duration-75"
+                    >
+                      <div className="flex items-center gap-3">
+                        {activeCommandCategory === "learn" ? (
+                          <BookOpen className="h-4 w-4 text-gray-400" />
+                        ) : activeCommandCategory === "code" ? (
+                          <Code className="h-4 w-4 text-gray-400" />
+                        ) : (
+                          <PenTool className="h-4 w-4 text-gray-400" />
+                        )}
+                        <span className="text-sm text-gray-700">
+                          {suggestion}
+                        </span>
+                      </div>
+                    </motion.div>
+                  ))}
+                </div>
+              </div>
+            </motion.div>
+          )}
+        </AnimatePresence>
+      </div>
+    </div>
+  );
+}
+
+interface CommandButtonProps {
+  icon: React.ReactNode;
+  label: string;
+  isActive: boolean;
+  onClick: () => void;
+}
+
+function CommandButton({ icon, label, isActive, onClick }: CommandButtonProps) {
+  return (
+    <button
+      onClick={onClick}
+      className={`flex flex-col items-center gap-1.5 rounded-xl px-4 py-3 transition-colors ${
+        isActive ? "bg-blue-50 text-blue-600" : "text-gray-500 hover:bg-gray-50"
+      }`}
+    >
+      <div className="flex h-8 w-8 items-center justify-center">{icon}</div>
+      <span className="text-sm font-medium">{label}</span>
+    </button>
+  );
+}
diff --git a/susconecta/components/ZoeIA/demo-voice-orb.tsx b/susconecta/components/ZoeIA/demo-voice-orb.tsx
new file mode 100644
index 0000000..66087c0
--- /dev/null
+++ b/susconecta/components/ZoeIA/demo-voice-orb.tsx
@@ -0,0 +1,107 @@
+"use client";
+
+import { useEffect, useState } from "react";
+import { VoicePoweredOrb } from "@/components/ZoeIA/voice-powered-orb";
+import { AIAssistantInterface } from "@/components/ZoeIA/ai-assistant-interface";
+import { Button } from "@/components/ui/button";
+import { ArrowLeft, Mic, MicOff } from "lucide-react";
+
+export default function VoicePoweredOrbPage() {
+  const [isRecording, setIsRecording] = useState(false);
+  const [voiceDetected, setVoiceDetected] = useState(false);
+  const [assistantOpen, setAssistantOpen] = useState(false);
+
+  const toggleRecording = () => {
+    setIsRecording(!isRecording);
+  };
+
+  useEffect(() => {
+    if (!assistantOpen) return;
+
+    const original = document.body.style.overflow;
+    document.body.style.overflow = "hidden";
+
+    return () => {
+      document.body.style.overflow = original;
+    };
+  }, [assistantOpen]);
+
+  const openAssistant = () => setAssistantOpen(true);
+  const closeAssistant = () => setAssistantOpen(false);
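+
+  // While the assistant overlay is open, the effect above locks body scroll
+  // and restores the previous overflow value on close or unmount.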
+
+  return (
+    <div className="relative flex min-h-screen w-full flex-col items-center justify-center bg-black">
+      <div className="flex flex-col items-center gap-8">
+        {assistantOpen && (
+          <div className="fixed inset-0 z-50 overflow-y-auto bg-white">
+            <div className="sticky top-0 z-10 p-4">
+              <Button variant="ghost" size="sm" onClick={closeAssistant}>
+                <ArrowLeft className="mr-2 h-4 w-4" />
+                Back
+              </Button>
+            </div>
+            <div className="pb-10">
+              <AIAssistantInterface />
+            </div>
+          </div>
+        )}
+
+        {/* Orb */}
+        <div
+          className="relative h-64 w-64 cursor-pointer outline-none"
+          role="button"
+          tabIndex={0}
+          onClick={openAssistant}
+          onKeyDown={(event) => {
+            if (event.key === "Enter" || event.key === " ") {
+              event.preventDefault();
+              openAssistant();
+            }
+          }}
+        >
+          <VoicePoweredOrb
+            enableVoiceControl={isRecording}
+            onVoiceDetected={setVoiceDetected}
+          />
+          {voiceDetected && (
+            <span className="absolute -bottom-8 left-1/2 -translate-x-1/2 text-sm text-white/80">
+              Ouvindo…
+            </span>
+          )}
+        </div>
+
+        {/* Control Button */}
+        <Button onClick={toggleRecording} variant="secondary">
+          {isRecording ? (
+            <>
+              <MicOff className="mr-2 h-4 w-4" />
+              Disable voice control
+            </>
+          ) : (
+            <>
+              <Mic className="mr-2 h-4 w-4" />
+              Enable voice control
+            </>
+          )}
+        </Button>
+
+        {/* isRecording only toggles the orb's enableVoiceControl prop; mic
+            permission is requested and released inside VoicePoweredOrb. */}
+
+        {/* Simple Instructions */}

+ Click the button to enable voice control. Speak to see the orb respond to your voice with subtle movements. +

+
+
+ ); +} diff --git a/susconecta/components/ZoeIA/demo.tsx b/susconecta/components/ZoeIA/demo.tsx new file mode 100644 index 0000000..be4c889 --- /dev/null +++ b/susconecta/components/ZoeIA/demo.tsx @@ -0,0 +1,10 @@ +import * as React from "react" +import { AIAssistantInterface } from "@/components/ZoeIA/ai-assistant-interface" + +export function Demo() { + return ( +
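+      {/* Standalone preview of the assistant interface */}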
+      <AIAssistantInterface />
+    </div>
+  )
+}
diff --git a/susconecta/components/ZoeIA/voice-powered-orb.tsx b/susconecta/components/ZoeIA/voice-powered-orb.tsx
new file mode 100644
index 0000000..ca28076
--- /dev/null
+++ b/susconecta/components/ZoeIA/voice-powered-orb.tsx
@@ -0,0 +1,493 @@
+"use client";
+
+import React, { useEffect, useRef, FC } from "react";
+import { Renderer, Program, Mesh, Triangle, Vec3 } from "ogl";
+import { cn } from "@/lib/utils";
+
+interface VoicePoweredOrbProps {
+  className?: string;
+  hue?: number;
+  enableVoiceControl?: boolean;
+  voiceSensitivity?: number;
+  maxRotationSpeed?: number;
+  maxHoverIntensity?: number;
+  onVoiceDetected?: (detected: boolean) => void;
+}
+
+export const VoicePoweredOrb: FC<VoicePoweredOrbProps> = ({
+  className,
+  hue = 0,
+  enableVoiceControl = true,
+  voiceSensitivity = 1.5,
+  maxRotationSpeed = 1.2,
+  maxHoverIntensity = 0.8,
+  onVoiceDetected,
+}) => {
+  const ctnDom = useRef<HTMLDivElement>(null);
+  const audioContextRef = useRef<AudioContext | null>(null);
+  const analyserRef = useRef<AnalyserNode | null>(null);
+  const microphoneRef = useRef<MediaStreamAudioSourceNode | null>(null);
+  const dataArrayRef = useRef<Uint8Array | null>(null);
+  const animationFrameRef = useRef<number | undefined>(undefined);
+  const mediaStreamRef = useRef<MediaStream | null>(null);
+
+  const vert = /* glsl */ `
+    precision highp float;
+    attribute vec2 position;
+    attribute vec2 uv;
+    varying vec2 vUv;
+    void main() {
+      vUv = uv;
+      gl_Position = vec4(position, 0.0, 1.0);
+    }
+  `;
+
+  const frag = /* glsl */ `
+    precision highp float;
+
+    uniform float iTime;
+    uniform vec3 iResolution;
+    uniform float hue;
+    uniform float hover;
+    uniform float rot;
+    uniform float hoverIntensity;
+    varying vec2 vUv;
+
+    vec3 rgb2yiq(vec3 c) {
+      float y = dot(c, vec3(0.299, 0.587, 0.114));
+      float i = dot(c, vec3(0.596, -0.274, -0.322));
+      float q = dot(c, vec3(0.211, -0.523, 0.312));
+      return vec3(y, i, q);
+    }
+
+    vec3 yiq2rgb(vec3 c) {
+      float r = c.x + 0.956 * c.y + 0.621 * c.z;
+      float g = c.x - 0.272 * c.y - 0.647 * c.z;
+      float b = c.x - 1.106 * c.y + 1.703 * c.z;
+      return vec3(r, g, b);
+    }
+
+    vec3 adjustHue(vec3 color, float hueDeg) {
+      float hueRad = hueDeg * 3.14159265 / 180.0;
+      vec3 yiq = rgb2yiq(color);
+      float cosA = cos(hueRad);
+      float sinA = sin(hueRad);
+      float i = yiq.y * cosA - yiq.z * sinA;
+      float q = yiq.y * sinA + yiq.z * cosA;
+      yiq.y = i;
+      yiq.z = q;
+      return yiq2rgb(yiq);
+    }
+
+    vec3 hash33(vec3 p3) {
+      p3 = fract(p3 * vec3(0.1031, 0.11369, 0.13787));
+      p3 += dot(p3, p3.yxz + 19.19);
+      return -1.0 + 2.0 * fract(vec3(
+        p3.x + p3.y,
+        p3.x + p3.z,
+        p3.y + p3.z
+      ) * p3.zyx);
+    }
+
+    float snoise3(vec3 p) {
+      const float K1 = 0.333333333;
+      const float K2 = 0.166666667;
+      vec3 i = floor(p + (p.x + p.y + p.z) * K1);
+      vec3 d0 = p - (i - (i.x + i.y + i.z) * K2);
+      vec3 e = step(vec3(0.0), d0 - d0.yzx);
+      vec3 i1 = e * (1.0 - e.zxy);
+      vec3 i2 = 1.0 - e.zxy * (1.0 - e);
+      vec3 d1 = d0 - (i1 - K2);
+      vec3 d2 = d0 - (i2 - K1);
+      vec3 d3 = d0 - 0.5;
+      vec4 h = max(0.6 - vec4(
+        dot(d0, d0),
+        dot(d1, d1),
+        dot(d2, d2),
+        dot(d3, d3)
+      ), 0.0);
+      vec4 n = h * h * h * h * vec4(
+        dot(d0, hash33(i)),
+        dot(d1, hash33(i + i1)),
+        dot(d2, hash33(i + i2)),
+        dot(d3, hash33(i + 1.0))
+      );
+      return dot(vec4(31.316), n);
+    }
+
+    vec4 extractAlpha(vec3 colorIn) {
+      float a = max(max(colorIn.r, colorIn.g), colorIn.b);
+      return vec4(colorIn.rgb / (a + 1e-5), a);
+    }
+
+    const vec3 baseColor1 = vec3(0.611765, 0.262745, 0.996078);
+    const vec3 baseColor2 = vec3(0.298039, 0.760784, 0.913725);
+    const vec3 baseColor3 = vec3(0.062745, 0.078431, 0.600000);
+    const float innerRadius = 0.6;
+    const float noiseScale = 0.65;
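+
+    // Falloff kernels used by draw() below: light1 decays as 1/(1 + d*k) for
+    // the soft halo, light2 as 1/(1 + d*d*k) for the tight hot spot that
+    // orbits the ring.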
+    float light1(float intensity, float attenuation, float dist) {
+      return intensity / (1.0 + dist * attenuation);
+    }
+
+    float light2(float intensity, float attenuation, float dist) {
+      return intensity / (1.0 + dist * dist * attenuation);
+    }
+
+    vec4 draw(vec2 uv) {
+      vec3 color1 = adjustHue(baseColor1, hue);
+      vec3 color2 = adjustHue(baseColor2, hue);
+      vec3 color3 = adjustHue(baseColor3, hue);
+
+      float ang = atan(uv.y, uv.x);
+      float len = length(uv);
+      float invLen = len > 0.0 ? 1.0 / len : 0.0;
+
+      float n0 = snoise3(vec3(uv * noiseScale, iTime * 0.5)) * 0.5 + 0.5;
+      float r0 = mix(mix(innerRadius, 1.0, 0.4), mix(innerRadius, 1.0, 0.6), n0);
+      float d0 = distance(uv, (r0 * invLen) * uv);
+      float v0 = light1(1.0, 10.0, d0);
+      v0 *= smoothstep(r0 * 1.05, r0, len);
+      float cl = cos(ang + iTime * 2.0) * 0.5 + 0.5;
+
+      float a = iTime * -1.0;
+      vec2 pos = vec2(cos(a), sin(a)) * r0;
+      float d = distance(uv, pos);
+      float v1 = light2(1.5, 5.0, d);
+      v1 *= light1(1.0, 50.0, d0);
+
+      float v2 = smoothstep(1.0, mix(innerRadius, 1.0, n0 * 0.5), len);
+      float v3 = smoothstep(innerRadius, mix(innerRadius, 1.0, 0.5), len);
+
+      vec3 col = mix(color1, color2, cl);
+      col = mix(color3, col, v0);
+      col = (col + v1) * v2 * v3;
+      col = clamp(col, 0.0, 1.0);
+
+      return extractAlpha(col);
+    }
+
+    vec4 mainImage(vec2 fragCoord) {
+      vec2 center = iResolution.xy * 0.5;
+      float size = min(iResolution.x, iResolution.y);
+      vec2 uv = (fragCoord - center) / size * 2.0;
+
+      float angle = rot;
+      float s = sin(angle);
+      float c = cos(angle);
+      uv = vec2(c * uv.x - s * uv.y, s * uv.x + c * uv.y);
+
+      uv.x += hover * hoverIntensity * 0.1 * sin(uv.y * 10.0 + iTime);
+      uv.y += hover * hoverIntensity * 0.1 * sin(uv.x * 10.0 + iTime);
+
+      return draw(uv);
+    }
+
+    // extractAlpha rescaled the color by its max channel, so main() can emit
+    // premultiplied alpha and the orb composites cleanly over the transparent
+    // canvas.
+    void main() {
+      vec2 fragCoord = vUv * iResolution.xy;
+      vec4 col = mainImage(fragCoord);
+      gl_FragColor = vec4(col.rgb * col.a, col.a);
+    }
+  `;
+
+  // Voice analysis function
+  const analyzeAudio = () => {
+    if (!analyserRef.current || !dataArrayRef.current) return 0;
+
+    // To avoid type incompatibilities between different ArrayBuffer-like types
+    // (Uint8Array<ArrayBufferLike> vs Uint8Array<ArrayBuffer>), create a
+    // standard Uint8Array copy with an ArrayBuffer backing it. This satisfies
+    // the Web Audio API typing and is safe (small cost to copy).
+    const src = dataArrayRef.current as Uint8Array;
+    const buffer = Uint8Array.from(src);
+    analyserRef.current.getByteFrequencyData(buffer);
+
+    // Calculate RMS (Root Mean Square) for better voice detection
+    let sum = 0;
+    for (let i = 0; i < buffer.length; i++) {
+      const value = buffer[i] / 255;
+      sum += value * value;
+    }
+    const rms = Math.sqrt(sum / buffer.length);
+
+    // Apply sensitivity and boost the signal
+    const level = Math.min(rms * voiceSensitivity * 3.0, 1);
+
+    return level;
+  };
+
+  // Stop microphone and cleanup
+  const stopMicrophone = () => {
+    try {
+      // Stop all tracks in the media stream
+      if (mediaStreamRef.current) {
+        mediaStreamRef.current.getTracks().forEach(track => {
+          track.stop();
+        });
+        mediaStreamRef.current = null;
+      }
+
+      // Disconnect and cleanup audio nodes
+      if (microphoneRef.current) {
+        microphoneRef.current.disconnect();
+        microphoneRef.current = null;
+      }
+
+      if (analyserRef.current) {
+        analyserRef.current.disconnect();
+        analyserRef.current = null;
+      }
+
+      // Close audio context
+      if (audioContextRef.current && audioContextRef.current.state !== 'closed') {
+        audioContextRef.current.close();
+        audioContextRef.current = null;
+      }
+
+      dataArrayRef.current = null;
+      console.log('Microphone stopped and cleaned up');
+    } catch (error) {
+      console.warn('Error stopping microphone:', error);
+    }
+  };
+
+  // Initialize microphone access
+  const initMicrophone = async () => {
+    try {
+      // Clean up any existing microphone first
+      stopMicrophone();
+
+      const stream = await navigator.mediaDevices.getUserMedia({
+        audio: {
+          echoCancellation: false,
+          noiseSuppression: false,
+          autoGainControl: false,
+          sampleRate: 44100,
+        },
+      });
+
+      mediaStreamRef.current = stream;
+
+      audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)();
+
+      if (audioContextRef.current.state === 'suspended') {
+        await audioContextRef.current.resume();
+      }
+
+      analyserRef.current = audioContextRef.current.createAnalyser();
+      microphoneRef.current = audioContextRef.current.createMediaStreamSource(stream);
+
+      analyserRef.current.fftSize = 512;
+      analyserRef.current.smoothingTimeConstant = 0.3;
+      analyserRef.current.minDecibels = -90;
+      analyserRef.current.maxDecibels = -10;
+
+      microphoneRef.current.connect(analyserRef.current);
+      dataArrayRef.current = new Uint8Array(analyserRef.current.frequencyBinCount);
+
+      console.log('Microphone initialized successfully');
+      return true;
+    } catch (error) {
+      console.warn("Microphone access denied or not available:", error);
+      return false;
+    }
+  };
+
+  useEffect(() => {
+    const container = ctnDom.current;
+    if (!container) return;
+
+    let rendererInstance: any = null;
+    let glContext: WebGLRenderingContext | WebGL2RenderingContext | null = null;
+    let rafId: number;
+    let program: any = null;
+
+    try {
+      rendererInstance = new Renderer({
+        alpha: true,
+        premultipliedAlpha: false,
+        antialias: true,
+        dpr: window.devicePixelRatio || 1
+      });
+      glContext = rendererInstance.gl as WebGLRenderingContext;
+      glContext.clearColor(0, 0, 0, 0);
+      glContext.enable((glContext as any).BLEND);
+      glContext.blendFunc((glContext as any).SRC_ALPHA, (glContext as any).ONE_MINUS_SRC_ALPHA);
+
+      while (container.firstChild) {
+        container.removeChild(container.firstChild);
+      }
+      container.appendChild((glContext as any).canvas);
+
+      const geometry = new Triangle(glContext as any);
+      program = new Program(glContext as any, {
+        vertex: vert,
+        fragment: frag,
+        uniforms: {
+          iTime: { value: 0 },
+          iResolution: {
+            value: new Vec3(
+              (glContext as any).canvas.width,
+              (glContext as any).canvas.height,
+              (glContext as any).canvas.width / (glContext as any).canvas.height
+            ),
+          },
+          hue: { value: hue },
+          hover: { value: 0 },
+          rot: { value: 0 },
+          hoverIntensity: { value: 0 },
+        },
+      });
+
+      const mesh = new Mesh(glContext as any, { geometry, program });
+
+      const resize = () => {
+        if (!container || !rendererInstance || !glContext) return;
+        const dpr = window.devicePixelRatio || 1;
+        const width = container.clientWidth;
+        const height = container.clientHeight;
+
+        if (width === 0 || height === 0) return;
+
+        rendererInstance.setSize(width * dpr, height * dpr);
+        (glContext as any).canvas.style.width = width + "px";
+        (glContext as any).canvas.style.height = height + "px";
+
+        if (program) {
+          program.uniforms.iResolution.value.set(
+            (glContext as any).canvas.width,
+            (glContext as any).canvas.height,
+            (glContext as any).canvas.width / (glContext as any).canvas.height
+          );
+        }
+      };
+      window.addEventListener("resize", resize);
+      resize();
+
+      let lastTime = 0;
+      let currentRot = 0;
+      let voiceLevel = 0;
+      const baseRotationSpeed = 0.3;
+      let isMicrophoneInitialized = false;
+
+      if (enableVoiceControl) {
+        initMicrophone().then((success) => {
+          isMicrophoneInitialized = success;
+        });
+      } else {
+        stopMicrophone();
+        isMicrophoneInitialized = false;
+      }
+
+      const update = (t: number) => {
+        rafId = requestAnimationFrame(update);
+        if (!program) return;
+
+        const dt = (t - lastTime) * 0.001;
+        lastTime = t;
+        program.uniforms.iTime.value = t * 0.001;
+        program.uniforms.hue.value = hue;
+
+        if (enableVoiceControl && isMicrophoneInitialized) {
+          voiceLevel = analyzeAudio();
+
+          if (onVoiceDetected) {
+            onVoiceDetected(voiceLevel > 0.1);
+          }
+
+          const voiceRotationSpeed = baseRotationSpeed + (voiceLevel * maxRotationSpeed * 2.0);
+
+          if (voiceLevel > 0.05) {
+            currentRot += dt * voiceRotationSpeed;
+          }
+
+          program.uniforms.hover.value = Math.min(voiceLevel * 2.0, 1.0);
+          program.uniforms.hoverIntensity.value = Math.min(voiceLevel * maxHoverIntensity * 0.8, maxHoverIntensity);
+        } else {
+          program.uniforms.hover.value = 0;
+          program.uniforms.hoverIntensity.value = 0;
+          if (onVoiceDetected) {
+            onVoiceDetected(false);
+          }
+        }
+
+        program.uniforms.rot.value = currentRot;
+
+        if (rendererInstance && glContext) {
+          glContext.clear((glContext as any).COLOR_BUFFER_BIT | (glContext as any).DEPTH_BUFFER_BIT);
+          rendererInstance.render({ scene: mesh });
+        }
+      };
+
+      rafId = requestAnimationFrame(update);
+
+      return () => {
+        cancelAnimationFrame(rafId);
+        window.removeEventListener("resize", resize);
+
+        try {
+          if (container && glContext && (glContext as any).canvas) {
+            if (container.contains((glContext as any).canvas)) {
+              container.removeChild((glContext as any).canvas);
+            }
+          }
+        } catch (error) {
+          console.warn("Canvas cleanup error:", error);
+        }
+
+        stopMicrophone();
+
+        if (glContext) {
+          (glContext as any).getExtension("WEBGL_lose_context")?.loseContext();
+        }
+      };
+
+    } catch (error) {
+      console.error("Error initializing Voice Powered Orb:", error);
+      if (container && container.firstChild) {
+        container.removeChild(container.firstChild);
+      }
+      return () => {
+        window.removeEventListener("resize", () => {});
+      };
+    }
+  }, [
+    hue,
+    enableVoiceControl,
+    voiceSensitivity,
+    maxRotationSpeed,
+    maxHoverIntensity,
+    vert,
+    frag,
+  ]);
+
+  useEffect(() => {
+    let isMounted = true;
+
+    const handleMicrophoneState = async () => {
+      if (enableVoiceControl) {
+        const success = await initMicrophone();
+        if (!isMounted) return;
+      } else {
+        stopMicrophone();
+      }
+    };
+
+    handleMicrophoneState();
+
+    return () => {
+      isMounted = false;
+    };
+  }, [enableVoiceControl]);
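+
+  // Both effects above touch the microphone: the render-loop effect opens it
+  // when voice control is enabled, and the second re-syncs it when the prop
+  // flips at runtime. stopMicrophone() null-checks every handle, so the
+  // overlapping calls are safe.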
+  return (
+    <div
+      ref={ctnDom}
+      className={cn("h-full w-full", className)}
+    />
+  );
+};
diff --git a/susconecta/components/features/pacientes/chat-widget.tsx b/susconecta/components/features/pacientes/chat-widget.tsx
new file mode 100644
index 0000000..e80dd4b
--- /dev/null
+++ b/susconecta/components/features/pacientes/chat-widget.tsx
@@ -0,0 +1,182 @@
+"use client";
+
+import { useMemo, useState } from "react";
+import { Sparkles, MessageCircle, X, Send } from "lucide-react";
+import { Input } from "@/components/ui/input";
+import { Button } from "@/components/ui/button";
+
+const cannedSuggestions = [
+  "Como remarcar minha consulta?",
+  "Quais documentos preciso levar?",
+  "Quero falar com suporte humano",
+];
+
+const supportAvailability = {
+  title: "Equipe disponível",
+  description: "Seg–Sex das 08h às 18h",
+};
+
+interface ChatMessage {
+  id: string;
+  author: "assistant" | "user";
+  text: string;
+  timestamp: string;
+}
+
+export function ChatWidget() {
+  const [open, setOpen] = useState(false);
+  const [input, setInput] = useState("");
+  const [messages, setMessages] = useState<ChatMessage[]>(() => [
+    {
+      id: "welcome",
+      author: "assistant",
+      text: "Olá! Sou sua assistente virtual. Posso ajudar a acompanhar consultas, exames e suporte geral.",
+      timestamp: new Date().toISOString(),
+    },
+  ]);
+
+  const toggle = () => setOpen((prev) => !prev);
+
+  const handleSend = () => {
+    const trimmed = input.trim();
+    if (!trimmed) return;
+    const now = new Date().toISOString();
+    setMessages((prev) => [
+      ...prev,
+      { id: `user-${now}`, author: "user", text: trimmed, timestamp: now },
+      {
+        id: `assistant-${now}`,
+        author: "assistant",
+        text: "Recebi sua mensagem! Nossa equipe retornará em breve.",
+        timestamp: now,
+      },
+    ]);
+    setInput("");
+  };
+
+  const gradientRing = useMemo(
+    () => (
+      <span
+        aria-hidden
+        className="absolute inset-0 rounded-full bg-gradient-to-tr from-primary via-sky-400 to-emerald-400 opacity-70 blur-sm"
+      />
+    ),
+    []
+  );
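+
+  // handleSend appends the user message plus an immediate canned assistant
+  // reply; no backend round-trip happens in this widget yet.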
+
+  return (
+    <div className="fixed bottom-6 right-6 z-50 flex flex-col items-end gap-3">
+      {open && (
+        <div className="flex max-h-[70vh] w-80 flex-col overflow-hidden rounded-2xl border bg-background shadow-xl">
+          {/* Header */}
+          <div className="flex items-center gap-3 border-b p-4">
+            <span className="relative flex h-10 w-10 items-center justify-center rounded-full bg-primary/10">
+              {gradientRing}
+              <Sparkles className="relative h-5 w-5 text-primary" />
+            </span>
+            <div>
+              <p className="text-sm font-semibold">Assistente RiseUp</p>
+              <p className="text-xs text-muted-foreground">
+                Pronta para ajudar no que você precisar
+              </p>
+            </div>
+            <Button
+              variant="ghost"
+              size="icon"
+              className="ml-auto h-8 w-8"
+              onClick={toggle}
+            >
+              <X className="h-4 w-4" />
+            </Button>
+          </div>
+
+          {/* Messages */}
+          <div className="flex-1 space-y-3 overflow-y-auto p-4">
+            {messages.map((message) => (
+              <div
+                key={message.id}
+                className={`flex items-start gap-2 ${
+                  message.author === "user" ? "flex-row-reverse" : ""
+                }`}
+              >
+                <span className="flex h-7 w-7 shrink-0 items-center justify-center rounded-full bg-muted">
+                  {message.author === "assistant" ? (
+                    <Sparkles className="h-3.5 w-3.5" />
+                  ) : (
+                    <MessageCircle className="h-3.5 w-3.5" />
+                  )}
+                </span>
+                <div
+                  className={`max-w-[75%] rounded-2xl px-3 py-2 text-sm ${
+                    message.author === "user"
+                      ? "bg-primary text-primary-foreground"
+                      : "bg-muted"
+                  }`}
+                >
+                  {message.text}
+                </div>
+              </div>
+            ))}
+          </div>
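+          {/* Messages are keyed by author + ISO timestamp, so two sends in the
+              same millisecond would collide; acceptable for this demo. */}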
+
+          {/* Footer */}
+          <div className="space-y-3 border-t p-4">
+            <div className="rounded-lg bg-muted/50 p-3">
+              <p className="text-xs font-medium">{supportAvailability.title}</p>
+              <p className="text-xs text-muted-foreground">
+                {supportAvailability.description}
+              </p>
+            </div>
+
+            <div className="flex flex-wrap gap-2">
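+              {/* Quick replies prefill the input; sending stays an explicit action */}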
+              {cannedSuggestions.map((suggestion) => (
+                <button
+                  key={suggestion}
+                  type="button"
+                  onClick={() => setInput(suggestion)}
+                  className="rounded-full border px-3 py-1 text-xs text-muted-foreground hover:bg-muted"
+                >
+                  {suggestion}
+                </button>
+              ))}
+            </div>
+
+            <div className="flex items-center gap-2 rounded-full border px-3 py-1.5">
+              <Input
+                value={input}
+                onChange={(event) => setInput(event.target.value)}
+                placeholder="Escreva sua mensagem"
+                className="border-none px-0 text-sm focus-visible:ring-0"
+                onKeyDown={(event) => {
+                  if (event.key === "Enter") {
+                    event.preventDefault();
+                    handleSend();
+                  }
+                }}
+              />
+              <Button size="icon" className="h-8 w-8 shrink-0" onClick={handleSend}>
+                <Send className="h-4 w-4" />
+              </Button>
+            </div>
+          </div>
+        </div>
+      )}
+
+      {/* Floating toggle */}
+      <Button
+        size="icon"
+        className="h-12 w-12 rounded-full shadow-lg"
+        onClick={toggle}
+        aria-label={open ? "Fechar assistente" : "Abrir assistente"}
+      >
+        {open ? <X className="h-5 w-5" /> : <MessageCircle className="h-5 w-5" />}
+      </Button>
+    </div>
+  );
+}
diff --git a/susconecta/types/ogl.d.ts b/susconecta/types/ogl.d.ts
new file mode 100644
index 0000000..e897bbb
--- /dev/null
+++ b/susconecta/types/ogl.d.ts
@@ -0,0 +1 @@
+declare module 'ogl';