develop #83

Merged
M-Gabrielly merged 426 commits from develop into main 2025-12-04 04:13:15 +00:00
8 changed files with 1327 additions and 0 deletions
Showing only changes of commit 20e3253472 - Show all commits

View File

@ -0,0 +1,11 @@
import type { ReactNode } from "react";
import { ChatWidget } from "@/components/features/pacientes/chat-widget";
/**
 * Layout for the patients section: renders the routed page content and
 * floats the shared chat widget on top of every patients page.
 */
export default function PacientesLayout(props: { children: ReactNode }) {
  return (
    <>
      {props.children}
      <ChatWidget />
    </>
  );
}

View File

@ -0,0 +1,11 @@
import type { ReactNode } from "react";
import { ChatWidget } from "@/components/features/pacientes/chat-widget";
/**
 * Layout for a single patient's pages: the routed content plus the
 * floating chat widget overlay shared across the patient area.
 */
export default function PacienteLayout({ children }: { children: ReactNode }) {
  const chatOverlay = <ChatWidget />;
  return (
    <>
      {children}
      {chatOverlay}
    </>
  );
}

View File

@ -0,0 +1,512 @@
"use client";
import type React from "react";
import { useState, useRef } from "react";
import {
Search,
Mic,
ArrowUp,
Plus,
FileText,
Code,
BookOpen,
PenTool,
BrainCircuit,
Sparkles,
} from "lucide-react";
import { motion, AnimatePresence } from "framer-motion";
/**
 * Landing screen for the AI assistant: an animated SVG logo, a single
 * prompt input with Search / Deep Research / Reason mode toggles, a
 * simulated file-upload row, and three command-suggestion categories
 * (Learn / Code / Write) that expand into clickable prompt lists.
 *
 * NOTE(review): sending a message only logs to the console — wiring to a
 * real chat backend appears to be pending; confirm intended behavior.
 */
export function AIAssistantInterface() {
  // Current text in the prompt input.
  const [inputValue, setInputValue] = useState("");
  // Mode toggles; purely visual for now (no behavior is attached to them).
  const [searchEnabled, setSearchEnabled] = useState(false);
  const [deepResearchEnabled, setDeepResearchEnabled] = useState(false);
  const [reasonEnabled, setReasonEnabled] = useState(false);
  // Display names of "uploaded" files; uploads are simulated, no real I/O.
  const [uploadedFiles, setUploadedFiles] = useState<string[]>([]);
  // True while the fake-upload bouncing-dots animation is showing.
  const [showUploadAnimation, setShowUploadAnimation] = useState(false);
  // Which suggestion category ("learn" | "code" | "write") is expanded, if any.
  const [activeCommandCategory, setActiveCommandCategory] = useState<
    string | null
  >(null);
  const inputRef = useRef<HTMLInputElement>(null);
  // Static prompt suggestions shown when a category is expanded.
  const commandSuggestions = {
    learn: [
      "Explain the Big Bang theory",
      "How does photosynthesis work?",
      "What are black holes?",
      "Explain quantum computing",
      "How does the human brain work?",
    ],
    code: [
      "Create a React component for a todo list",
      "Write a Python function to sort a list",
      "How to implement authentication in Next.js",
      "Explain async/await in JavaScript",
      "Create a CSS animation for a button",
    ],
    write: [
      "Write a professional email to a client",
      "Create a product description for a smartphone",
      "Draft a blog post about AI",
      "Write a creative story about space exploration",
      "Create a social media post about sustainability",
    ],
  };
  // Simulates an upload: shows the dots animation for 1.5s, then appends
  // a fixed placeholder file name to the list.
  const handleUploadFile = () => {
    setShowUploadAnimation(true);
    // Simulate file upload with timeout
    setTimeout(() => {
      const newFile = `Document.pdf`;
      setUploadedFiles((prev) => [...prev, newFile]);
      setShowUploadAnimation(false);
    }, 1500);
  };
  // Copies a clicked suggestion into the input, collapses the category
  // panel, and refocuses the input for editing/sending.
  const handleCommandSelect = (command: string) => {
    setInputValue(command);
    setActiveCommandCategory(null);
    if (inputRef.current) {
      inputRef.current.focus();
    }
  };
  // Stub send handler: logs the message and clears the input.
  const handleSendMessage = () => {
    if (inputValue.trim()) {
      console.log("Sending message:", inputValue);
      setInputValue("");
    }
  };
  return (
    <div className="min-h-screen flex flex-col items-center justify-center bg-white p-6">
      <div className="w-full max-w-3xl mx-auto flex flex-col items-center">
        {/* Logo with animated gradient */}
        <div className="mb-8 w-20 h-20 relative">
          <svg
            xmlns="http://www.w3.org/2000/svg"
            fill="none"
            viewBox="0 0 200 200"
            width="100%"
            height="100%"
            className="w-full h-full"
          >
            <g clipPath="url(#cs_clip_1_ellipse-12)">
              <mask
                id="cs_mask_1_ellipse-12"
                style={{ maskType: "alpha" }}
                width="200"
                height="200"
                x="0"
                y="0"
                maskUnits="userSpaceOnUse"
              >
                <path
                  fill="#fff"
                  fillRule="evenodd"
                  d="M100 150c27.614 0 50-22.386 50-50s-22.386-50-50-50-50 22.386-50 50 22.386 50 50 50zm0 50c55.228 0 100-44.772 100-100S155.228 0 100 0 0 44.772 0 100s44.772 100 100 100z"
                  clipRule="evenodd"
                ></path>
              </mask>
              <g mask="url(#cs_mask_1_ellipse-12)">
                <path fill="#fff" d="M200 0H0v200h200V0z"></path>
                <path
                  fill="#0066FF"
                  fillOpacity="0.33"
                  d="M200 0H0v200h200V0z"
                ></path>
                {/* Blurred color patches; "animate-gradient" drives the motion */}
                <g
                  filter="url(#filter0_f_844_2811)"
                  className="animate-gradient"
                >
                  <path fill="#0066FF" d="M110 32H18v68h92V32z"></path>
                  <path fill="#0044FF" d="M188-24H15v98h173v-98z"></path>
                  <path fill="#0099FF" d="M175 70H5v156h170V70z"></path>
                  <path fill="#00CCFF" d="M230 51H100v103h130V51z"></path>
                </g>
              </g>
            </g>
            <defs>
              <filter
                id="filter0_f_844_2811"
                width="385"
                height="410"
                x="-75"
                y="-104"
                colorInterpolationFilters="sRGB"
                filterUnits="userSpaceOnUse"
              >
                <feFlood floodOpacity="0" result="BackgroundImageFix"></feFlood>
                <feBlend
                  in="SourceGraphic"
                  in2="BackgroundImageFix"
                  result="shape"
                ></feBlend>
                <feGaussianBlur
                  result="effect1_foregroundBlur_844_2811"
                  stdDeviation="40"
                ></feGaussianBlur>
              </filter>
              <clipPath id="cs_clip_1_ellipse-12">
                <path fill="#fff" d="M0 0H200V200H0z"></path>
              </clipPath>
            </defs>
            {/* Subtle noise texture blended over the gradient */}
            <g
              style={{ mixBlendMode: "overlay" }}
              mask="url(#cs_mask_1_ellipse-12)"
            >
              <path
                fill="gray"
                stroke="transparent"
                d="M200 0H0v200h200V0z"
                filter="url(#cs_noise_1_ellipse-12)"
              ></path>
            </g>
            <defs>
              <filter
                id="cs_noise_1_ellipse-12"
                width="100%"
                height="100%"
                x="0%"
                y="0%"
                filterUnits="objectBoundingBox"
              >
                <feTurbulence
                  baseFrequency="0.6"
                  numOctaves="5"
                  result="out1"
                  seed="4"
                ></feTurbulence>
                <feComposite
                  in="out1"
                  in2="SourceGraphic"
                  operator="in"
                  result="out2"
                ></feComposite>
                <feBlend
                  in="SourceGraphic"
                  in2="out2"
                  mode="overlay"
                  result="out3"
                ></feBlend>
              </filter>
            </defs>
          </svg>
        </div>
        {/* Welcome message */}
        <div className="mb-10 text-center">
          <motion.div
            initial={{ opacity: 0, y: 10 }}
            animate={{ opacity: 1, y: 0 }}
            transition={{ duration: 0.3 }}
            className="flex flex-col items-center"
          >
            <h1 className="text-3xl font-bold bg-clip-text text-transparent bg-gradient-to-r from-blue-600 to-blue-400 mb-2">
              Ready to assist you
            </h1>
            <p className="text-gray-500 max-w-md">
              Ask me anything or try one of the suggestions below
            </p>
          </motion.div>
        </div>
        {/* Input area with integrated functions and file upload */}
        <div className="w-full bg-white border border-gray-200 rounded-xl shadow-sm overflow-hidden mb-4">
          <div className="p-4">
            <input
              ref={inputRef}
              type="text"
              placeholder="Ask me anything..."
              value={inputValue}
              onChange={(e) => setInputValue(e.target.value)}
              className="w-full text-gray-700 text-base outline-none placeholder:text-gray-400"
            />
          </div>
          {/* Uploaded files */}
          {uploadedFiles.length > 0 && (
            <div className="px-4 pb-3">
              <div className="flex flex-wrap gap-2">
                {uploadedFiles.map((file, index) => (
                  <div
                    key={index}
                    className="flex items-center gap-2 bg-gray-50 py-1 px-2 rounded-md border border-gray-200"
                  >
                    <FileText className="w-3 h-3 text-blue-600" />
                    <span className="text-xs text-gray-700">{file}</span>
                    {/* Removes this file chip by its position in the list */}
                    <button
                      onClick={() =>
                        setUploadedFiles((prev) =>
                          prev.filter((_, i) => i !== index)
                        )
                      }
                      className="text-gray-400 hover:text-gray-600"
                    >
                      <svg
                        xmlns="http://www.w3.org/2000/svg"
                        width="12"
                        height="12"
                        viewBox="0 0 24 24"
                        fill="none"
                        stroke="currentColor"
                        strokeWidth="2"
                        strokeLinecap="round"
                        strokeLinejoin="round"
                      >
                        <line x1="18" y1="6" x2="6" y2="18"></line>
                        <line x1="6" y1="6" x2="18" y2="18"></line>
                      </svg>
                    </button>
                  </div>
                ))}
              </div>
            </div>
          )}
          {/* Search, Deep Research, Reason functions and actions */}
          <div className="px-4 py-3 flex items-center justify-between">
            <div className="flex items-center gap-2">
              <button
                onClick={() => setSearchEnabled(!searchEnabled)}
                className={`flex items-center gap-2 px-3 py-1.5 rounded-full text-sm font-medium transition-colors ${
                  searchEnabled
                    ? "bg-blue-50 text-blue-600 hover:bg-blue-100"
                    : "bg-gray-100 text-gray-400 hover:bg-gray-200"
                }`}
              >
                <Search className="w-4 h-4" />
                <span>Search</span>
              </button>
              <button
                onClick={() => setDeepResearchEnabled(!deepResearchEnabled)}
                className={`flex items-center gap-2 px-3 py-1.5 rounded-full text-sm font-medium transition-colors ${
                  deepResearchEnabled
                    ? "bg-blue-50 text-blue-600 hover:bg-blue-100"
                    : "bg-gray-100 text-gray-400 hover:bg-gray-200"
                }`}
              >
                <svg
                  width="16"
                  height="16"
                  viewBox="0 0 16 16"
                  fill="none"
                  xmlns="http://www.w3.org/2000/svg"
                  className={
                    deepResearchEnabled ? "text-blue-600" : "text-gray-400"
                  }
                >
                  <circle
                    cx="8"
                    cy="8"
                    r="7"
                    stroke="currentColor"
                    strokeWidth="2"
                  />
                  <circle cx="8" cy="8" r="3" fill="currentColor" />
                </svg>
                <span>Deep Research</span>
              </button>
              <button
                onClick={() => setReasonEnabled(!reasonEnabled)}
                className={`flex items-center gap-2 px-3 py-1.5 rounded-full text-sm font-medium transition-colors ${
                  reasonEnabled
                    ? "bg-blue-50 text-blue-600 hover:bg-blue-100"
                    : "bg-gray-100 text-gray-400 hover:bg-gray-200"
                }`}
              >
                <BrainCircuit
                  className={`w-4 h-4 ${
                    reasonEnabled ? "text-blue-600" : "text-gray-400"
                  }`}
                />
                <span>Reason</span>
              </button>
            </div>
            <div className="flex items-center gap-2">
              {/* NOTE(review): the mic button has no handler yet */}
              <button className="p-2 text-gray-400 hover:text-gray-600 transition-colors">
                <Mic className="w-5 h-5" />
              </button>
              {/* Send button; disabled while the input is empty/whitespace */}
              <button
                onClick={handleSendMessage}
                disabled={!inputValue.trim()}
                className={`w-8 h-8 flex items-center justify-center rounded-full transition-colors ${
                  inputValue.trim()
                    ? "bg-blue-600 text-white hover:bg-blue-700"
                    : "bg-gray-100 text-gray-400 cursor-not-allowed"
                }`}
              >
                <ArrowUp className="w-4 h-4" />
              </button>
            </div>
          </div>
          {/* Upload files */}
          <div className="px-4 py-2 border-t border-gray-100">
            <button
              onClick={handleUploadFile}
              className="flex items-center gap-2 text-gray-600 text-sm hover:text-gray-900 transition-colors"
            >
              {showUploadAnimation ? (
                // Three staggered bouncing dots while the fake upload runs
                <motion.div
                  className="flex space-x-1"
                  initial="hidden"
                  animate="visible"
                  variants={{
                    hidden: {},
                    visible: {
                      transition: {
                        staggerChildren: 0.1,
                      },
                    },
                  }}
                >
                  {[...Array(3)].map((_, i) => (
                    <motion.div
                      key={i}
                      className="w-1.5 h-1.5 bg-blue-600 rounded-full"
                      variants={{
                        hidden: { opacity: 0, y: 5 },
                        visible: {
                          opacity: 1,
                          y: 0,
                          transition: {
                            duration: 0.4,
                            repeat: Infinity,
                            repeatType: "mirror",
                            delay: i * 0.1,
                          },
                        },
                      }}
                    />
                  ))}
                </motion.div>
              ) : (
                <Plus className="w-4 h-4" />
              )}
              <span>Upload Files</span>
            </button>
          </div>
        </div>
        {/* Command categories (each toggles its suggestion panel) */}
        <div className="w-full grid grid-cols-3 gap-4 mb-4">
          <CommandButton
            icon={<BookOpen className="w-5 h-5" />}
            label="Learn"
            isActive={activeCommandCategory === "learn"}
            onClick={() =>
              setActiveCommandCategory(
                activeCommandCategory === "learn" ? null : "learn"
              )
            }
          />
          <CommandButton
            icon={<Code className="w-5 h-5" />}
            label="Code"
            isActive={activeCommandCategory === "code"}
            onClick={() =>
              setActiveCommandCategory(
                activeCommandCategory === "code" ? null : "code"
              )
            }
          />
          <CommandButton
            icon={<PenTool className="w-5 h-5" />}
            label="Write"
            isActive={activeCommandCategory === "write"}
            onClick={() =>
              setActiveCommandCategory(
                activeCommandCategory === "write" ? null : "write"
              )
            }
          />
        </div>
        {/* Command suggestions (animated expand/collapse) */}
        <AnimatePresence>
          {activeCommandCategory && (
            <motion.div
              initial={{ opacity: 0, height: 0 }}
              animate={{ opacity: 1, height: "auto" }}
              exit={{ opacity: 0, height: 0 }}
              className="w-full mb-6 overflow-hidden"
            >
              <div className="bg-white rounded-xl border border-gray-200 shadow-sm overflow-hidden">
                <div className="p-3 border-b border-gray-100">
                  <h3 className="text-sm font-medium text-gray-700">
                    {activeCommandCategory === "learn"
                      ? "Learning suggestions"
                      : activeCommandCategory === "code"
                        ? "Coding suggestions"
                        : "Writing suggestions"}
                  </h3>
                </div>
                <ul className="divide-y divide-gray-100">
                  {commandSuggestions[
                    activeCommandCategory as keyof typeof commandSuggestions
                  ].map((suggestion, index) => (
                    <motion.li
                      key={index}
                      initial={{ opacity: 0 }}
                      animate={{ opacity: 1 }}
                      transition={{ delay: index * 0.03 }}
                      onClick={() => handleCommandSelect(suggestion)}
                      className="p-3 hover:bg-gray-50 cursor-pointer transition-colors duration-75"
                    >
                      <div className="flex items-center gap-3">
                        {activeCommandCategory === "learn" ? (
                          <BookOpen className="w-4 h-4 text-blue-600" />
                        ) : activeCommandCategory === "code" ? (
                          <Code className="w-4 h-4 text-blue-600" />
                        ) : (
                          <PenTool className="w-4 h-4 text-blue-600" />
                        )}
                        <span className="text-sm text-gray-700">
                          {suggestion}
                        </span>
                      </div>
                    </motion.li>
                  ))}
                </ul>
              </div>
            </motion.div>
          )}
        </AnimatePresence>
      </div>
    </div>
  );
}
interface CommandButtonProps {
  icon: React.ReactNode;
  label: string;
  isActive: boolean;
  onClick: () => void;
}

/**
 * Category tile (Learn / Code / Write): an icon over a label that
 * highlights in blue while its suggestion panel is open.
 */
function CommandButton({ icon, label, isActive, onClick }: CommandButtonProps) {
  // Resolve the three state-dependent class fragments up front.
  const frameClasses = isActive
    ? "bg-blue-50 border-blue-200 shadow-sm"
    : "bg-white border-gray-200 hover:border-gray-300";
  const iconClasses = isActive ? "text-blue-600" : "text-gray-500";
  const labelClasses = isActive ? "text-blue-700" : "text-gray-700";
  return (
    <motion.button
      onClick={onClick}
      className={`flex flex-col items-center justify-center gap-2 p-4 rounded-xl border transition-all ${frameClasses}`}
    >
      <div className={iconClasses}>{icon}</div>
      <span className={`text-sm font-medium ${labelClasses}`}>{label}</span>
    </motion.button>
  );
}

View File

@ -0,0 +1,107 @@
"use client";
import { useEffect, useState } from "react";
import { VoicePoweredOrb } from "@/components/ZoeIA/voice-powered-orb";
import { AIAssistantInterface } from "@/components/ZoeIA/ai-assistant-interface";
import { Button } from "@/components/ui/button";
import { ArrowLeft, Mic, MicOff } from "lucide-react";
/**
 * Page hosting the voice-powered orb. Clicking (or keyboard-activating)
 * the orb opens a full-screen AI-assistant overlay; a separate button
 * toggles microphone-driven animation of the orb.
 */
export default function VoicePoweredOrbPage() {
  // Whether the orb is listening to the microphone.
  const [isRecording, setIsRecording] = useState(false);
  // True while the orb reports voice activity (drives the "Ouvindo" badge).
  const [voiceDetected, setVoiceDetected] = useState(false);
  // Whether the full-screen assistant overlay is open.
  const [assistantOpen, setAssistantOpen] = useState(false);
  const toggleRecording = () => {
    setIsRecording(!isRecording);
  };
  // Lock body scrolling while the overlay is open; restore the previous
  // overflow value when it closes or the page unmounts.
  useEffect(() => {
    if (!assistantOpen) return;
    const original = document.body.style.overflow;
    document.body.style.overflow = "hidden";
    return () => {
      document.body.style.overflow = original;
    };
  }, [assistantOpen]);
  const openAssistant = () => setAssistantOpen(true);
  const closeAssistant = () => setAssistantOpen(false);
  return (
    // Fix: removed a stray "d" token that was sitting in this className.
    <div className="min-h-screen flex items-center justify-center p-8">
      <div className="flex flex-col items-center space-y-8">
        {assistantOpen && (
          <div className="fixed inset-0 z-50 flex flex-col bg-background">
            <div className="flex items-center justify-between border-b border-border px-4 py-3">
              <Button
                type="button"
                variant="ghost"
                className="flex items-center gap-2"
                onClick={closeAssistant}
              >
                <ArrowLeft className="h-4 w-4" />
                Voltar
              </Button>
            </div>
            <div className="flex-1 overflow-auto">
              <AIAssistantInterface />
            </div>
          </div>
        )}
        {/* Orb: acts as a button that opens the assistant overlay */}
        <div
          className="w-96 h-96 relative cursor-pointer focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-primary focus-visible:ring-offset-2"
          role="button"
          tabIndex={0}
          aria-label="Abrir assistente virtual"
          onClick={openAssistant}
          onKeyDown={(event) => {
            // Mirror native button behavior: Enter and Space both activate.
            if (event.key === "Enter" || event.key === " ") {
              event.preventDefault();
              openAssistant();
            }
          }}
        >
          <VoicePoweredOrb
            enableVoiceControl={isRecording}
            className="rounded-xl overflow-hidden shadow-2xl"
            onVoiceDetected={setVoiceDetected}
          />
          {voiceDetected && (
            <span className="absolute bottom-4 right-4 rounded-full bg-primary/90 px-3 py-1 text-xs font-medium text-primary-foreground shadow-lg">
              Ouvindo
            </span>
          )}
        </div>
        {/* Control Button */}
        <Button
          onClick={toggleRecording}
          variant={isRecording ? "destructive" : "default"}
          size="lg"
          className="px-8 py-3"
        >
          {isRecording ? (
            <>
              <MicOff className="w-5 h-5 mr-3" />
              Stop Recording
            </>
          ) : (
            <>
              <Mic className="w-5 h-5 mr-3" />
              Start Recording
            </>
          )}
        </Button>
        {/* Simple Instructions */}
        <p className="text-muted-foreground text-center max-w-md">
          Click the button to enable voice control. Speak to see the orb respond to your voice with subtle movements.
        </p>
      </div>
    </div>
  );
}

View File

@ -0,0 +1,10 @@
import * as React from "react"
import { AIAssistantInterface } from "@/components/ZoeIA/ai-assistant-interface"
/** Demo harness: mounts the assistant UI in a viewport-wide container. */
export function Demo() {
  const assistant = <AIAssistantInterface />
  return <div className="w-screen">{assistant}</div>
}

View File

@ -0,0 +1,493 @@
"use client";
import React, { useEffect, useRef, FC } from "react";
import { Renderer, Program, Mesh, Triangle, Vec3 } from "ogl";
import { cn } from "@/lib/utils";
/** Props for the WebGL voice-reactive orb. */
interface VoicePoweredOrbProps {
  /** Extra classes merged onto the container div via `cn`. */
  className?: string;
  /** Hue rotation in degrees applied to the orb's base palette (default 0). */
  hue?: number;
  /** When true, microphone input drives rotation and distortion (default true). */
  enableVoiceControl?: boolean;
  /** Multiplier applied to the RMS microphone level (default 1.5). */
  voiceSensitivity?: number;
  /** Scales the voice-driven rotation speed (default 1.2). */
  maxRotationSpeed?: number;
  /** Upper bound on the hover-distortion shader uniform (default 0.8). */
  maxHoverIntensity?: number;
  /** Called each frame with whether the mic level exceeds the 0.1 threshold. */
  onVoiceDetected?: (detected: boolean) => void;
}
/**
 * WebGL orb (rendered with the `ogl` library) whose rotation and surface
 * distortion react to microphone input. A fragment shader draws the orb;
 * per-frame RMS of the mic's frequency data is mapped onto the `rot`,
 * `hover` and `hoverIntensity` uniforms.
 *
 * NOTE(review): both the main effect below and the `enableVoiceControl`
 * effect call `initMicrophone()` on mount, so the mic is started twice in
 * quick succession (each start first tears down the previous one) —
 * confirm this double start is intentional.
 */
export const VoicePoweredOrb: FC<VoicePoweredOrbProps> = ({
  className,
  hue = 0,
  enableVoiceControl = true,
  voiceSensitivity = 1.5,
  maxRotationSpeed = 1.2,
  maxHoverIntensity = 0.8,
  onVoiceDetected,
}) => {
  // Container the WebGL canvas is appended into.
  const ctnDom = useRef<HTMLDivElement>(null);
  // Web Audio graph: context -> mic source -> analyser.
  const audioContextRef = useRef<AudioContext | null>(null);
  const analyserRef = useRef<AnalyserNode | null>(null);
  const microphoneRef = useRef<MediaStreamAudioSourceNode | null>(null);
  // Scratch buffer sized to the analyser's frequencyBinCount.
  const dataArrayRef = useRef<Uint8Array | null>(null);
  // NOTE(review): animationFrameRef is never used — the RAF id lives in
  // the effect-local `rafId` instead; consider removing this ref.
  const animationFrameRef = useRef<number>();
  // Kept so every track can be stopped on cleanup.
  const mediaStreamRef = useRef<MediaStream | null>(null);
  // Fullscreen-triangle vertex shader: passes UVs straight through.
  const vert = /* glsl */ `
    precision highp float;
    attribute vec2 position;
    attribute vec2 uv;
    varying vec2 vUv;
    void main() {
      vUv = uv;
      gl_Position = vec4(position, 0.0, 1.0);
    }
  `;
  // Fragment shader: simplex-noise-deformed ring in YIQ-hue-rotated
  // colors, with voice-driven rotation (rot) and wobble (hover*).
  const frag = /* glsl */ `
    precision highp float;
    uniform float iTime;
    uniform vec3 iResolution;
    uniform float hue;
    uniform float hover;
    uniform float rot;
    uniform float hoverIntensity;
    varying vec2 vUv;
    vec3 rgb2yiq(vec3 c) {
      float y = dot(c, vec3(0.299, 0.587, 0.114));
      float i = dot(c, vec3(0.596, -0.274, -0.322));
      float q = dot(c, vec3(0.211, -0.523, 0.312));
      return vec3(y, i, q);
    }
    vec3 yiq2rgb(vec3 c) {
      float r = c.x + 0.956 * c.y + 0.621 * c.z;
      float g = c.x - 0.272 * c.y - 0.647 * c.z;
      float b = c.x - 1.106 * c.y + 1.703 * c.z;
      return vec3(r, g, b);
    }
    vec3 adjustHue(vec3 color, float hueDeg) {
      float hueRad = hueDeg * 3.14159265 / 180.0;
      vec3 yiq = rgb2yiq(color);
      float cosA = cos(hueRad);
      float sinA = sin(hueRad);
      float i = yiq.y * cosA - yiq.z * sinA;
      float q = yiq.y * sinA + yiq.z * cosA;
      yiq.y = i;
      yiq.z = q;
      return yiq2rgb(yiq);
    }
    vec3 hash33(vec3 p3) {
      p3 = fract(p3 * vec3(0.1031, 0.11369, 0.13787));
      p3 += dot(p3, p3.yxz + 19.19);
      return -1.0 + 2.0 * fract(vec3(
        p3.x + p3.y,
        p3.x + p3.z,
        p3.y + p3.z
      ) * p3.zyx);
    }
    float snoise3(vec3 p) {
      const float K1 = 0.333333333;
      const float K2 = 0.166666667;
      vec3 i = floor(p + (p.x + p.y + p.z) * K1);
      vec3 d0 = p - (i - (i.x + i.y + i.z) * K2);
      vec3 e = step(vec3(0.0), d0 - d0.yzx);
      vec3 i1 = e * (1.0 - e.zxy);
      vec3 i2 = 1.0 - e.zxy * (1.0 - e);
      vec3 d1 = d0 - (i1 - K2);
      vec3 d2 = d0 - (i2 - K1);
      vec3 d3 = d0 - 0.5;
      vec4 h = max(0.6 - vec4(
        dot(d0, d0),
        dot(d1, d1),
        dot(d2, d2),
        dot(d3, d3)
      ), 0.0);
      vec4 n = h * h * h * h * vec4(
        dot(d0, hash33(i)),
        dot(d1, hash33(i + i1)),
        dot(d2, hash33(i + i2)),
        dot(d3, hash33(i + 1.0))
      );
      return dot(vec4(31.316), n);
    }
    vec4 extractAlpha(vec3 colorIn) {
      float a = max(max(colorIn.r, colorIn.g), colorIn.b);
      return vec4(colorIn.rgb / (a + 1e-5), a);
    }
    const vec3 baseColor1 = vec3(0.611765, 0.262745, 0.996078);
    const vec3 baseColor2 = vec3(0.298039, 0.760784, 0.913725);
    const vec3 baseColor3 = vec3(0.062745, 0.078431, 0.600000);
    const float innerRadius = 0.6;
    const float noiseScale = 0.65;
    float light1(float intensity, float attenuation, float dist) {
      return intensity / (1.0 + dist * attenuation);
    }
    float light2(float intensity, float attenuation, float dist) {
      return intensity / (1.0 + dist * dist * attenuation);
    }
    vec4 draw(vec2 uv) {
      vec3 color1 = adjustHue(baseColor1, hue);
      vec3 color2 = adjustHue(baseColor2, hue);
      vec3 color3 = adjustHue(baseColor3, hue);
      float ang = atan(uv.y, uv.x);
      float len = length(uv);
      float invLen = len > 0.0 ? 1.0 / len : 0.0;
      float n0 = snoise3(vec3(uv * noiseScale, iTime * 0.5)) * 0.5 + 0.5;
      float r0 = mix(mix(innerRadius, 1.0, 0.4), mix(innerRadius, 1.0, 0.6), n0);
      float d0 = distance(uv, (r0 * invLen) * uv);
      float v0 = light1(1.0, 10.0, d0);
      v0 *= smoothstep(r0 * 1.05, r0, len);
      float cl = cos(ang + iTime * 2.0) * 0.5 + 0.5;
      float a = iTime * -1.0;
      vec2 pos = vec2(cos(a), sin(a)) * r0;
      float d = distance(uv, pos);
      float v1 = light2(1.5, 5.0, d);
      v1 *= light1(1.0, 50.0, d0);
      float v2 = smoothstep(1.0, mix(innerRadius, 1.0, n0 * 0.5), len);
      float v3 = smoothstep(innerRadius, mix(innerRadius, 1.0, 0.5), len);
      vec3 col = mix(color1, color2, cl);
      col = mix(color3, col, v0);
      col = (col + v1) * v2 * v3;
      col = clamp(col, 0.0, 1.0);
      return extractAlpha(col);
    }
    vec4 mainImage(vec2 fragCoord) {
      vec2 center = iResolution.xy * 0.5;
      float size = min(iResolution.x, iResolution.y);
      vec2 uv = (fragCoord - center) / size * 2.0;
      float angle = rot;
      float s = sin(angle);
      float c = cos(angle);
      uv = vec2(c * uv.x - s * uv.y, s * uv.x + c * uv.y);
      uv.x += hover * hoverIntensity * 0.1 * sin(uv.y * 10.0 + iTime);
      uv.y += hover * hoverIntensity * 0.1 * sin(uv.x * 10.0 + iTime);
      return draw(uv);
    }
    void main() {
      vec2 fragCoord = vUv * iResolution.xy;
      vec4 col = mainImage(fragCoord);
      gl_FragColor = vec4(col.rgb * col.a, col.a);
    }
  `;
  // Voice analysis function: returns the current mic level in [0, 1],
  // or 0 when the analyser is not ready.
  const analyzeAudio = () => {
    if (!analyserRef.current || !dataArrayRef.current) return 0;
    // To avoid type incompatibilities between different ArrayBuffer-like types
    // (Uint8Array<ArrayBufferLike> vs Uint8Array<ArrayBuffer>), create a
    // standard Uint8Array copy with an ArrayBuffer backing it. This satisfies
    // the Web Audio API typing and is safe (small cost to copy).
    const src = dataArrayRef.current as Uint8Array;
    const buffer = Uint8Array.from(src);
    analyserRef.current.getByteFrequencyData(buffer);
    // Calculate RMS (Root Mean Square) for better voice detection
    let sum = 0;
    for (let i = 0; i < buffer.length; i++) {
      const value = buffer[i] / 255;
      sum += value * value;
    }
    const rms = Math.sqrt(sum / buffer.length);
    // Apply sensitivity and boost the signal
    const level = Math.min(rms * voiceSensitivity * 3.0, 1);
    return level;
  };
  // Stop microphone and cleanup: tears down tracks, audio nodes and the
  // audio context. Safe to call repeatedly.
  const stopMicrophone = () => {
    try {
      // Stop all tracks in the media stream
      if (mediaStreamRef.current) {
        mediaStreamRef.current.getTracks().forEach(track => {
          track.stop();
        });
        mediaStreamRef.current = null;
      }
      // Disconnect and cleanup audio nodes
      if (microphoneRef.current) {
        microphoneRef.current.disconnect();
        microphoneRef.current = null;
      }
      if (analyserRef.current) {
        analyserRef.current.disconnect();
        analyserRef.current = null;
      }
      // Close audio context
      if (audioContextRef.current && audioContextRef.current.state !== 'closed') {
        audioContextRef.current.close();
        audioContextRef.current = null;
      }
      dataArrayRef.current = null;
      console.log('Microphone stopped and cleaned up');
    } catch (error) {
      console.warn('Error stopping microphone:', error);
    }
  };
  // Initialize microphone access; returns true on success, false when the
  // user denies permission or no mic is available.
  const initMicrophone = async () => {
    try {
      // Clean up any existing microphone first
      stopMicrophone();
      // Raw signal requested (no echo cancellation / noise suppression /
      // AGC) so the RMS reflects the actual input level.
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: false,
          noiseSuppression: false,
          autoGainControl: false,
          sampleRate: 44100,
        },
      });
      mediaStreamRef.current = stream;
      // webkitAudioContext fallback for older Safari.
      audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)();
      if (audioContextRef.current.state === 'suspended') {
        await audioContextRef.current.resume();
      }
      analyserRef.current = audioContextRef.current.createAnalyser();
      microphoneRef.current = audioContextRef.current.createMediaStreamSource(stream);
      analyserRef.current.fftSize = 512;
      analyserRef.current.smoothingTimeConstant = 0.3;
      analyserRef.current.minDecibels = -90;
      analyserRef.current.maxDecibels = -10;
      microphoneRef.current.connect(analyserRef.current);
      dataArrayRef.current = new Uint8Array(analyserRef.current.frequencyBinCount);
      console.log('Microphone initialized successfully');
      return true;
    } catch (error) {
      console.warn("Microphone access denied or not available:", error);
      return false;
    }
  };
  // Main effect: builds the ogl renderer/program/mesh, starts the render
  // loop, and wires resize handling. Re-runs when any visual or voice
  // prop changes (vert/frag are stable string values, so they never
  // retrigger it in practice).
  useEffect(() => {
    const container = ctnDom.current;
    if (!container) return;
    let rendererInstance: any = null;
    let glContext: WebGLRenderingContext | WebGL2RenderingContext | null = null;
    let rafId: number;
    let program: any = null;
    try {
      rendererInstance = new Renderer({
        alpha: true,
        premultipliedAlpha: false,
        antialias: true,
        dpr: window.devicePixelRatio || 1
      });
      glContext = rendererInstance.gl as WebGLRenderingContext;
      glContext.clearColor(0, 0, 0, 0);
      glContext.enable((glContext as any).BLEND);
      glContext.blendFunc((glContext as any).SRC_ALPHA, (glContext as any).ONE_MINUS_SRC_ALPHA);
      // Remove any stale canvas from a previous run before appending.
      while (container.firstChild) {
        container.removeChild(container.firstChild);
      }
      container.appendChild((glContext as any).canvas);
      const geometry = new Triangle(glContext as any);
      program = new Program(glContext as any, {
        vertex: vert,
        fragment: frag,
        uniforms: {
          iTime: { value: 0 },
          iResolution: {
            value: new Vec3(
              (glContext as any).canvas.width,
              (glContext as any).canvas.height,
              (glContext as any).canvas.width / (glContext as any).canvas.height
            ),
          },
          hue: { value: hue },
          hover: { value: 0 },
          rot: { value: 0 },
          hoverIntensity: { value: 0 },
        },
      });
      const mesh = new Mesh(glContext as any, { geometry, program });
      // Keep the drawing buffer and iResolution in sync with the
      // container's CSS size (DPR-scaled).
      const resize = () => {
        if (!container || !rendererInstance || !glContext) return;
        const dpr = window.devicePixelRatio || 1;
        const width = container.clientWidth;
        const height = container.clientHeight;
        if (width === 0 || height === 0) return;
        rendererInstance.setSize(width * dpr, height * dpr);
        (glContext as any).canvas.style.width = width + "px";
        (glContext as any).canvas.style.height = height + "px";
        if (program) {
          program.uniforms.iResolution.value.set(
            (glContext as any).canvas.width,
            (glContext as any).canvas.height,
            (glContext as any).canvas.width / (glContext as any).canvas.height
          );
        }
      };
      window.addEventListener("resize", resize);
      resize();
      let lastTime = 0;
      let currentRot = 0;
      let voiceLevel = 0;
      const baseRotationSpeed = 0.3;
      let isMicrophoneInitialized = false;
      if (enableVoiceControl) {
        initMicrophone().then((success) => {
          isMicrophoneInitialized = success;
        });
      } else {
        stopMicrophone();
        isMicrophoneInitialized = false;
      }
      // Per-frame update: map the mic level onto rotation speed and
      // hover distortion, then render.
      const update = (t: number) => {
        rafId = requestAnimationFrame(update);
        if (!program) return;
        const dt = (t - lastTime) * 0.001;
        lastTime = t;
        program.uniforms.iTime.value = t * 0.001;
        program.uniforms.hue.value = hue;
        if (enableVoiceControl && isMicrophoneInitialized) {
          voiceLevel = analyzeAudio();
          if (onVoiceDetected) {
            // 0.1 is the "voice present" threshold reported to the parent.
            onVoiceDetected(voiceLevel > 0.1);
          }
          const voiceRotationSpeed = baseRotationSpeed + (voiceLevel * maxRotationSpeed * 2.0);
          // Only accumulate rotation above a small noise floor.
          if (voiceLevel > 0.05) {
            currentRot += dt * voiceRotationSpeed;
          }
          program.uniforms.hover.value = Math.min(voiceLevel * 2.0, 1.0);
          program.uniforms.hoverIntensity.value = Math.min(voiceLevel * maxHoverIntensity * 0.8, maxHoverIntensity);
        } else {
          program.uniforms.hover.value = 0;
          program.uniforms.hoverIntensity.value = 0;
          if (onVoiceDetected) {
            onVoiceDetected(false);
          }
        }
        program.uniforms.rot.value = currentRot;
        if (rendererInstance && glContext) {
          glContext.clear((glContext as any).COLOR_BUFFER_BIT | (glContext as any).DEPTH_BUFFER_BIT);
          rendererInstance.render({ scene: mesh });
        }
      };
      rafId = requestAnimationFrame(update);
      return () => {
        cancelAnimationFrame(rafId);
        window.removeEventListener("resize", resize);
        try {
          if (container && glContext && (glContext as any).canvas) {
            if (container.contains((glContext as any).canvas)) {
              container.removeChild((glContext as any).canvas);
            }
          }
        } catch (error) {
          console.warn("Canvas cleanup error:", error);
        }
        stopMicrophone();
        // Explicitly release the GL context where the extension exists.
        if (glContext) {
          (glContext as any).getExtension("WEBGL_lose_context")?.loseContext();
        }
      };
    } catch (error) {
      console.error("Error initializing Voice Powered Orb:", error);
      if (container && container.firstChild) {
        container.removeChild(container.firstChild);
      }
      return () => {
        // NOTE(review): removing a freshly-created arrow function is a
        // no-op; on this error path "resize" was never registered, so
        // this cleanup is harmless but does nothing.
        window.removeEventListener("resize", () => {});
      };
    }
  }, [
    hue,
    enableVoiceControl,
    voiceSensitivity,
    maxRotationSpeed,
    maxHoverIntensity,
    vert,
    frag,
  ]);
  // Start/stop the microphone when voice control toggles. See the
  // double-init note on the component docstring.
  useEffect(() => {
    let isMounted = true;
    const handleMicrophoneState = async () => {
      if (enableVoiceControl) {
        // NOTE(review): `success` is unused — nothing reacts to a failed
        // init here; confirm whether a failure should surface to the UI.
        const success = await initMicrophone();
        if (!isMounted) return;
      } else {
        stopMicrophone();
      }
    };
    handleMicrophoneState();
    return () => {
      isMounted = false;
    };
  }, [enableVoiceControl]);
  return (
    <div
      ref={ctnDom}
      className={cn(
        "w-full h-full relative",
        className
      )}
    >
    </div>
  );
};

View File

@ -0,0 +1,182 @@
"use client";
import { useMemo, useState } from "react";
import { Sparkles, MessageCircle, X, Send } from "lucide-react";
import { Input } from "@/components/ui/input";
import { Button } from "@/components/ui/button";
// Quick-reply chips offered beneath the conversation; clicking one
// pre-fills the input.
const cannedSuggestions = [
  "Como remarcar minha consulta?",
  "Quais documentos preciso levar?",
  "Quero falar com suporte humano",
];
// Human-support availability notice shown in the widget footer.
// Fix: "SegSex" had lost its hyphen — "Seg-Sex" (Monday-Friday).
const supportAvailability = {
  title: "Equipe disponível",
  description: "Seg-Sex das 08h às 18h",
};
/** A single bubble in the chat widget's transcript. */
interface ChatMessage {
  /** React list key; derived from the author role plus an ISO timestamp. */
  id: string;
  /** Which side of the conversation sent the message. */
  author: "assistant" | "user";
  /** Message body rendered inside the bubble. */
  text: string;
  /** ISO-8601 creation time. */
  timestamp: string;
}
/**
 * Floating chat widget: a gradient launcher button pinned to the bottom
 * right that toggles a panel with a canned-response transcript, quick
 * suggestion chips, support-hours info, and a message input.
 *
 * NOTE(review): replies are canned — every user message gets the same
 * auto-response; real backend wiring appears to be pending.
 */
export function ChatWidget() {
  // Whether the chat panel is expanded.
  const [open, setOpen] = useState(false);
  // Draft text in the message input.
  const [input, setInput] = useState("");
  // Transcript, seeded with a welcome message from the assistant.
  const [messages, setMessages] = useState<ChatMessage[]>(() => [
    {
      id: "welcome",
      author: "assistant",
      text: "Olá! Sou sua assistente virtual. Posso ajudar a acompanhar consultas, exames e suporte geral.",
      timestamp: new Date().toISOString(),
    },
  ]);
  const toggle = () => setOpen((prev) => !prev);
  // Appends the user's message plus a canned assistant reply, then clears
  // the input. Whitespace-only input is ignored.
  // NOTE(review): ids are derived from the ISO timestamp, so two sends in
  // the same millisecond would produce duplicate React keys — confirm.
  const handleSend = () => {
    const trimmed = input.trim();
    if (!trimmed) return;
    const now = new Date().toISOString();
    setMessages((prev) => [
      ...prev,
      { id: `user-${now}`, author: "user", text: trimmed, timestamp: now },
      {
        id: `assistant-${now}`,
        author: "assistant",
        text: "Recebi sua mensagem! Nossa equipe retornará em breve.",
        timestamp: now,
      },
    ]);
    setInput("");
  };
  // Decorative gradient halo behind the launcher; memoized once since it
  // depends on nothing.
  const gradientRing = useMemo(
    () => (
      <span
        aria-hidden
        className="absolute inset-0 rounded-full bg-gradient-to-br from-primary via-sky-500 to-emerald-400 opacity-90 blur-sm transition group-hover:blur group-hover:opacity-100"
      />
    ),
    []
  );
  return (
    <div className="fixed bottom-6 right-6 z-50 flex flex-col items-end gap-2 sm:bottom-8 sm:right-8">
      {open && (
        <div
          id="chat-widget"
          className="w-[min(22rem,90vw)] rounded-3xl border border-primary/20 bg-background shadow-[0_20px_60px_rgba(30,64,175,0.25)] ring-1 ring-primary/10"
        >
          {/* Panel header: branding plus close button */}
          <header className="flex items-start gap-3 rounded-t-3xl bg-gradient-to-r from-primary via-blue-600 to-emerald-500 px-5 py-4 text-primary-foreground">
            <div className="flex h-12 w-12 items-center justify-center rounded-full bg-white/15">
              <Sparkles className="h-6 w-6" aria-hidden />
            </div>
            <div className="flex-1">
              <p className="text-sm font-semibold">Assistente RiseUp</p>
              <p className="text-xs text-white/80">Pronta para ajudar no que você precisar</p>
            </div>
            <button
              type="button"
              onClick={toggle}
              className="rounded-full border border-white/20 p-1.5 text-white/80 transition hover:bg-white/10 hover:text-white"
              aria-label="Fechar chat"
            >
              <X className="h-4 w-4" />
            </button>
          </header>
          {/* Transcript: assistant bubbles left-aligned, user bubbles reversed */}
          <div className="max-h-[22rem] overflow-y-auto px-5 py-4 space-y-3 text-sm">
            {messages.map((message) => (
              <div
                key={message.id}
                className={
                  message.author === "assistant"
                    ? "flex items-start gap-3"
                    : "flex flex-row-reverse items-start gap-3"
                }
              >
                <span
                  className={`flex h-8 w-8 shrink-0 items-center justify-center rounded-full ${
                    message.author === "assistant"
                      ? "bg-primary/10 text-primary"
                      : "bg-gradient-to-br from-primary/10 to-emerald-100 text-primary"
                  }`}
                >
                  {message.author === "assistant" ? <Sparkles className="h-4 w-4" /> : <MessageCircle className="h-4 w-4" />}
                </span>
                <div
                  className={`rounded-2xl px-4 py-2 leading-relaxed shadow-sm ${
                    message.author === "assistant"
                      ? "bg-primary/5 text-muted-foreground"
                      : "bg-primary text-primary-foreground"
                  }`}
                >
                  {message.text}
                </div>
              </div>
            ))}
          </div>
          <div className="px-5 pb-4">
            {/* Support-hours notice */}
            <div className="mb-3 text-xs text-muted-foreground/80">
              <p className="font-medium text-primary">{supportAvailability.title}</p>
              <p>{supportAvailability.description}</p>
            </div>
            {/* Quick-reply chips: clicking pre-fills the input */}
            <div className="flex flex-wrap gap-2 pb-3">
              {cannedSuggestions.map((suggestion) => (
                <button
                  key={suggestion}
                  type="button"
                  onClick={() => setInput(suggestion)}
                  className="rounded-full border border-primary/20 px-3 py-1 text-xs text-muted-foreground transition hover:border-primary hover:text-primary"
                >
                  {suggestion}
                </button>
              ))}
            </div>
            {/* Composer: Enter sends, as does the send button */}
            <div className="flex items-center gap-2 rounded-full border border-border bg-background px-3 py-2 shadow-inner">
              <Input
                value={input}
                onChange={(event) => setInput(event.target.value)}
                placeholder="Escreva sua mensagem"
                className="border-none px-0 text-sm focus-visible:ring-0"
                onKeyDown={(event) => {
                  if (event.key === "Enter") {
                    event.preventDefault();
                    handleSend();
                  }
                }}
              />
              <Button
                size="icon"
                className="rounded-full bg-primary text-primary-foreground shadow-md transition hover:bg-primary/90"
                onClick={handleSend}
                aria-label="Enviar mensagem"
              >
                <Send className="h-4 w-4" />
              </Button>
            </div>
          </div>
        </div>
      )}
      {/* Launcher button with gradient halo; toggles the panel */}
      <button
        type="button"
        onClick={toggle}
        className="group relative flex h-16 w-16 items-center justify-center rounded-full"
        aria-haspopup="dialog"
        aria-expanded={open}
        aria-controls="chat-widget"
      >
        {gradientRing}
        <span className="relative flex h-16 w-16 items-center justify-center rounded-full bg-background text-primary shadow-[0_12px_30px_rgba(37,99,235,0.25)] ring-1 ring-primary/10 transition group-hover:scale-[1.03] group-active:scale-95">
          <Sparkles className="h-7 w-7" />
        </span>
      </button>
    </div>
  );
}

1
susconecta/types/ogl.d.ts vendored Normal file
View File

@ -0,0 +1 @@
// Blanket ambient declaration: everything imported from 'ogl' is typed
// as `any`, which disables type checking on Renderer/Program/Mesh/
// Triangle/Vec3 usage.
// NOTE(review): consider replacing this with real typings for the five
// members the orb component actually uses.
declare module 'ogl';