// LifeCoPilot visual — live voice conversation using the Web Speech API
// for mic capture + TTS, and window.claude.complete for the reply.
// Falls back to a scripted demo loop when idle or when speech APIs aren't
// available (Safari, etc).
function CoPilotVisual() {
  // Fixed set of life-domain labels rendered as chips in the UI.
  const domains = React.useMemo(() => (
    ['CALENDAR', 'WORK', 'HOME', 'FAMILY', 'WELLBEING', 'MEMORY']
  ), []);
  // Canned user/bot exchanges, one per domain, played on a loop in demo mode.
  const demoExchanges = React.useMemo(() => ([
    { domain: 'CALENDAR', you: "Reschedule my 3pm today.", bot: "Moved to Thursday 10am. Everyone's free." },
    { domain: 'WORK', you: "Draft the design brief.", bot: "320 words, three directions. Review?" },
    { domain: 'HOME', you: "I'm heading home soon.", bot: "Set thermostat to 21°, lights to warm." },
    { domain: 'FAMILY', you: "Remind me to call Ana.", bot: "I'll surface it between your meetings." },
    { domain: 'WELLBEING', you: "I'm feeling stressed.", bot: "Let's breathe. Four in, six out, ten minutes." },
    { domain: 'MEMORY', you: "What did I promise Jake?", bot: "You said you'd send the slides by Friday."
    },
  ]), []);
  // Mode: demo (scripted loop) or live (real voice conversation)
  const [mode, setMode] = React.useState('demo');
  const [supported, setSupported] = React.useState(true);
  // Shared UI state
  const [idx, setIdx] = React.useState(0);
  const [phase, setPhase] = React.useState('idle'); // idle | listening | thinking | bot | done
  // NOTE(review): the demo loop also sets phase to 'you' (see run() below),
  // which is missing from the enumeration above.
  const [typed, setTyped] = React.useState('');
  const [liveYou, setLiveYou] = React.useState('');
  const [liveBot, setLiveBot] = React.useState('');
  const [liveInterim, setLiveInterim] = React.useState('');
  const [domain, setDomain] = React.useState('CALENDAR');
  const recRef = React.useRef(null);   // active SpeechRecognition instance (live mode)
  const utterRef = React.useRef(null); // active SpeechSynthesisUtterance (TTS playback)
  // Feature detection
  React.useEffect(() => {
    const SR = window.SpeechRecognition || window.webkitSpeechRecognition;
    setSupported(!!SR && 'speechSynthesis' in window);
  }, []);
  // Scripted demo loop (original behavior)
  React.useEffect(() => {
    if (mode !== 'demo') return;
    let cancelled = false; // flipped by cleanup so an in-flight run() stops touching state
    const ex = demoExchanges[idx];
    async function run() {
      setPhase('you');
      setTyped('');
      await wait(600);
      if (cancelled) return;
      setPhase('thinking');
      await wait(700);
      if (cancelled) return;
      setPhase('bot');
      setTyped('');
      // Typewriter reveal of the bot reply, ~22-50ms per character.
      const text = ex.bot;
      for (let i = 1; i <= text.length; i++) {
        if (cancelled) return;
        setTyped(text.slice(0, i));
        await wait(22 + Math.random()*28);
      }
      setPhase('done');
      await wait(1800);
      if (cancelled) return;
      // Advance to the next exchange; the idx dependency re-runs this effect.
      setIdx(v => (v + 1) % demoExchanges.length);
    }
    run();
    return () => { cancelled = true; };
  }, [idx, demoExchanges, mode]);
  // Classify user utterance to a domain (used only in live mode)
  // Keyword regexes checked in priority order; defaults to CALENDAR on no match.
  function classify(text) {
    const t = text.toLowerCase();
    if (/(schedule|calendar|meeting|appointment|reschedul|monday|tuesday|wednesday|thursday|friday|pm|am|tomorrow)/.test(t)) return 'CALENDAR';
    if (/(draft|email|report|project|work|brief|deck|slide|presentation|client)/.test(t)) return 'WORK';
    if (/(home|thermostat|lights|door|lock|oven|coffee|fridge)/.test(t)) return 'HOME';
    if (/(mom|dad|wife|husband|kid|son|daughter|family|partner|ana|jake)/.test(t)) return 'FAMILY';
    if (/(stress|anxious|tired|sleep|breathe|calm|meditat|wellbeing|feel)/.test(t)) return 'WELLBEING';
    if (/(remember|remind|what did|recall|memory|promise)/.test(t)) return 'MEMORY';
    return 'CALENDAR';
  }
  // One full live-mode turn: show the transcript, fetch a one-sentence reply
  // via window.claude.complete (environment-provided API — assumed available
  // in live mode; failures fall back to a canned apology), type it out, then
  // speak it with the Web Speech synthesis API.
  async function handleUserTurn(text) {
    setLiveYou(text);
    setLiveInterim('');
    setLiveBot('');
    setDomain(classify(text));
    setPhase('thinking');
    let reply = '';
    try {
      reply = await window.claude.complete({ messages: [{ role: 'user', content: `You are LifeCoPilot, a voice-first ambient AI assistant for everyday life. Respond to the user in ONE short, warm, conversational sentence (max 20 words). Do not use markdown, lists, or emoji. Just a direct spoken reply.\n\nUser said: "${text}"` }] });
      // Strip a single leading/trailing quote the model sometimes adds.
      reply = (reply || '').trim().replace(/^["']|["']$/g, '');
      if (!reply) reply = "I'm here. What do you need?";
    } catch(e) {
      reply = "I'm having trouble reaching Cortex. Try again in a moment.";
    }
    setPhase('bot');
    setLiveBot('');
    // typed reveal
    for (let i = 1; i <= reply.length; i++) {
      setLiveBot(reply.slice(0, i));
      await wait(16 + Math.random()*20);
    }
    // speak it
    try {
      const u = new SpeechSynthesisUtterance(reply);
      u.rate = 1.02;
      u.pitch = 1.0;
      // Prefer a neutral English voice if available
      // NOTE(review): getVoices() can return [] before voices load
      // (voiceschanged event) — in that case no voice is set and the
      // browser default is used.
      const voices = window.speechSynthesis.getVoices();
      const v = voices.find(v => /en[-_]?US/i.test(v.lang) && /female|samantha|google us/i.test(v.name)) || voices.find(v => /en[-_]?US/i.test(v.lang)) || voices[0];
      if (v) u.voice = v;
      utterRef.current = u;
      window.speechSynthesis.speak(u);
      // Resolve on either end or error so the turn always completes.
      await new Promise(res => { u.onend = res; u.onerror = res; });
    } catch(e) {}
    setPhase('done');
  }
  // Switch into live mode and start a single (non-continuous) recognition
  // session; the onend handler hands any final transcript to handleUserTurn.
  function startLive() {
    const SR = window.SpeechRecognition || window.webkitSpeechRecognition;
    if (!SR) return;
    // stop scripted loop
    setMode('live');
    setLiveYou('');
    setLiveBot('');
    setLiveInterim('');
    setPhase('listening');
    const rec = new SR();
    rec.lang = 'en-US';
    rec.interimResults = true;
    rec.continuous = false;
    rec.maxAlternatives = 1;
    let finalTranscript = '';
    rec.onresult = (e) => {
      // Separate finalized text from in-progress (interim) text.
      let interim = '';
      for (let i = e.resultIndex; i < e.results.length; i++) {
        const r = e.results[i];
        if (r.isFinal) finalTranscript += r[0].transcript;
        else interim += r[0].transcript;
      }
      setLiveInterim(interim);
      if (finalTranscript) setLiveYou(finalTranscript);
    };
    rec.onerror = (e) => { setPhase('idle'); setLiveBot('Mic error: ' + e.error); };
    rec.onend = () => {
      const text = (finalTranscript || '').trim();
      if (text) handleUserTurn(text);
      else { setPhase('idle'); setLiveInterim(''); }
    };
    // start() throws if recognition is already running; ignore.
    try { rec.start(); } catch(e) {}
    recRef.current = rec;
  }
  // Stop recognition and any in-flight TTS; best-effort, errors ignored.
  function stopLive() {
    try { recRef.current && recRef.current.stop(); } catch(e) {}
    try { window.speechSynthesis.cancel(); } catch(e) {}
    setPhase('idle');
  }
  // Return to the scripted demo loop from live mode.
  function resetToDemo() {
    stopLive();
    setMode('demo');
    setLiveYou('');
    setLiveBot('');
    setLiveInterim('');
    setIdx(0);
  }
  // Cleanup on unmount
  React.useEffect(() => () => {
    try { recRef.current && recRef.current.abort(); } catch(e) {}
    try { window.speechSynthesis.cancel(); } catch(e) {}
  }, []);
  // Derived render values: live mode shows real transcripts, demo mode the script.
  const demoEx = demoExchanges[idx];
  const activeDomain = mode === 'live' ? domain : demoEx.domain;
  const youText = mode === 'live' ? (liveYou || liveInterim) : demoEx.you;
  const botText = mode === 'live' ? liveBot : (phase === 'done' ? demoEx.bot : typed);
  // NOTE(review): both branches of this ternary are identical — the mode
  // check is redundant and the expression could collapse to one condition.
  const showBot = mode === 'live' ? (phase === 'thinking' || phase === 'bot' || phase === 'done') : (phase === 'thinking' || phase === 'bot' || phase === 'done');
  const orbState = phase === 'listening' ? 'listening' : phase === 'thinking' ? 'thinking' : phase === 'bot' ? 'speaking' : 'idle';
  return (
VEIRON CORTEX · V.04
{mode === 'live' ? 'LIVE · VOICE' : 'LIVE'}
{Array.from({length: 22}).map((_, i) => ( ))}
{domains.map(d => ( {d} ))}
{(mode === 'demo' || youText) && (
{youText || }
)} {showBot && (
{phase === 'thinking' ? ( ) : ( <>{botText}{phase === 'bot' && } )}
)}
{/* Mic control */}
{supported ? ( phase === 'listening' ? ( ) : ( ) ) : ( Voice requires Chrome or Edge · demo running )}
); }
// Promise-based sleep used by the demo loop and typewriter effects.
function wait(ms) { return new Promise(r => setTimeout(r, ms)); }
// Forge visual — three.js + ColladaLoader loading threejs.org's stormtrooper
// with its built-in walk animation. Mouse/touch drag to orbit.
function ForgeVisual() {
  const mountRef = React.useRef(null); // container div the canvas is appended to
  React.useEffect(() => {
    const mount = mountRef.current;
    if (!mount) return;
    let renderer, scene, camera, mixer, controls, clock, raf;
    let disposed = false;  // guards async callbacks after unmount
    const cleanup = {};    // holds handles (ResizeObserver, event listener) for teardown
    // Builds the whole scene. Returns false when window.THREE isn't loaded
    // yet, in which case we retry on the 'three-ready' event below.
    function init() {
      const THREE = window.THREE;
      if (!THREE) return false;
      const ColladaLoader = window.ColladaLoader;
      const OrbitControls = window.OrbitControls;
      const w = mount.clientWidth, h = mount.clientHeight;
      scene = new THREE.Scene();
      scene.background = null; // transparent — page background shows through
      camera = new THREE.PerspectiveCamera(38, w / h, 0.1, 100);
      camera.position.set(6.5, 2.0, 11.5);
      renderer = new THREE.WebGLRenderer({ antialias: true, alpha: true });
      renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2));
      renderer.setSize(w, h);
      renderer.outputColorSpace = THREE.SRGBColorSpace;
      mount.appendChild(renderer.domElement);
      // Lights — three-point setup for a cinematic look
      scene.add(new THREE.AmbientLight(0x8ab4ff, 0.6));
      const key = new THREE.DirectionalLight(0xffffff, 2.2);
      key.position.set(3, 5, 4);
      scene.add(key);
      const rim = new THREE.DirectionalLight(0x66ccff, 1.4);
      rim.position.set(-3, 2, -4);
      scene.add(rim);
      const fill = new THREE.DirectionalLight(0xffb066, 0.8);
      fill.position.set(-2, 1, 3);
      scene.add(fill);
      // Orbit only (no pan/zoom), clamped so the camera can't go under/over the model.
      controls = new OrbitControls(camera, renderer.domElement);
      controls.enableDamping = true;
      controls.dampingFactor = 0.08;
      controls.enablePan = false;
      controls.enableZoom = false;
      controls.target.set(0, 1.6, 0);
      controls.minPolarAngle = Math.PI * 0.2;
      controls.maxPolarAngle = Math.PI * 0.55;
      clock = new THREE.Clock();
      const loader = new ColladaLoader();
      loader.load(
        'https://threejs.org/examples/models/collada/stormtrooper/stormtrooper.dae',
        (collada) => {
          if (disposed) return; // component unmounted before the model arrived
          const avatar = collada.scene;
          const animations = avatar.animations;
          scene.add(avatar);
          if (animations && animations.length) {
            mixer = new THREE.AnimationMixer(avatar);
            mixer.clipAction(animations[0]).play();
          }
        },
        undefined,
        (err) => console.error('Collada load failed', err)
      );
      // Render loop: advance the walk animation and damped orbit each frame.
      function animate() {
        if (disposed) return;
        raf = requestAnimationFrame(animate);
        const dt = clock.getDelta();
        if (mixer) mixer.update(dt);
        controls.update();
        renderer.render(scene, camera);
      }
      animate();
      // Keep canvas and camera in sync with the container size.
      const ro = new ResizeObserver(() => {
        const W = mount.clientWidth, H = mount.clientHeight;
        renderer.setSize(W, H);
        camera.aspect = W / H;
        camera.updateProjectionMatrix();
      });
      ro.observe(mount);
      cleanup.ro = ro;
      return true;
    }
    if (!init()) {
      // three.js not loaded yet — retry once when the loader script signals readiness.
      const onReady = () => init();
      window.addEventListener('three-ready', onReady, { once: true });
      cleanup.onReady = onReady;
    }
    // Teardown: stop the loop, detach observers/listeners, release GL resources.
    return () => {
      disposed = true;
      cancelAnimationFrame(raf);
      if (cleanup.ro) cleanup.ro.disconnect();
      if (cleanup.onReady) window.removeEventListener('three-ready', cleanup.onReady);
      if (renderer) {
        renderer.dispose();
        if (renderer.domElement && renderer.domElement.parentNode) {
          renderer.domElement.parentNode.removeChild(renderer.domElement);
        }
      }
      if (controls) controls.dispose();
    };
  }, []);
  return (
RIG
MESH
TEXTURE
ANIMATION
); }
// Products section — two-column marketing layout presenting LifeCoPilot
// (with CoPilotVisual) and Forge (with ForgeVisual).
// NOTE(review): the JSX markup inside this return appears to have been
// stripped during extraction; only the text content survives below.
function Products() { return (
01 / The Ecosystem

Two products.
One living intelligence.

LifeCoPilot meets the everyday you. Forge lets builders build on the same mind. Both speak one language — your signals.

{/* -------- LifeCoPilot block -------- */}
/ 01 · FOR INDIVIDUALS VEIRON CORTEX

LifeCoPilot

Talk to it. It learns you. It acts ahead of you.

LifeCoPilot is a voice-first, adaptive assistant for your whole life — calendar, work, home, family, memory, and yes, wellbeing too. Proactive when it matters, invisible when it doesn't.

  • Voice-first interaction — just speak. It drafts, plans, schedules, reminds.
  • Adaptive & proactive — learns your patterns and surfaces the next move before you ask.
  • Whole-life context — calendar, work, relationships, errands, memory, wellbeing.
  • Private by architecture — signals stay encrypted; your keys never leave you.
Discover LifeCoPilot
{/* -------- Forge block -------- */}
/ 02 · FOR BUILDERS GEN AI PLATFORM

Forge

Generate characters, voices, worlds — on one sovereign stack.

Forge is the generative AI platform for creators and businesses. Spin up 3D characters — rigged and animated — and spawn voices, worlds, and agents, all running on the Veiron Grid with structured pipelines and first-class SDKs.

  • Gen 3D characters — rig, mesh, texture, and animate from a prompt.
  • Voices, worlds & agents — compose generative experiences end-to-end.
  • Fine-tune Cortex on your domain with just-in-time weights.
  • Sovereign GPU grid — elastic, private, sub-50ms inference.
Discover Forge
GENERATING · HUMAN
RIG + DANCE · LIVE
); }
// Expose Products globally — this file is loaded as a plain script tag
// (no module system), so consumers read it from window.
window.Products = Products;