lively-minister-89835
09/10/2025, 11:34 AM

tall-airline-16756
09/10/2025, 11:34 AM

tall-airline-16756
09/10/2025, 11:35 AM

lively-minister-89835
09/10/2025, 11:37 AM

tall-airline-16756
09/10/2025, 11:38 AM

from livekit.agents import AutoSubscribe, JobContext, WorkerOptions, cli
from livekit.agents.multimodal import MultimodalAgent
from livekit.plugins import openai


async def entrypoint(ctx: JobContext):
    # Connect to the room and subscribe to audio tracks only
    await ctx.connect(auto_subscribe=AutoSubscribe.AUDIO_ONLY)

    # Create a multimodal agent backed by OpenAI's Realtime API
    agent = MultimodalAgent(
        model=openai.realtime.RealtimeModel(
            instructions="You are a helpful assistant.",
            voice="alloy",
            temperature=0.8,
            modalities=["text", "audio"],
        )
    )

    # Start the agent in the room
    agent.start(ctx.room)


if __name__ == "__main__":
    # Run the worker with the entrypoint function
    cli.run_app(WorkerOptions(entrypoint_fnc=entrypoint))
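If you want the agent to speak first when it joins, you can keep a reference to the Realtime model and seed its session with an initial message. Here's a minimal sketch, assuming the same pre-1.0 MultimodalAgent / RealtimeModel API as above; the sessions list, conversation.item.create, and response.create calls follow LiveKit's realtime examples, so double-check them against your installed SDK version:

from livekit.agents import llm

# Inside entrypoint(): keep the model in a variable so its realtime session is reachable
model = openai.realtime.RealtimeModel(
    instructions="You are a helpful assistant.",
    voice="alloy",
)
agent = MultimodalAgent(model=model)
agent.start(ctx.room)

# Seed the conversation and ask the model to generate the first (spoken) response
session = model.sessions[0]
session.conversation.item.create(
    llm.ChatMessage(
        role="assistant",
        content="Please begin the interaction with the user in a manner consistent with your instructions.",
    )
)
session.response.create()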
React Frontend Components
Session View Component
'use client';

import React, { useEffect, useState } from 'react';
import { AnimatePresence, motion } from 'motion/react';
import {
  type AgentState,
  type ReceivedChatMessage,
  useRoomContext,
  useVoiceAssistant,
} from '@livekit/components-react';
import { AgentControlBar } from '@/components/livekit/agent-control-bar/agent-control-bar';
import { ChatEntry } from '@/components/livekit/chat/chat-entry';
import { ChatMessageView } from '@/components/livekit/chat/chat-message-view';
import { MediaTiles } from '@/components/livekit/media-tiles';
import useChatAndTranscription from '@/hooks/useChatAndTranscription';

// The agent counts as "available" once it is actively listening, thinking, or speaking
function isAgentAvailable(agentState: AgentState) {
  return agentState === 'listening' || agentState === 'thinking' || agentState === 'speaking';
}

export const SessionView = ({ disabled, capabilities, sessionStarted, ref }) => {
  const { state: agentState } = useVoiceAssistant();
  const [chatOpen, setChatOpen] = useState(false);
  const { messages, send } = useChatAndTranscription();
  const room = useRoomContext();

  async function handleSendMessage(message: string) {
    await send(message);
  }

  // If no agent becomes available within 10 seconds of the session starting,
  // disconnect from the room instead of leaving the user waiting
  useEffect(() => {
    if (sessionStarted) {
      const timeout = setTimeout(() => {
        if (!isAgentAvailable(agentState)) {
          room.disconnect();
        }
      }, 10_000);
      return () => clearTimeout(timeout);
    }
  }, [agentState, sessionStarted, room]);

  // The session layout markup is omitted in this snippet
  return null;
};
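In the full agent-starter-react template, that return statement is where the component renders the chat history (ChatMessageView with ChatEntry items), the MediaTiles for the agent's audio and video, and the AgentControlBar, with motion/AnimatePresence transitions; the markup is omitted here, which is why several of the imports above appear unused.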
To get you started smoothly:
• Create the agent by running lk app create --template agent-starter-python.
• Set up the frontend with lk app create --template agent-starter-react.
• Once that's done, just install the dependencies and run both applications; see the configuration note below.
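One note on configuration: both starters expect LiveKit credentials in environment variables. Typically that means LIVEKIT_URL, LIVEKIT_API_KEY, and LIVEKIT_API_SECRET in each project's .env.local (the lk CLI offers to fill these in when it creates the project), plus OPENAI_API_KEY for the agent. The exact run commands depend on the template version, but it's usually something like python agent.py dev for the worker and the standard dev script for the frontend.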
If you have any questions or need a hand with anything, please don’t hesitate to ask—I’m here to help!
Sources: OpenAI and LiveKit partner to turn Advanced Voice into an API | complex-agents/role-playing/role_playing_frontend/components/livekit/agent-... | AI Voice Assistant Quickstart | LiveKit Docs