# sdk-rust
**victor:** hey, is there a way to consistently get the `RoomEvent::TrackSubscribed` event after connecting to a room? When I run my LiveKit server locally I only get the event about 6 times out of 10; it feels random at the moment. Can I make a request to LiveKit to fetch the room event if I don't receive it after a while?
**reply:** Hey victor, can you share a code example here? Are you just listening to the events channel?
**victor:** yeah, like the demo in the repo. I actually switched to the new voice assistant Python example last night, and I'm using a custom backend plugin instead of the `openai.LLM()` class, but I get a "connection closed" error:
```python
import asyncio
import aiohttp
from livekit.agents import llm


class LLM(llm.LLM):
    async def chat(
        self,
        history: llm.ChatContext,
        temperature: float | None = None,
        n: int | None = None,
        fnc_ctx: llm.FunctionContext | None = None,
    ) -> "LLMStream":
        query = history.messages[-1].text
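        # note: both `async with` blocks below exit as soon as chat() returns,
        # closing the HTTP connection before LLMStream has consumed the body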
        async with aiohttp.ClientSession() as session:
            async with session.post(
                "https://example.com/v1/chat-http", json={"name": "example", "query": query}
            ) as response:
                return LLMStream(response.content)


class LLMStream(llm.LLMStream):
    def __init__(self, response_stream: aiohttp.StreamReader) -> None:
        super().__init__()
        self._response_stream = response_stream

    def __aiter__(self) -> "LLMStream":
        return self

    async def __anext__(self) -> llm.ChatChunk:
        print("__anext__ is __anext__")
        async for chunk in self._response_stream:
            print(f"__anext__  chunk is {chunk}")
            chunk_data = chunk.decode()
            return llm.ChatChunk(
                choices=[
                    llm.Choice(
                        delta=llm.ChoiceDelta(content=chunk_data),
                        index=0,
                    )
                ]
            )

        raise StopAsyncIteration

    async def aclose(self) -> None:
        # self._response.close()
        pass
```

The traceback from the agent logs (job_id `AJ_5Pt8wW6qoUgm`, pid 92470):

```
    async for chunk in self._response_stream:
  File "/Users/vic8or/dev/livekit_voice/env/lib/python3.11/site-packages/aiohttp/streams.py", line 50, in __anext__
    rv = await self.read_func()
         ^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/vic8or/dev/livekit_voice/env/lib/python3.11/site-packages/aiohttp/streams.py", line 317, in readline
    return await self.readuntil()
           ^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/vic8or/dev/livekit_voice/env/lib/python3.11/site-packages/aiohttp/streams.py", line 325, in readuntil
    raise self._exception
aiohttp.client_exceptions.ClientConnectionError: Connection closed
```
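The traceback points at a lifetime issue rather than the HTTP request itself: both `async with` blocks in `chat()` exit as soon as the method returns, so aiohttp closes the connection before `LLMStream` has read any of the body. Below is a minimal sketch of one way around it, keeping the session and response alive until `aclose()`; the URL, payload, and `llm.*` names are carried over from the snippet above, not verified against a real backend:

```python
import aiohttp
from livekit.agents import llm


class LLM(llm.LLM):
    async def chat(
        self,
        history: llm.ChatContext,
        temperature: float | None = None,
        n: int | None = None,
        fnc_ctx: llm.FunctionContext | None = None,
    ) -> "LLMStream":
        query = history.messages[-1].text
        # No `async with`: the session and response must outlive chat(),
        # since LLMStream reads the body incrementally after we return.
        session = aiohttp.ClientSession()
        response = await session.post(
            "https://example.com/v1/chat-http",  # placeholder endpoint from the snippet
            json={"name": "example", "query": query},
        )
        return LLMStream(session, response)


class LLMStream(llm.LLMStream):
    def __init__(self, session: aiohttp.ClientSession, response: aiohttp.ClientResponse) -> None:
        super().__init__()
        self._session = session
        self._response = response

    def __aiter__(self) -> "LLMStream":
        return self

    async def __anext__(self) -> llm.ChatChunk:
        # StreamReader keeps its position, so resuming iteration on each
        # __anext__ call continues where the previous chunk left off.
        async for chunk in self._response.content:
            return llm.ChatChunk(
                choices=[
                    llm.Choice(
                        delta=llm.ChoiceDelta(content=chunk.decode()),
                        index=0,
                    )
                ]
            )
        raise StopAsyncIteration

    async def aclose(self) -> None:
        # The stream is exhausted (or abandoned), so release the
        # HTTP resources that chat() intentionally left open.
        self._response.release()
        await self._session.close()
```

The design choice is simply to move ownership of the connection into the stream object: `aclose()` becomes the place where the session is actually closed, instead of the context managers tearing it down mid-stream.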