little-honey-89160
07/11/2025, 11:50 AM
import * as React from 'react';
import {
  StyleSheet,
  View,
  Text,
  FlatList,
  ListRenderItem,
} from 'react-native';
import { useEffect } from 'react';
import {
  AudioSession,
  LiveKitRoom,
  useTracks,
  TrackReferenceOrPlaceholder,
  VideoTrack,
  isTrackReference,
  registerGlobals,
} from '@livekit/react-native';
import { Track } from 'livekit-client';

registerGlobals();

const LiveViewScreen = () => {
  useEffect(() => {
    let start = async () => {
      await AudioSession.startAudioSession();
    };
    start();
    return () => {
      AudioSession.stopAudioSession();
    };
  }, []);

  return (
    <LiveKitRoom
      serverUrl={wsURL}
      token={token}
      connect={true}
      options={{
        adaptiveStream: { pixelDensity: 'screen' },
      }}
      audio={true}
      video={true}
    >
      <RoomView />
    </LiveKitRoom>
  );
};

const RoomView = () => {
  // Get all camera tracks.
  // The useTracks hook grabs the tracks from the LiveKitRoom component
  // providing the context for the Room object.
  const tracks = useTracks([Track.Source.Camera]);

  const renderTrack: ListRenderItem<TrackReferenceOrPlaceholder> = ({ item }) => {
    // Render using the VideoTrack component.
    if (isTrackReference(item)) {
      return (<VideoTrack trackRef={item} style={styles.participantView} />);
    } else {
      return (<View style={styles.participantView} />);
    }
  };

  return (
    <View style={styles.container}>
      <FlatList
        data={tracks}
        renderItem={renderTrack}
      />
    </View>
  );
};

const styles = StyleSheet.create({
  container: {
    flex: 1,
    alignItems: 'stretch',
    justifyContent: 'center',
  },
  participantView: {
    height: 300,
  },
});

export default LiveViewScreen;
But I get an error:
TypeError: r.addEventListener is not a function (it is undefined)
and then some warnings:
WARN Sending `onAnimatedValueUpdate` with no listeners registered.
I am using React Native with Expo:
"@livekit/react-native": "^2.7.6",
"@livekit/react-native-expo-plugin": "^1.0.1",
"@livekit/react-native-webrtc": "^125.0.11",
"expo": "~52.0.31",
I installed it through the Expo plugin https://github.com/livekit/client-sdk-react-native-expo-plugin
Any tips on what might be wrong here? The plugin doesn't seem to have been updated in a while; should it still be working?
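One thing worth checking with this setup (an assumption on my part, not something confirmed in the thread): errors like "addEventListener is not a function" on startup usually mean the native module from @livekit/react-native-webrtc was never linked into the running binary, which happens when the app is launched in Expo Go or in a build generated before the config plugin was added. A minimal sketch of the relevant Expo config, assuming a TypeScript app.config.ts; the name and slug values below are placeholders, not taken from the thread:

// app.config.ts -- minimal sketch; "my-app" is a placeholder.
import { ExpoConfig } from 'expo/config';

const config: ExpoConfig = {
  name: 'my-app',   // placeholder app name
  slug: 'my-app',   // placeholder slug
  plugins: [
    // Config plugin from @livekit/react-native-expo-plugin; assumed to set up
    // the native WebRTC dependencies when the iOS/Android projects are generated.
    '@livekit/react-native-expo-plugin',
  ],
};

export default config;

After a change like this, the native projects have to be regenerated (npx expo prebuild) and the app run as a development build via npx expo run:ios or npx expo run:android; Expo Go does not bundle custom native modules, so the WebRTC objects stay undefined there.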

wonderful-nightfall-7721
07/12/2025, 7:19 AM
I am trying to run react-native-meet on my macOS machine. For some reason, I am not able to run the app; I am hitting a build error. Can someone help here? Please find attached the error screenshot. I am stuck, and some help or direction would help me make progress.

stocky-portugal-86826
07/15/2025, 12:53 PM
SpeechServiceConnection_EndSilenceTimeoutMs

important-psychiatrist-73895
07/15/2025, 7:29 PM
public var inputDevice: AudioDevice {
    get {
        #if os(macOS)
        AudioDevice(ioDevice: RTC.audioDeviceModule.inputDevice)
        #else
        AudioDevice(ioDevice: LKRTCIODevice.defaultDevice(with: .input))
        #endif
    }
    set {
        #if os(macOS)
        RTC.audioDeviceModule.inputDevice = newValue._ioDevice
        #endif
    }
}
And RTCIODevice.h doesn't seem to have a way to create one from a specified input.
Is this intentional because there's some underlying issue with supporting a specific device on iOS (e.g. a plugged-in microphone)? Or is it just not implemented because it wasn't a priority yet?
From what I can tell so far, I'd need to make a branch with some edits to enable this in LiveKit's branch of WebRTC, build the ltwebrtc framework for iOS, and then implement it in client-sdk-swift.
BUT if there's an easier way, I would love to know it!

fierce-knife-30651
07/16/2025, 2:46 PM
The following build commands failed:
CompileC /Users/expo/Library/Developer/Xcode/DerivedData/ShinyLive-hhdpofmyzlqvkjbxuvvmqpvjyccg/Build/Intermediates.noindex/ArchiveIntermediates/ShinyLive/IntermediateBuildFilesPath/Pods.build/Release-iphoneos/livekit-react-native-webrtc.build/Objects-normal/arm64/WebRTCModule.o /Users/expo/workingdir/build/node_modules/@livekit/react-native-webrtc/ios/RCTWebRTC/WebRTCModule.m normal arm64 objective-c com.apple.compilers.llvm.clang.1_0.compiler (in target 'livekit-react-native-webrtc' from project 'Pods')
• Expo SDK 51.0.38
• @livekit/react-native-webrtc: 125.0.9

delightful-telephone-54565
07/17/2025, 6:34 AM
defaultCameraCaptureOptions: CameraCaptureOptions(
  cameraPosition: CameraPosition.back,
  params: VideoParameters(
    dimensions: VideoDimensionsPresets.h1440_169,
    encoding: VideoEncoding(
      maxBitrate: 25000000, // 25 Mbps
      maxFramerate: 60,
    ),
  ),
),
defaultVideoPublishOptions: VideoPublishOptions(
  videoCodec: "H.264",
  videoEncoding: VideoEncoding(
    maxBitrate: 25000000, // 25 Mbps
    maxFramerate: 60,
  ),
  simulcast: false
)
Thanks in advance.

fast-thailand-41521
07/17/2025, 11:14 AM
I am getting errors that reference Packages/io.livekit.livekit-sdk/Runtime/Plugins/Google.Protobuf.dll. For example: "the namespace Google could not be found". The same error does not occur on Windows. How can I fix this? Thanks.