Skip to content

Always getting "LiveKitKrispNoiseFilter Process failed, channel: 0" on iOS #323

@kusalkalinga00

Description

@kusalkalinga00

On iOS, every conversation attempt produces the following Xcode log:
LiveKitKrispNoiseFilter Process failed, channel: 0
The noise filter appears to have no effect and the error repeats continuously during the session.

Environment

Package Version
@livekit/react-native-krisp-noise-filter 0.0.3
Native pod LiveKitKrispNoiseFilter 0.0.8
@livekit/react-native 2.9.6
livekit-client 2.17.1
React Native 0.79.2
Platform iOS (physical device)

Code (TestScreen.tsx):

`import {
SafeAreaView,
Text,
TouchableOpacity,
View,
Platform,
} from 'react-native';
import React, {useCallback, useEffect, useRef, useState} from 'react';
import styles from './TestScreen.styles';
import LottieView from 'lottie-react-native';
import MainButton from '../../../components/MainButton/MainButton';
import {useKeepAwake} from '@sayem314/react-native-keep-awake';
import {Mic, MicOff, X} from 'lucide-react-native';
import {requestMicrophonePermission} from '../../../util/permissions.util';
import {
Room,
RoomEvent,
RemoteParticipant,
ParticipantEvent,
Track,
LocalAudioTrack,
DisconnectReason,
} from 'livekit-client';
import {
AgentState,
AudioSession,
getDefaultAppleAudioConfigurationForMode,
} from '@livekit/react-native';
import {
KrispNoiseFilter,
isKrispNoiseFilterSupported,
type KrispNoiseFilterProcessor,
} from '@livekit/react-native-krisp-noise-filter';
import {COLORS} from '../../../common';
import {useGetLiveKitStagingAgentChatSessionTokenMutation} from '../../../redux/api/chatConfigAPI';

const TestScreen = () => {
// Voice-agent conversation screen: connects to a LiveKit room, enables the
// microphone, and attaches the Krisp noise filter to the local audio track.
useKeepAwake();
// Drives the Lottie blob animation speed; 0 = paused, 1 = playing.
const [lottieSpeed, setLottieSpeed] = useState(0);
const [startButtonLoading, setStartButtonLoading] = useState(false);
const [isMicMute, setIsMicMute] = useState(false);
const [conversationStarted, setConversationStarted] =
useState(false);

// Mirrors the agent participant's 'lk.agent.state' attribute
// (values observed below: 'idle' | 'listening' | 'thinking' | 'speaking').
const [agentState, setAgentState] = useState('idle');
const [conversationError, setConversationError] = useState<string | null>(
null,
);

const lottieRef = useRef(null);
// NOTE(review): `new Room()` executes on every render; useRef keeps only the
// first instance and the rest are constructed and discarded. Consider a
// lazy-init pattern (nullable ref filled once) — verify against Room's ctor
// side effects before changing.
const roomRef = useRef(new Room());
const agentParticipantRef = useRef<RemoteParticipant | null>(null);
// Krisp processor instance plus the mediaStreamTrack id it is attached to,
// so re-attachment happens only when the underlying track is replaced.
const krispProcessorRef = useRef<KrispNoiseFilterProcessor | null>(null);
const krispTrackIdRef = useRef<string | null>(null);
// Distinguishes a user-initiated hang-up from an unexpected disconnect.
const isManualEndRef = useRef(false);

const [getLiveKitAiChatSessionToken] =
useGetLiveKitStagingAgentChatSessionTokenMutation();

// Extracts a human-readable message from an unknown error value, falling
// back to the supplied default when none is available.
const getSafeErrorMessage = useCallback(
  (error: unknown, fallback: string): string => {
    if (error instanceof Error) {
      if (error.message) {
        return error.message;
      }
    }
    return fallback;
  },
  [],
);

// Maps a LiveKit disconnect reason to a user-facing message.
const getDisconnectMessage = useCallback(
  (reason?: DisconnectReason): string => {
    if (reason === DisconnectReason.DUPLICATE_IDENTITY) {
      return 'Joined from another device.';
    }
    if (reason === DisconnectReason.ROOM_DELETED) {
      return 'Conversation room ended.';
    }
    return 'Connection lost. Please restart.';
  },
  [],
);

// Promise-based sleep helper used by the Krisp attach retry loop.
const wait = useCallback(
  (ms: number) =>
    new Promise<void>(resolve => {
      setTimeout(() => resolve(), ms);
    }),
  [],
);

// Finds the local participant's published microphone track. Returns null
// when there is no microphone publication yet, or when the publication's
// track is not (yet) a LocalAudioTrack.
const getMicrophoneLocalAudioTrack = useCallback(
  (room: Room): LocalAudioTrack | null => {
    for (const publication of room.localParticipant.trackPublications.values()) {
      if (publication.source === Track.Source.Microphone) {
        const track = publication.track;
        return track instanceof LocalAudioTrack ? track : null;
      }
    }
    return null;
  },
  [],
);

// Lazily creates the Krisp noise-filter processor and attaches it to the
// current microphone track, retrying briefly (6 x 250 ms) while the track
// publication settles after setMicrophoneEnabled(true).
const attachKrispNoiseFilter = useCallback(
  async (room: Room) => {
    if (!isKrispNoiseFilterSupported()) {
      return;
    }
    if (krispProcessorRef.current === null) {
      krispProcessorRef.current = KrispNoiseFilter();
      await krispProcessorRef.current.setEnabled(true);
    }
    let attemptsLeft = 6;
    while (attemptsLeft > 0) {
      const micTrack = getMicrophoneLocalAudioTrack(room);
      if (micTrack) {
        const trackId = micTrack.mediaStreamTrack.id;
        // Only (re-)attach when the underlying media track has changed
        // (e.g. after an unmute creates a fresh mediaStreamTrack).
        if (krispTrackIdRef.current !== trackId) {
          await micTrack.setProcessor(krispProcessorRef.current);
          krispTrackIdRef.current = trackId;
        }
        return;
      }
      attemptsLeft -= 1;
      await wait(250);
    }
  },
  [getMicrophoneLocalAudioTrack, wait],
);

// Detaches the Krisp processor from the mic track (when one exists) and
// destroys it. Failures are logged rather than thrown so teardown always
// runs to completion.
const cleanupKrispNoiseFilter = useCallback(
  async (room: Room) => {
    const micTrack = getMicrophoneLocalAudioTrack(room);
    if (micTrack) {
      try {
        await micTrack.stopProcessor();
      } catch (e) {
        console.warn(e);
      }
    }
    const processor = krispProcessorRef.current;
    if (processor) {
      try {
        await processor.destroy();
      } catch (e) {
        console.warn(e);
      }
      krispProcessorRef.current = null;
    }
    krispTrackIdRef.current = null;
  },
  [getMicrophoneLocalAudioTrack],
);

// Tracks the agent participant and mirrors its 'lk.agent.state' attribute
// into local state.
//
// Fix: this is invoked from both RoomEvent.Connected and
// RoomEvent.ParticipantConnected, so the original stacked a duplicate
// AttributesChanged listener on the same participant each time, and leaked
// the listeners of a previously-tracked participant when the ref was
// overwritten. Now re-attaching to the same participant is a no-op and a
// replaced participant has its listeners removed first.
const attachAgentListeners = (participant: RemoteParticipant) => {
  if (agentParticipantRef.current === participant) {
    return; // already wired up — avoid duplicate listeners
  }
  if (agentParticipantRef.current) {
    agentParticipantRef.current.removeAllListeners();
  }
  agentParticipantRef.current = participant;

  // Seed state from attributes that were set before we subscribed.
  const state = participant.attributes['lk.agent.state'] as AgentState;
  if (state) setAgentState(state);

  // Follow subsequent attribute updates pushed by the LiveKit agent.
  participant.on(
    ParticipantEvent.AttributesChanged,
    (changedAttributes: Record<string, string>) => {
      const newState = changedAttributes['lk.agent.state'] as AgentState;
      if (newState) {
        setAgentState(newState);
      }
    },
  );
};

// Stops tracking the agent participant and removes its event listeners.
const detachAgentListeners = () => {
  const agent = agentParticipantRef.current;
  if (agent !== null) {
    agent.removeAllListeners();
    agentParticipantRef.current = null;
  }
};

// Audio Session Setup for iOS: configures the Apple audio session for
// simultaneous local + remote audio, starts it on mount, stops on unmount.
useEffect(() => {
  if (Platform.OS !== 'ios') return;
  const start = async () => {
    const config = getDefaultAppleAudioConfigurationForMode(
      'localAndRemote',
      true,
    );
    await AudioSession.setAppleAudioConfiguration(config);
    await AudioSession.startAudioSession();
  };
  // Fix: the original `start()` call left rejections unhandled; surface
  // session-setup failures in the log instead.
  start().catch(console.warn);
  return () => {
    void AudioSession.stopAudioSession();
  };
}, []);

// Audio Session Setup for Android: routes output speaker-first and takes
// audio focus for the session; starts on mount, stops on unmount.
useEffect(() => {
  if (Platform.OS !== 'android') return;
  const start = async () => {
    await AudioSession.configureAudio({
      android: {
        preferredOutputList: ['speaker', 'bluetooth', 'earpiece'],
        audioTypeOptions: {
          manageAudioFocus: true,
          audioMode: 'normal',
          audioFocusMode: 'gain',
          audioStreamType: 'music',
        },
      },
    });
    await AudioSession.startAudioSession();
  };
  // Fix: the original `start()` call left rejections unhandled; surface
  // session-setup failures in the log instead.
  start().catch(console.warn);
  return () => {
    void AudioSession.stopAudioSession();
  };
}, []);

// Cleanup on Unmount: tear down the Krisp processor first, THEN disconnect.
// Fix: the original fired both promises concurrently (`void cleanup…` and
// `void disconnect…`), so the room could tear down the audio track while
// stopProcessor() was still running on it. The ref is also captured at
// effect setup so the cleanup closure does not read roomRef.current late.
useEffect(() => {
  const room = roomRef.current;
  return () => {
    detachAgentListeners();
    void cleanupKrispNoiseFilter(room)
      .catch(console.warn)
      .finally(() => {
        void room.disconnect();
      });
  };
}, [cleanupKrispNoiseFilter]);

// Play the blob animation only while the agent is speaking; pause otherwise.
useEffect(() => {
  const speaking = agentState === 'speaking';
  setLottieSpeed(speaking ? 1 : 0);
  if (speaking) {
    lottieRef.current?.play();
  } else {
    lottieRef.current?.pause();
  }
}, [agentState]);

// Starts a conversation: fetches a LiveKit session token, wires room
// listeners, connects, enables the microphone, and attaches the Krisp
// noise filter. All failures are reported through conversationError.
const handleStartButton = async () => {
  const hasPermission = await requestMicrophonePermission();
  if (!hasPermission) {
    // Fix: the original threw here, OUTSIDE the try/catch below, which
    // produced an unhandled promise rejection from the onPress handler and
    // gave the user no feedback. Report through UI state instead.
    setConversationError('Microphone permission denied.');
    return;
  }

  setStartButtonLoading(true);
  setConversationError(null);
  isManualEndRef.current = false;

  try {
    const response = await getLiveKitAiChatSessionToken({
      unit_no: 1,
      lesson_no: 1,
      activation_no: 2,
    }).unwrap();

    const room = roomRef.current;
    // Drop listeners from any previous session before re-wiring.
    room.removeAllListeners();

    room.on(
      RoomEvent.ParticipantConnected,
      (participant: RemoteParticipant) => {
        attachAgentListeners(participant);
      },
    );

    room.on(RoomEvent.Connected, () => {
      // The agent may already be in the room when we join.
      room.remoteParticipants.forEach(p => attachAgentListeners(p));
    });

    room.on(RoomEvent.Disconnected, (reason?: DisconnectReason) => {
      console.log('Disconnected from room, reason:', reason);
      setAgentState('idle');
      detachAgentListeners();
      // Only report unexpected disconnects; manual hang-ups stay silent,
      // and a deleted room ends the session without an error banner.
      if (!isManualEndRef.current) {
        setConversationStarted(false);
        if (reason !== DisconnectReason.ROOM_DELETED) {
          setConversationError(getDisconnectMessage(reason));
        }
      }
    });

    await room.connect(
      response.payload.server_url,
      response.payload.participant_token,
    );

    await room.localParticipant.setMicrophoneEnabled(true);
    await attachKrispNoiseFilter(room);

    setConversationStarted(true);
  } catch (error) {
    setConversationError(
      getSafeErrorMessage(error, 'Failed to start conversation.'),
    );
    setConversationStarted(false);
  } finally {
    setStartButtonLoading(false);
  }
};

// Ends the conversation by user action: Krisp teardown, then disconnect.
// Fix: resets run in `finally` so a throwing cleanup/disconnect can no
// longer strand isManualEndRef at true or leave the UI in the "started"
// state (the original skipped every reset on throw).
const handleEndButton = async () => {
  isManualEndRef.current = true;
  detachAgentListeners();
  try {
    await cleanupKrispNoiseFilter(roomRef.current);
    await roomRef.current.disconnect();
  } finally {
    isManualEndRef.current = false;
    setAgentState('idle');
    setConversationStarted(false);
  }
};

// Toggles the local microphone. Re-enabling creates a fresh media track,
// so the Krisp filter is re-attached afterwards.
const handleMicMute = async () => {
  const room = roomRef.current;
  if (!room.localParticipant) {
    return;
  }
  const muted = !isMicMute;
  await room.localParticipant.setMicrophoneEnabled(!muted);
  if (!muted) {
    await attachKrispNoiseFilter(room);
  }
  setIsMicMute(muted);
};

// Human-readable label for the current agent state; null when no
// conversation is running or the state has no label.
const getStatusLabel = (): string | null => {
  if (!conversationStarted) {
    return null;
  }
  const labels: Record<string, string> = {
    thinking: 'Thinking...',
    speaking: 'Speaking',
    listening: 'Listening',
  };
  return labels[agentState] ?? null;
};

// NOTE(review): the JSX below appears garbled by the issue paste — between
// `return (` and the `)}` preceding the main content, the outer container
// element (and its opening tag), the closing </Text> of the error message,
// the "Restart" label's closing tags, and the error banner's closing </View>
// are all missing, so this block will not compile as-is. Confirm against the
// original TestScreen.tsx before relying on its structure.
return (

{conversationError && (
<View
style={{
backgroundColor: '#FDECEA',
borderLeftWidth: 4,
borderLeftColor: '#E62727',
marginHorizontal: 16,
marginTop: 8,
padding: 12,
borderRadius: 8,
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'space-between',
}}>
<Text style={{flex: 1, color: '#B71C1C', fontSize: 13}}>
{conversationError}

<TouchableOpacity
onPress={handleStartButton}
style={{backgroundColor: '#E62727', padding: 8, borderRadius: 6}}>
<Text style={{color: '#fff', fontSize: 13}}>Restart


)}

  <View
    style={{
      flex: 2.3,
      justifyContent: 'center',
      alignItems: 'center',
    }}>
    <LottieView
      ref={lottieRef}
      source={require('../../../assets/lottie/Animation_3d_blob.json')}
      loop
      speed={lottieSpeed}
      renderMode="HARDWARE"
      style={{width: 250, height: 250}}
    />

    <Text
      style={{
        marginTop: 12,
        fontSize: 16,
        fontWeight: '500',
        color: agentState === 'thinking' ? COLORS.primary : '#333',
      }}>
      {agentState === 'speaking' ? '' : getStatusLabel()}
    </Text>
  </View>

  <View style={{flex: 1}} />

  <View style={styles.buttonContainer}>
    <View style={styles.recorderButtonContainer}>
      {!conversationStarted ? (
        <View style={styles.startButton}>
          <MainButton
            title="Start"
            onPress={handleStartButton}
            isLoading={startButtonLoading}
            style={{width: 250}}
          />
        </View>
      ) : (
        <View
          style={{
            flexDirection: 'row',
            justifyContent: 'space-between',
            alignItems: 'center',
            paddingHorizontal: 50,
            paddingVertical: 20,
          }}>
          <TouchableOpacity
            onPress={handleMicMute}
            style={{
              backgroundColor: COLORS.primary,
              padding: 10,
              borderRadius: 999,
              width: 80,
              height: 80,
              justifyContent: 'center',
              alignItems: 'center',
            }}>
            {isMicMute ? (
              <MicOff size={34} color="#fff" />
            ) : (
              <Mic size={34} color="#fff" />
            )}
          </TouchableOpacity>

          <TouchableOpacity
            onPress={handleEndButton}
            style={{
              backgroundColor: '#E62727',
              padding: 10,
              borderRadius: 999,
              width: 80,
              height: 80,
              justifyContent: 'center',
              alignItems: 'center',
            }}>
            <X size={40} color="#fff" />
          </TouchableOpacity>
        </View>
      )}
    </View>
  </View>
</View>

);
};

export default TestScreen;
`

Metadata

Assignees

No one assigned

    Labels

    No labels

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions