Skip to content

Commit 8dc7df5

Browse files
mute independent from voice state
1 parent cd39b84 commit 8dc7df5

5 files changed

Lines changed: 47 additions & 47 deletions

File tree

__tests__/html2/speechToSpeech/mute.unmute.html

Lines changed: 25 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -16,10 +16,11 @@
1616
Test: Mute/Unmute functionality for Speech-to-Speech
1717
1818
This test validates:
19-
1. Listening state can transition to muted and back to listening
20-
2. Other states (idle) cannot transition to muted
21-
3. Muted chunks contain all zeros (silent audio)
22-
4. Uses useVoiceRecordingMuted hook via Composer pattern for mute/unmute control
19+
1. Mute is allowed from all states except idle
20+
2. When muted during listening, chunks contain all zeros (silent audio)
21+
3. When unmuted, chunks contain real audio
22+
4. Mute resets to false when recording stops
23+
5. Uses useVoiceRecordingMuted hook for mute/unmute control
2324
-->
2425
<script type="module">
2526
import { setupMockMediaDevices } from '/assets/esm/speechToSpeech/mockMediaDevices.js';
@@ -51,26 +52,18 @@
5152
return bytes.every(byte => byte === 0);
5253
}
5354

54-
// Helper to check if audio has non-zero data (real audio)
55-
function hasNonZeroAudio(base64Content) {
56-
const binaryString = atob(base64Content);
57-
const bytes = new Uint8Array(binaryString.length);
58-
for (let i = 0; i < binaryString.length; i++) {
59-
bytes[i] = binaryString.charCodeAt(i);
60-
}
61-
return bytes.some(byte => byte !== 0);
62-
}
63-
6455
const audioChunks = [];
6556
let currentVoiceState = 'idle';
57+
let currentMicrophoneMuted = false;
6658

6759
// Setup Web Chat with Speech-to-Speech
6860
const { directLine, store } = testHelpers.createDirectLineEmulator();
6961
directLine.setCapability('getVoiceConfiguration', { sampleRate: 24000, chunkIntervalMs: 100 }, { emitEvent: false });
7062

71-
// Track voiceState changes
63+
// Track voiceState and microphoneMuted changes
7264
store.subscribe(() => {
7365
currentVoiceState = store.getState().voice?.voiceState || 'idle';
66+
currentMicrophoneMuted = store.getState().voice?.microphoneMuted || false;
7467
});
7568

7669
// Intercept postActivity to capture outgoing voice chunks
@@ -79,7 +72,8 @@
7972
if (activity.name === 'media.chunk' && activity.type === 'event') {
8073
audioChunks.push({
8174
content: activity.value?.content,
82-
voiceState: currentVoiceState
75+
voiceState: currentVoiceState,
76+
microphoneMuted: currentMicrophoneMuted
8377
});
8478
}
8579
return originalPostActivity(activity);
@@ -103,6 +97,7 @@
10397

10498
// Helper to get voice state from store
10599
const getVoiceState = () => store.getState().voice?.voiceState;
100+
const getMicrophoneMuted = () => store.getState().voice?.microphoneMuted;
106101

107102
render(
108103
<FluentThemeProvider variant="fluent">
@@ -141,41 +136,42 @@
141136
// Wait for some listening chunks
142137
await pageConditions.became(
143138
'At least 2 listening chunks received',
144-
() => audioChunks.filter(c => c.voiceState === 'listening').length >= 2,
139+
() => audioChunks.filter(c => c.voiceState === 'listening' && !c.microphoneMuted).length >= 2,
145140
2000
146141
);
147142

148-
// ===== TEST 3: Mute from listening state → muted state =====
143+
// ===== TEST 3: Mute while listening → microphoneMuted true, voiceState stays listening =====
149144
muteControlRef.setMuted(true);
150145

151146
await pageConditions.became(
152-
'Voice state is muted',
153-
() => getVoiceState() === 'muted',
147+
'microphoneMuted is true',
148+
() => getMicrophoneMuted() === true,
154149
1000
155150
);
156151

157152
expect(muteControlRef.muted).toBe(true);
153+
expect(getVoiceState()).toBe('listening'); // voiceState stays listening
158154

159155
// Wait for muted chunks
160156
await pageConditions.became(
161157
'At least 2 muted chunks received',
162-
() => audioChunks.filter(c => c.voiceState === 'muted').length >= 2,
158+
() => audioChunks.filter(c => c.microphoneMuted).length >= 2,
163159
2000
164160
);
165161

166162
// ===== TEST 4: Verify muted chunks are all zeros =====
167-
const mutedChunks = audioChunks.filter(c => c.voiceState === 'muted');
163+
const mutedChunks = audioChunks.filter(c => c.microphoneMuted);
168164
expect(mutedChunks.length).toBeGreaterThanOrEqual(2);
169165
for (const chunk of mutedChunks) {
170166
expect(isAudioAllZeros(chunk.content)).toBe(true);
171167
}
172168

173-
// ===== TEST 5: Unmute → back to listening state =====
169+
// ===== TEST 5: Unmute → microphoneMuted false =====
174170
muteControlRef.setMuted(false);
175171

176172
await pageConditions.became(
177-
'Voice state is listening after unmute',
178-
() => getVoiceState() === 'listening',
173+
'microphoneMuted is false after unmute',
174+
() => getMicrophoneMuted() === false,
179175
1000
180176
);
181177

@@ -190,15 +186,15 @@
190186
);
191187

192188
// ===== TEST 6: Verify listening chunks contain real (non-zero) audio =====
193-
const listeningChunks = audioChunks.filter(c => c.voiceState === 'listening');
189+
const listeningChunks = audioChunks.filter(c => c.voiceState === 'listening' && !c.microphoneMuted);
194190
expect(listeningChunks.length).toBeGreaterThanOrEqual(4); // At least 2 before mute + 2 after unmute
195191

196192
// Verify listening audio is non-zero (real audio)
197193
for (const chunk of listeningChunks) {
198-
expect(hasNonZeroAudio(chunk.content)).toBe(true);
194+
expect(isAudioAllZeros(chunk.content)).toBe(false);
199195
}
200196

201-
// ===== TEST 7: Stop recording =====
197+
// ===== TEST 7: Stop recording → microphoneMuted resets to false =====
202198
await host.click(micButton);
203199

204200
await pageConditions.became(
@@ -207,7 +203,7 @@
207203
2000
208204
);
209205

210-
expect(muteControlRef.muted).toBe(false);
206+
expect(muteControlRef.muted).toBe(false); // microphoneMuted resets on stop
211207
});
212208
</script>
213209
</body>

packages/api/src/hooks/useVoiceRecordingMuted.ts

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,14 @@ import { useDispatch, useSelector } from './internal/WebChatReduxContext';
44

55
/**
66
* Hook to get and set voice recording mute state in speech-to-speech mode.
7+
*
8+
* Mute is independent of voice state - it can be toggled at any time.
9+
* When muted, silent audio chunks are sent instead of real audio.
10+
* Mute resets to false when recording stops.
711
*/
812
export default function useVoiceRecordingMuted(): readonly [boolean, (muted: boolean) => void] {
913
const dispatch = useDispatch();
10-
const value = useSelector(({ voice }) => voice.voiceState === 'muted');
14+
const value = useSelector(({ voice }) => voice.microphoneMuted);
1115

1216
const setter = useCallback(
1317
(muted: boolean) => {

packages/api/src/providers/SpeechToSpeech/private/VoiceRecorderBridge.tsx

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,18 @@
11
import { useEffect, useCallback } from 'react';
22
import { useRecorder } from './useRecorder';
33
import usePostVoiceActivity from '../../../hooks/internal/usePostVoiceActivity';
4+
import useVoiceRecordingMuted from '../../../hooks/useVoiceRecordingMuted';
45
import useVoiceState from '../../../hooks/useVoiceState';
56

67
/**
78
* VoiceRecorderBridge is an invisible component that bridges the Redux recording state
89
* with the actual microphone recording functionality.
910
*/
1011
export function VoiceRecorderBridge(): null {
12+
const [muted] = useVoiceRecordingMuted();
1113
const [voiceState] = useVoiceState();
1214
const postVoiceActivity = usePostVoiceActivity();
1315

14-
const muted = voiceState === 'muted';
1516
// Derive recording state from voiceState - recording is active when not idle
1617
const recording = voiceState !== 'idle';
1718

@@ -32,17 +33,17 @@ export function VoiceRecorderBridge(): null {
3233

3334
const { mute, record } = useRecorder(handleAudioChunk);
3435

35-
useEffect(() => {
36-
if (muted) {
37-
return mute();
38-
}
39-
}, [muted, mute]);
40-
4136
useEffect(() => {
4237
if (recording) {
4338
return record();
4439
}
4540
}, [record, recording]);
4641

42+
useEffect(() => {
43+
if (muted) {
44+
return mute();
45+
}
46+
}, [muted, mute]);
47+
4748
return null;
4849
}

packages/core/src/actions/setVoiceState.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
const VOICE_SET_STATE = 'WEB_CHAT/VOICE_SET_STATE' as const;
22

3-
type VoiceState = 'idle' | 'listening' | 'muted' | 'user_speaking' | 'processing' | 'bot_speaking';
3+
type VoiceState = 'idle' | 'listening' | 'user_speaking' | 'processing' | 'bot_speaking';
44

55
type VoiceSetStateAction = {
66
type: typeof VOICE_SET_STATE;

packages/core/src/reducers/voiceActivity.ts

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -24,11 +24,13 @@ type VoiceActivityActions =
2424
| VoiceUnregisterHandlerAction;
2525

2626
interface VoiceActivityState {
27+
microphoneMuted: boolean;
2728
voiceState: VoiceState;
2829
voiceHandlers: Map<string, VoiceHandler>;
2930
}
3031

3132
const DEFAULT_STATE: VoiceActivityState = {
33+
microphoneMuted: false,
3234
voiceState: 'idle',
3335
voiceHandlers: new Map()
3436
};
@@ -39,15 +41,15 @@ export default function voiceActivity(
3941
): VoiceActivityState {
4042
switch (action.type) {
4143
case VOICE_MUTE_RECORDING:
42-
// Only allow muting when in listening state
43-
if (state.voiceState !== 'listening') {
44-
console.warn(`botframework-webchat: Cannot mute from "${state.voiceState}" state, must be "listening"`);
44+
// Only allow muting when in recording state
45+
if (state.voiceState === 'idle') {
46+
console.warn(`botframework-webchat: Cannot mute from "${state.voiceState}" state, must be in recording state.`);
4547
return state;
4648
}
4749

4850
return {
4951
...state,
50-
voiceState: 'muted'
52+
microphoneMuted: true
5153
};
5254

5355
case VOICE_REGISTER_HANDLER: {
@@ -87,17 +89,14 @@ export default function voiceActivity(
8789
case VOICE_STOP_RECORDING:
8890
return {
8991
...state,
92+
microphoneMuted: false,
9093
voiceState: 'idle'
9194
};
9295

9396
case VOICE_UNMUTE_RECORDING:
94-
if (state.voiceState !== 'muted') {
95-
console.warn(`botframework-webchat: Should not transit from "${state.voiceState}" to "listening"`);
96-
}
97-
9897
return {
9998
...state,
100-
voiceState: 'listening'
99+
microphoneMuted: false
101100
};
102101

103102
default:

0 commit comments

Comments
 (0)