|
16 | 16 | Test: Mute/Unmute functionality for Speech-to-Speech |
17 | 17 | |
18 | 18 | This test validates: |
19 | | - 1. Listening state can transition to muted and back to listening |
20 | | - 2. Other states (idle) cannot transition to muted |
21 | | - 3. Muted chunks contain all zeros (silent audio) |
22 | | - 4. Uses useVoiceRecordingMuted hook via Composer pattern for mute/unmute control |
| 19 | + 1. Mute is allowed from all states except idle |
| 20 | + 2. When muted during listening, chunks contain all zeros (silent audio) |
| 21 | + 3. When unmuted, chunks contain real audio |
| 22 | + 4. Mute resets to false when recording stops |
| 23 | + 5. Uses useVoiceRecordingMuted hook for mute/unmute control |
23 | 24 | --> |
24 | 25 | <script type="module"> |
25 | 26 | import { setupMockMediaDevices } from '/assets/esm/speechToSpeech/mockMediaDevices.js'; |
|
51 | 52 | return bytes.every(byte => byte === 0); |
52 | 53 | } |
53 | 54 |
|
54 | | - // Helper to check if audio has non-zero data (real audio) |
55 | | - function hasNonZeroAudio(base64Content) { |
56 | | - const binaryString = atob(base64Content); |
57 | | - const bytes = new Uint8Array(binaryString.length); |
58 | | - for (let i = 0; i < binaryString.length; i++) { |
59 | | - bytes[i] = binaryString.charCodeAt(i); |
60 | | - } |
61 | | - return bytes.some(byte => byte !== 0); |
62 | | - } |
63 | | - |
64 | 55 | const audioChunks = []; |
65 | 56 | let currentVoiceState = 'idle'; |
| 57 | + let currentMicrophoneMuted = false; |
66 | 58 |
|
67 | 59 | // Setup Web Chat with Speech-to-Speech |
68 | 60 | const { directLine, store } = testHelpers.createDirectLineEmulator(); |
69 | 61 | directLine.setCapability('getVoiceConfiguration', { sampleRate: 24000, chunkIntervalMs: 100 }, { emitEvent: false }); |
70 | 62 |
|
71 | | - // Track voiceState changes |
| 63 | + // Track voiceState and microphoneMuted changes |
72 | 64 | store.subscribe(() => { |
73 | 65 | currentVoiceState = store.getState().voice?.voiceState || 'idle'; |
| 66 | + currentMicrophoneMuted = store.getState().voice?.microphoneMuted || false; |
74 | 67 | }); |
75 | 68 |
|
76 | 69 | // Intercept postActivity to capture outgoing voice chunks |
|
79 | 72 | if (activity.name === 'media.chunk' && activity.type === 'event') { |
80 | 73 | audioChunks.push({ |
81 | 74 | content: activity.value?.content, |
82 | | - voiceState: currentVoiceState |
| 75 | + voiceState: currentVoiceState, |
| 76 | + microphoneMuted: currentMicrophoneMuted |
83 | 77 | }); |
84 | 78 | } |
85 | 79 | return originalPostActivity(activity); |
|
103 | 97 |
|
104 | 98 | // Helper to get voice state from store |
105 | 99 | const getVoiceState = () => store.getState().voice?.voiceState; |
| 100 | + const getMicrophoneMuted = () => store.getState().voice?.microphoneMuted; |
106 | 101 |
|
107 | 102 | render( |
108 | 103 | <FluentThemeProvider variant="fluent"> |
|
141 | 136 | // Wait for some listening chunks |
142 | 137 | await pageConditions.became( |
143 | 138 | 'At least 2 listening chunks received', |
144 | | - () => audioChunks.filter(c => c.voiceState === 'listening').length >= 2, |
| 139 | + () => audioChunks.filter(c => c.voiceState === 'listening' && !c.microphoneMuted).length >= 2, |
145 | 140 | 2000 |
146 | 141 | ); |
147 | 142 |
|
148 | | - // ===== TEST 3: Mute from listening state → muted state ===== |
| 143 | + // ===== TEST 3: Mute while listening → microphoneMuted true, voiceState stays listening ===== |
149 | 144 | muteControlRef.setMuted(true); |
150 | 145 |
|
151 | 146 | await pageConditions.became( |
152 | | - 'Voice state is muted', |
153 | | - () => getVoiceState() === 'muted', |
| 147 | + 'microphoneMuted is true', |
| 148 | + () => getMicrophoneMuted() === true, |
154 | 149 | 1000 |
155 | 150 | ); |
156 | 151 |
|
157 | 152 | expect(muteControlRef.muted).toBe(true); |
| 153 | + expect(getVoiceState()).toBe('listening'); // voiceState stays listening |
158 | 154 |
|
159 | 155 | // Wait for muted chunks |
160 | 156 | await pageConditions.became( |
161 | 157 | 'At least 2 muted chunks received', |
162 | | - () => audioChunks.filter(c => c.voiceState === 'muted').length >= 2, |
| 158 | + () => audioChunks.filter(c => c.microphoneMuted).length >= 2, |
163 | 159 | 2000 |
164 | 160 | ); |
165 | 161 |
|
166 | 162 | // ===== TEST 4: Verify muted chunks are all zeros ===== |
167 | | - const mutedChunks = audioChunks.filter(c => c.voiceState === 'muted'); |
| 163 | + const mutedChunks = audioChunks.filter(c => c.microphoneMuted); |
168 | 164 | expect(mutedChunks.length).toBeGreaterThanOrEqual(2); |
169 | 165 | for (const chunk of mutedChunks) { |
170 | 166 | expect(isAudioAllZeros(chunk.content)).toBe(true); |
171 | 167 | } |
172 | 168 |
|
173 | | - // ===== TEST 5: Unmute → back to listening state ===== |
| 169 | + // ===== TEST 5: Unmute → microphoneMuted false ===== |
174 | 170 | muteControlRef.setMuted(false); |
175 | 171 |
|
176 | 172 | await pageConditions.became( |
177 | | - 'Voice state is listening after unmute', |
178 | | - () => getVoiceState() === 'listening', |
| 173 | + 'microphoneMuted is false after unmute', |
| 174 | + () => getMicrophoneMuted() === false, |
179 | 175 | 1000 |
180 | 176 | ); |
181 | 177 |
|
|
190 | 186 | ); |
191 | 187 |
|
192 | 188 | // ===== TEST 6: Verify listening chunks contain real (non-zero) audio ===== |
193 | | - const listeningChunks = audioChunks.filter(c => c.voiceState === 'listening'); |
| 189 | + const listeningChunks = audioChunks.filter(c => c.voiceState === 'listening' && !c.microphoneMuted); |
194 | 190 | expect(listeningChunks.length).toBeGreaterThanOrEqual(4); // At least 2 before mute + 2 after unmute |
195 | 191 |
|
196 | 192 | // Verify listening audio is non-zero (real audio) |
197 | 193 | for (const chunk of listeningChunks) { |
198 | | - expect(hasNonZeroAudio(chunk.content)).toBe(true); |
| 194 | + expect(isAudioAllZeros(chunk.content)).toBe(false); |
199 | 195 | } |
200 | 196 |
|
201 | | - // ===== TEST 7: Stop recording ===== |
| 197 | + // ===== TEST 7: Stop recording → microphoneMuted resets to false ===== |
202 | 198 | await host.click(micButton); |
203 | 199 |
|
204 | 200 | await pageConditions.became( |
|
207 | 203 | 2000 |
208 | 204 | ); |
209 | 205 |
|
210 | | - expect(muteControlRef.muted).toBe(false); |
| 206 | + expect(muteControlRef.muted).toBe(false); // microphoneMuted resets on stop |
211 | 207 | }); |
212 | 208 | </script> |
213 | 209 | </body> |
|
0 commit comments