
 import ObjectExt from "./ObjectsExt";

+// AudioPlayerProcessor worklet code as a string
+const audioPlayerProcessorCode = `
+class AudioPlayerProcessor extends AudioWorkletProcessor {
+  constructor() {
+    super();
+    this.buffer = [];
+    this.isPlaying = false;
+    this.port.onmessage = this.handleMessage.bind(this);
+  }
+
+  handleMessage(event) {
+    const data = event.data;
+    switch (data.type) {
+      case "audio":
+        this.buffer.push(...data.audioData);
+        if (!this.isPlaying) {
+          this.isPlaying = true;
+        }
+        break;
+      case "barge-in":
+        this.buffer = [];
+        this.isPlaying = false;
+        break;
+      case "initial-buffer-length":
+        // Optional: Set initial buffer length
+        break;
+      default:
+        console.error("Unknown message type:", data.type);
+    }
+  }
+
+  process(inputs, outputs) {
+    const output = outputs[0];
+    const channel = output[0];
+
+    if (this.isPlaying && this.buffer.length > 0) {
+      const samplesToProcess = Math.min(channel.length, this.buffer.length);
+
+      for (let i = 0; i < samplesToProcess; i++) {
+        channel[i] = this.buffer.shift();
+      }
+
+      // Fill remaining samples with silence if buffer is depleted
+      if (samplesToProcess < channel.length) {
+        for (let i = samplesToProcess; i < channel.length; i++) {
+          channel[i] = 0;
+        }
+      }
+    } else {
+      // Output silence when not playing
+      for (let i = 0; i < channel.length; i++) {
+        channel[i] = 0;
+      }
+    }
+
+    return true;
+  }
+}
+
+registerProcessor("audio-player-processor", AudioPlayerProcessor);
+`;

 export default class AudioPlayer {
   private audioContext: AudioContext | null = null;
@@ -33,34 +94,41 @@ export default class AudioPlayer {
     this.analyser = this.audioContext.createAnalyser();
     this.analyser.fftSize = 512;

+    // Create a Blob URL for the worklet code
+    const blob = new Blob([audioPlayerProcessorCode], { type: 'application/javascript' });
+    const workletUrl = URL.createObjectURL(blob);

-    const workletUrl = new URL('./AudioPlayerProcessor.worklet.js', import.meta.url);
-    // Register the audio worklet
-    await this.audioContext.audioWorklet.addModule(workletUrl);
-
-    // Create and connect nodes
-    this.workletNode = new AudioWorkletNode(this.audioContext, "audio-player-processor");
-    // Create recorder node for monitoring audio output
-    this.recorderNode = this.audioContext.createScriptProcessor(512, 1, 1);
-
-    this.workletNode.connect(this.analyser);
-    this.analyser.connect(this.recorderNode);
-    this.recorderNode.connect(this.audioContext.destination);
-
-    this.recorderNode.onaudioprocess = (event) => {
-      // Pass the input along as-is
-      const inputData = event.inputBuffer.getChannelData(0);
-      const outputData = event.outputBuffer.getChannelData(0);
-      outputData.set(inputData);
+    try {
+      // Register the audio worklet
+      await this.audioContext.audioWorklet.addModule(workletUrl);

-      // Notify listeners that the audio was played
-      const samples = new Float32Array(outputData.length);
-      samples.set(outputData);
-      this.onAudioPlayedListeners.forEach(listener => listener(samples));
-    };
+      // Create and connect nodes
+      this.workletNode = new AudioWorkletNode(this.audioContext, "audio-player-processor");
+      // Create recorder node for monitoring audio output
+      this.recorderNode = this.audioContext.createScriptProcessor(512, 1, 1);

-    this.maybeOverrideInitialBufferLength();
-    this.initialized = true;
+      // Route output through the recorder node so onaudioprocess fires
+      // and the onAudioPlayed listeners keep getting notified
+      this.workletNode.connect(this.analyser);
+      this.analyser.connect(this.recorderNode);
+      this.recorderNode.connect(this.audioContext.destination);
+      this.recorderNode.onaudioprocess = (event) => {
+        // Pass the input along as-is
+        const inputData = event.inputBuffer.getChannelData(0);
+        const outputData = event.outputBuffer.getChannelData(0);
+        outputData.set(inputData);
+
+        // Notify listeners that the audio was played
+        const samples = new Float32Array(outputData.length);
+        samples.set(outputData);
+        this.onAudioPlayedListeners.forEach(listener => listener(samples));
+      };
+
+      this.maybeOverrideInitialBufferLength();
+      this.initialized = true;
+    } finally {
+      // Clean up the Blob URL
+      URL.revokeObjectURL(workletUrl);
+    }
   }

   bargeIn(): void {
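
For context, this change inlines the worklet source as a string and registers it from a Blob URL instead of resolving ./AudioPlayerProcessor.worklet.js with new URL(..., import.meta.url), so the processor no longer has to ship as a separate file for the bundler to resolve and serve. The sketch below is illustrative only and not part of the commit: it drives the inlined processor directly through its MessagePort, assuming audioPlayerProcessorCode is in scope; the function name is hypothetical, and in the real class this plumbing sits behind AudioPlayer's own methods (such as bargeIn(), visible above).

async function demoAudioPlayerProcessor(): Promise<void> {
  // Browsers keep a fresh AudioContext suspended until a user gesture;
  // call audioContext.resume() from a click handler if nothing is audible.
  const audioContext = new AudioContext();

  // Same Blob-URL registration pattern as the patched code above.
  const blob = new Blob([audioPlayerProcessorCode], { type: "application/javascript" });
  const workletUrl = URL.createObjectURL(blob);
  try {
    await audioContext.audioWorklet.addModule(workletUrl);
  } finally {
    URL.revokeObjectURL(workletUrl);
  }

  const node = new AudioWorkletNode(audioContext, "audio-player-processor");
  node.connect(audioContext.destination);

  // An "audio" message appends Float32 samples (at the context sample rate)
  // to the processor's queue and starts playback.
  const samples = new Float32Array(2048); // short burst of silence as a stand-in
  node.port.postMessage({ type: "audio", audioData: samples });

  // Later, when the user interrupts: a "barge-in" message clears the queue
  // so the processor falls back to outputting silence.
  node.port.postMessage({ type: "barge-in" });
}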