
HarmonyOS NEXT: microphone capture and playback with AudioRenderer + AudioCapturer + RingBuffer

There are just two classes; drop them into your project and you quickly get a karaoke-style effect: what the microphone picks up is played back through the speaker. The data path is AudioCapturer `'readData'` → `AudioRingBuffer.inFlow` → `AudioRingBuffer.outFlow` → AudioRenderer `'writeData'`. Doing this at the application layer has fairly high latency, though, so take this as a shared approach rather than a production solution.

Class code

`AudioRenderUtil` (playback; owns the ring buffer) and `AudioCaptureUtil` (microphone capture):

```typescript
import { audio } from '@kit.AudioKit';
import { BusinessError } from '@kit.BasicServicesKit';
import { AudioBufferFlow, AudioRingBuffer } from './AudioRingBuffer';
import { abilityAccessCtrl, PermissionRequestResult, Permissions } from '@kit.AbilityKit';
import { fileIo, WriteOptions } from '@kit.CoreFileKit';

export class AudioRenderUtil {
  private readonly tag = "AudioRenderUtil";
  private audioRenderer?: audio.AudioRenderer;
  /** For debugging: set this to true to also dump the PCM to a file; see the official docs for the command to pull it off the device. */
  private readonly withWrite = false;
  private targetFile?: fileIo.File;
  private bufferSize = 0;
  /** Ring buffer */
  private ringBuffer: AudioRingBuffer;

  constructor(
    context: Context,
    streamInfo: audio.AudioStreamInfo,
    renderInfo: audio.AudioRendererInfo,
  ) {
    this.ringBuffer = new AudioRingBuffer(streamInfo, 0.8, 0.2);
    const option: audio.AudioRendererOptions = {
      streamInfo: streamInfo,
      rendererInfo: renderInfo
    };
    LsLog.i(this.tag, `create by ${JSON.stringify(option)}`);
    if (this.withWrite) {
      try {
        this.targetFile = fileIo.openSync(
          context.cacheDir + `/renderer-test.pcm`,
          fileIo.OpenMode.READ_WRITE | fileIo.OpenMode.CREATE
        );
        LsLog.i(this.tag, `open file with path: ${this.targetFile.path}`);
      } catch (e) {
        LsLog.e(this.tag, `open file failed! -> ${(e as BusinessError).code}:${(e as BusinessError).message}`);
      }
    }
    audio.createAudioRenderer(option, (error, renderer) => {
      if (error) {
        LsLog.e(this.tag, `create audio renderer failed! -> ${error.code}:${error.message}`);
      } else {
        LsLog.i(this.tag, 'create audio renderer success');
        this.audioRenderer = renderer;
        if (renderer) {
          if (this.withWrite) {
            renderer.on('writeData', (buffer) => {
              this.ringBuffer.outFlow(buffer);
              if (this.targetFile) {
                const options: WriteOptions = {
                  offset: this.bufferSize,
                  length: buffer.byteLength,
                };
                renderer.setVolume(0.75);
                fileIo.writeSync(this.targetFile.fd, buffer, options);
                this.bufferSize += buffer.byteLength;
              }
              return audio.AudioDataCallbackResult.VALID;
            });
          } else {
            renderer.on('writeData', (buffer) => {
              this.ringBuffer.outFlow(buffer);
              return audio.AudioDataCallbackResult.VALID;
            });
          }
        }
      }
    });
  }

  /** Entry point of the input flow */
  get inFlow(): AudioBufferFlow {
    return this.ringBuffer.inFlow;
  }

  /** Start playback */
  start(): void {
    LsLog.i(this.tag, `do start, current state is [${this.audioRenderer?.state}]`);
    if (this.audioRenderer !== undefined) {
      let stateGroup = [audio.AudioState.STATE_PREPARED, audio.AudioState.STATE_PAUSED, audio.AudioState.STATE_STOPPED];
      if (stateGroup.indexOf(this.audioRenderer.state.valueOf()) === -1) {
        // Rendering can only be started when the state is prepared, paused, or stopped.
        LsLog.e(this.tag, 'start failed');
        return;
      }
      // Start playback.
      this.audioRenderer.start((err: BusinessError) => {
        if (err) {
          LsLog.e(this.tag, `Renderer start failed. -> [${err.code}]:${err.message}`);
        } else {
          LsLog.i(this.tag, 'Renderer start success.');
        }
        this.ringBuffer.start();
      });
    }
  }

  /** Stop playback */
  stop(): void {
    LsLog.i(this.tag, `do stop, current state is [${this.audioRenderer?.state}]`);
    if (this.audioRenderer !== undefined) {
      const notRunning = this.audioRenderer.state.valueOf() !== audio.AudioState.STATE_RUNNING;
      const notPaused = this.audioRenderer.state.valueOf() !== audio.AudioState.STATE_PAUSED;
      if (notRunning && notPaused) {
        // The renderer can only be stopped when its state is running or paused.
        LsLog.i(this.tag, 'Renderer is not running or paused');
        return;
      }
      // Stop rendering.
      this.audioRenderer.stop((err: BusinessError) => {
        if (err) {
          LsLog.e(this.tag, `Renderer stop failed. -> [${err.code}]:${err.message}`);
        } else {
          LsLog.i(this.tag, 'Renderer stop success.');
        }
        this.ringBuffer.reset();
      });
    }
  }

  /** Release resources */
  release(): void {
    if (this.audioRenderer) {
      this.audioRenderer.release((err: BusinessError) => {
        if (err) {
          LsLog.w(this.tag, `release failed! -> ${err.code}: ${err.message}`);
        } else {
          LsLog.i(this.tag, 'release success.');
        }
      });
      this.audioRenderer = undefined;
    }
    this.ringBuffer.reset();
    if (this.targetFile) {
      fileIo.close(this.targetFile.fd);
      this.targetFile = undefined;
    }
  }
}

export class AudioCaptureUtil {
  private readonly tag = "AudioCaptureUtil";
  private audioCapturer?: audio.AudioCapturer;
  private waitStartTask?: () => void;
  private readonly withWrite = false;
  private targetFile?: fileIo.File;
  private bufferSize = 0;

  constructor(context: Context, options: audio.AudioCapturerOptions, flow: AudioBufferFlow) {
    let permissions: Array<Permissions> = ['ohos.permission.MICROPHONE'];
    let atManager = abilityAccessCtrl.createAtManager();
    try {
      atManager.requestPermissionsFromUser(context, permissions, async (err: BusinessError, data: PermissionRequestResult) => {
        if (err) {
          LsLog.e(this.tag, `Request permission failed: ${err.message}`);
        } else if (data.authResults.includes(-1) || data.authResults.includes(2)) {
          LsLog.e(this.tag, 'User denied permission');
        } else {
          // Permission granted; only now call createAudioCapturer.
          this.prepare(options, flow);
        }
      });
    } catch (err) {
      LsLog.e(this.tag, `Request permission error: ${err.message}`);
    }
    if (this.withWrite) {
      try {
        this.targetFile = fileIo.openSync(
          context.cacheDir + `/capturer-test.pcm`,
          fileIo.OpenMode.READ_WRITE | fileIo.OpenMode.CREATE
        );
        LsLog.i(this.tag, `open file with path: ${this.targetFile.path}`);
      } catch (e) {
        LsLog.e(this.tag, `open file failed! -> ${(e as BusinessError).code}:${(e as BusinessError).message}`);
      }
    }
  }

  private prepare(options: audio.AudioCapturerOptions, flow: AudioBufferFlow) {
    LsLog.i(this.tag, `create by ${JSON.stringify(options)}`);
    this.bufferSize = 0;
    audio.createAudioCapturer(options, (error, capture) => {
      if (error) {
        LsLog.e(this.tag, `create audio capture failed! -> ${error.code}:${error.message}`);
      } else {
        LsLog.i(this.tag, 'create audio capture success');
        this.audioCapturer = capture;
        if (capture) {
          if (this.withWrite) {
            capture.on('readData', (buffer) => {
              if (this.targetFile) {
                const options: WriteOptions = {
                  offset: this.bufferSize,
                  length: buffer.byteLength,
                };
                fileIo.writeSync(this.targetFile.fd, buffer, options);
                this.bufferSize += buffer.byteLength;
              }
              flow(buffer);
            });
          } else {
            capture.on('readData', flow);
          }
          if (this.waitStartTask) {
            this.start(this.waitStartTask);
          }
        }
      }
    });
  }

  /** Start capturing */
  start(onStart: () => void): void {
    LsLog.i(this.tag, `do start, current state is [${this.audioCapturer?.state}]`);
    if (this.audioCapturer !== undefined) {
      this.waitStartTask = undefined;
      let stateGroup = [audio.AudioState.STATE_PREPARED, audio.AudioState.STATE_PAUSED, audio.AudioState.STATE_STOPPED];
      if (stateGroup.indexOf(this.audioCapturer.state.valueOf()) === -1) {
        // Capturing can only be started when the state is STATE_PREPARED, STATE_PAUSED, or STATE_STOPPED.
        LsLog.e(this.tag, 'start failed');
        return;
      }
      // Start capturing.
      this.audioCapturer.start((err: BusinessError) => {
        if (err) {
          LsLog.e(this.tag, `Capturer start failed. -> [${err.code}]:${err.message}`);
        } else {
          LsLog.i(this.tag, 'Capturer start success.');
          onStart();
        }
      });
    } else {
      this.waitStartTask = onStart;
    }
  }

  /** Stop capturing */
  stop(): void {
    LsLog.i(this.tag, `do stop, current state is [${this.audioCapturer?.state}]`);
    this.waitStartTask = undefined;
    if (this.audioCapturer !== undefined) {
      // The capturer can only be stopped when its state is STATE_RUNNING or STATE_PAUSED.
      const notRunning = this.audioCapturer.state.valueOf() !== audio.AudioState.STATE_RUNNING;
      const notPaused = this.audioCapturer.state.valueOf() !== audio.AudioState.STATE_PAUSED;
      if (notRunning && notPaused) {
        LsLog.i(this.tag, 'Capturer is not running or paused');
        return;
      }
      // Stop capturing.
      this.audioCapturer.stop((err: BusinessError) => {
        if (err) {
          LsLog.e(this.tag, `Capturer stop failed. -> [${err.code}]:${err.message}`);
        } else {
          LsLog.i(this.tag, 'Capturer stop success.');
        }
      });
    }
  }

  /** Release resources */
  release(): void {
    if (this.audioCapturer) {
      this.audioCapturer.release((err: BusinessError) => {
        if (err) {
          LsLog.w(this.tag, `release failed! -> ${err.code}: ${err.message}`);
        } else {
          LsLog.i(this.tag, 'release success.');
        }
      });
      this.audioCapturer = undefined;
    }
    this.waitStartTask = undefined;
    if (this.targetFile) {
      fileIo.close(this.targetFile.fd);
      this.targetFile = undefined;
    }
  }
}
```
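`LsLog` is used throughout but never defined in the post, so it is presumably the author's own logging wrapper. A minimal stand-in, assuming it simply forwards to `hilog` (the class shape is a guess, not the original implementation):

```typescript
import { hilog } from '@kit.PerformanceAnalysisKit';

/** Hypothetical stand-in for the author's LsLog helper: a thin wrapper around hilog. */
export class LsLog {
  private static readonly DOMAIN = 0x0000;

  static i(tag: string, msg: string): void {
    hilog.info(LsLog.DOMAIN, tag, '%{public}s', msg);
  }

  static w(tag: string, msg: string): void {
    hilog.warn(LsLog.DOMAIN, tag, '%{public}s', msg);
  }

  static e(tag: string, msg: string): void {
    hilog.error(LsLog.DOMAIN, tag, '%{public}s', msg);
  }
}
```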
The ring buffer, imported above from `./AudioRingBuffer`:

```typescript
import { audio } from '@kit.AudioKit';

const tag = "AudioRingBuffer";

/** Audio buffer transfer flow */
export type AudioBufferFlow = (buffer: ArrayBuffer) => void;

/** Copies one sample between DataView buffers */
type DataViewCopy = (from: DataView, to: DataView, fromOffset: number, toOffset: number) => void;

/** Running state */
enum RunningState {
  /** Stopped */
  Stop = 0,
  /** Waiting for buffer */
  WaitingBuffer = 1,
  /** Running */
  Running = 2,
}

enum StateIndex {
  RunningState = 0,
  ReadPos = 1,
  WritePos = 2,
}

/** Ring buffer for audio buffers */
export class AudioRingBuffer {
  /** Backing storage of the buffer */
  private buffer: SharedArrayBuffer;
  /** Buffer view (used for the actual reads and writes) */
  private bufferView: DataView;
  /** dataViewCopy: moves sample data between views */
  private dataViewCopy: DataViewCopy;
  /** Accessible range of the DataView (in samples) */
  private readonly bufferSize: number;
  /** Running state plus read/write position pointers */
  private state = new Int32Array([RunningState.Stop, 0, 1]);

  /** Audio input flow: writes external data into the ring buffer */
  readonly inFlow: AudioBufferFlow = (inBuffer) => {
    this.workInRunning(() => this.writeToBuffer(inBuffer));
  };

  /** Audio output flow: reads data from the ring buffer to the outside */
  readonly outFlow: AudioBufferFlow = (outBuffer) => {
    this.workInRunning(() => this.readFromBuffer(outBuffer));
  };

  /** Returns the length of a DataView (in samples) */
  private dataViewLen: (dataView: DataView) => number;

  /** Playback threshold: output stays silent until the buffer fill ratio reaches this value */
  private readonly readThreshold: number;

  /**
   * Construct the audio ring buffer
   * @param streamInfo audio format
   * @param bufferDuration buffer duration in seconds, recommended between 0.1 and 1.0
   * @param readThreshold first-frame read threshold; raising it increases latency, lowering it risks dropouts on the first frames
   */
  constructor(streamInfo: audio.AudioStreamInfo, bufferDuration: number = 0.5, readThreshold: number = 0.5) {
    if (bufferDuration <= 0 || bufferDuration > 1) {
      const def = 0.5;
      LsLog.w(tag, `invalid bufferDuration: ${bufferDuration}, use default => ${def}`);
      bufferDuration = def;
    }
    if (readThreshold <= 0 || readThreshold > 1) {
      const def = 0.5;
      LsLog.w(tag, `invalid readThreshold: ${readThreshold}, use default => ${def}`);
      readThreshold = def;
    }
    this.readThreshold = readThreshold;
    // Calculate the buffer size dynamically from the audio parameters.
    // Bytes of audio data per second:
    const bytesPerSample = this.calcBytesPerSample(streamInfo.sampleFormat);
    const bytesPerSecond = streamInfo.samplingRate * streamInfo.channels * bytesPerSample;
    let bufferSize = Math.ceil(bytesPerSecond * bufferDuration); // bytes corresponding to the buffer duration
    // Ensure the buffer is at least 1024 bytes, so a tiny buffer does not overflow constantly.
    bufferSize = Math.max(bufferSize, 1024);
    // Initialize the buffer.
    this.buffer = new SharedArrayBuffer(bufferSize);
    this.bufferView = new DataView(this.buffer);
    this.dataViewLen = (view) => Math.ceil(view.byteLength / bytesPerSample);
    this.bufferSize = this.dataViewLen(this.bufferView);
    // Initialize the reader/writer (the per-format sample copier).
    this.dataViewCopy = this.generateDataViewCopy(streamInfo.sampleFormat);
    LsLog.i(tag, `audio buffer init with ${bufferSize} bytes, duration: ${bufferDuration}s`);
  }

  /** Generate the buffer copy function for the given sample format */
  private generateDataViewCopy(format: audio.AudioSampleFormat): DataViewCopy {
    switch (format) {
      case audio.AudioSampleFormat.SAMPLE_FORMAT_U8:
        return (from, to, fromOffset, toOffset) => to.setUint8(toOffset, from.getUint8(fromOffset));
      case audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE:
        return (from, to, fromOffset, toOffset) => to.setInt16(toOffset * 2, from.getInt16(fromOffset * 2, true), true);
      case audio.AudioSampleFormat.SAMPLE_FORMAT_S24LE:
        return (from, to, fromOffset, toOffset) => {
          const rawValue = from.getUint8(fromOffset * 4) |
            (from.getUint8(fromOffset * 4 + 1) << 8) |
            (from.getUint8(fromOffset * 4 + 2) << 16);
          // Handle sign extension
          const sign = rawValue & 0x800000 ? -1 : 1;
          const adjustedValue = sign * (rawValue & 0x7FFFFF);
          to.setInt32(toOffset * 4, adjustedValue, true);
        };
      case audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE:
        return (from, to, fromOffset, toOffset) => to.setInt32(toOffset * 4, from.getInt32(fromOffset * 4, true), true);
      default:
        return (from, to, fromOffset, toOffset) => to.setUint8(toOffset, from.getUint8(fromOffset));
    }
  }

  /** Bytes per sample for the given format */
  private calcBytesPerSample(format: audio.AudioSampleFormat): number {
    switch (format) {
      case audio.AudioSampleFormat.SAMPLE_FORMAT_U8:
        return 1;
      case audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE:
        return 2;
      case audio.AudioSampleFormat.SAMPLE_FORMAT_S24LE:
        return 4;
      case audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE:
        return 4;
      default:
        return 1;
    }
  }

  /**
   * Run a task only while in a running state
   * @param task the task to execute
   */
  private workInRunning(task: () => void) {
    try {
      if (Atomics.load(this.state, 0) !== RunningState.Stop) {
        task();
      }
    } catch (err) {
      LsLog.e(tag, `task execution error: ${err}`);
    }
  }

  /**
   * Currently available space
   * Available = total capacity - used space - 1 (one slot reserved to tell full from empty)
   */
  private getAvailableSpace(): number {
    return this.bufferSize - 1 - this.getUsedSpace();
  }

  /**
   * Currently used space
   */
  private getUsedSpace(): number {
    return (this.getState(StateIndex.WritePos) - this.getState(StateIndex.ReadPos) + this.bufferSize) % this.bufferSize;
  }

  /**
   * Write data into the ring buffer
   * @param inBuffer input data buffer
   */
  private writeToBuffer(inBuffer: ArrayBuffer): void {
    const inputData = new DataView(inBuffer);
    const inputLength = this.dataViewLen(inputData);
    if (inputLength <= 0) {
      return;
    }
    // Get the available space and check how much can actually be written.
    const availableSpace = this.getAvailableSpace();
    if (inputLength > availableSpace) {
      LsLog.w(tag, `buffer full! used ${this.getUsedSpace()}, available: ${availableSpace}`);
      return;
    }
    // Perform the write (two cases: with or without wrap-around).
    const writePos = this.getState(StateIndex.WritePos);
    const contiguousSpace = this.bufferSize - writePos;
    if (inputLength <= contiguousSpace) {
      // No wrap-around needed, write directly.
      for (let i = 0; i < inputLength; i++) {
        this.dataViewCopy(inputData, this.bufferView, i, writePos + i);
      }
      this.setState(StateIndex.WritePos, (writePos + inputLength) % this.bufferSize);
    } else {
      // Wrap-around needed, write in two parts.
      for (let i = 0; i < contiguousSpace; i++) {
        this.dataViewCopy(inputData, this.bufferView, i, writePos + i);
      }
      const remaining = inputLength - contiguousSpace;
      for (let i = 0; i < remaining; i++) {
        this.dataViewCopy(inputData, this.bufferView, contiguousSpace + i, i);
      }
      this.setState(StateIndex.WritePos, remaining);
    }
  }

  /**
   * Read data from the ring buffer
   * @param outBuffer output data buffer
   */
  private readFromBuffer(outBuffer: ArrayBuffer): void {
    const outputData = new DataView(outBuffer);
    const outputLength = this.dataViewLen(outputData);
    if (outputLength <= 0) {
      return;
    }
    // Check how much data is readable.
    const usedSpace = this.getUsedSpace();
    if (this.getState(StateIndex.RunningState) === RunningState.WaitingBuffer) {
      if (usedSpace / this.bufferSize < this.readThreshold) {
        for (let i = 0; i < outputLength; i++) {
          outputData.setInt8(i, 0);
        }
        return;
      }
    }
    this.setState(StateIndex.RunningState, RunningState.Running);
    const readLength = Math.min(outputLength, usedSpace);
    // Perform the read (two cases: with or without wrap-around).
    const readPos = this.getState(StateIndex.ReadPos);
    const contiguousData = this.bufferSize - readPos;
    if (readLength <= contiguousData) {
      for (let i = 0; i < readLength; i++) {
        this.dataViewCopy(this.bufferView, outputData, readPos + i, i);
      }
      this.setState(StateIndex.ReadPos, (readPos + readLength) % this.bufferSize);
    } else {
      for (let i = 0; i < contiguousData; i++) {
        this.dataViewCopy(this.bufferView, outputData, readPos + i, i);
      }
      const remaining = readLength - contiguousData;
      for (let i = 0; i < remaining; i++) {
        this.dataViewCopy(this.bufferView, outputData, i, contiguousData + i);
      }
      this.setState(StateIndex.ReadPos, remaining);
    }
    if (readLength < outputLength) {
      LsLog.w(tag, `requested ${outputLength}, but only ${readLength} available, rest filled with 0`);
      for (let i = readLength; i < outputLength; i++) {
        outputData.setInt8(i, 0);
      }
    }
  }

  private getState(index: StateIndex): number {
    return Atomics.load(this.state, index);
  }

  private setState(index: StateIndex, value: number) {
    Atomics.store(this.state, index, value);
  }

  /**
   * Start streaming
   */
  start() {
    this.setState(StateIndex.RunningState, RunningState.WaitingBuffer);
    LsLog.i(tag, "buffer start running");
  }

  /**
   * Reset the stream (clear the buffer and reset the pointers)
   */
  reset() {
    this.setState(StateIndex.RunningState, RunningState.Stop);
    this.setState(StateIndex.ReadPos, 0);
    this.setState(StateIndex.WritePos, 1);
    LsLog.i(tag, "buffer has reset");
  }
}
```
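To see the ring buffer's behavior in isolation, here is a small sketch (not from the original post; the 48 kHz / 16-bit mono format and block sizes are illustrative) that pushes one captured block through `inFlow` and pulls one playback block through `outFlow`:

```typescript
import { audio } from '@kit.AudioKit';
import { AudioRingBuffer } from './AudioRingBuffer';

const streamInfo: audio.AudioStreamInfo = {
  samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000,
  channels: audio.AudioChannel.CHANNEL_1,
  sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
  encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
};

// 0.5 s of buffering; playback starts once the buffer is 20 % full.
const ring = new AudioRingBuffer(streamInfo, 0.5, 0.2);
ring.start();

// 20 ms of 16-bit mono at 48 kHz = 48000 * 0.02 * 2 bytes.
const captured = new ArrayBuffer(1920);
ring.inFlow(captured);   // what the capturer's 'readData' callback does

const playback = new ArrayBuffer(1920);
ring.outFlow(playback);  // what the renderer's 'writeData' callback does
// 20 ms is still below the 20 % threshold, so this block comes back as silence;
// once enough blocks have been written, outFlow starts returning the captured samples.
```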

Usage

1. Initialize (streamInfo, renderInfo, and captureInfo are caller-supplied configuration objects; an example sketch follows step 4)
```typescript
this.render = new AudioRenderUtil(context, streamInfo, renderInfo);
const recordFlow = this.render.inFlow;
this.capture = new AudioCaptureUtil(context, {
  streamInfo: streamInfo,
  capturerInfo: captureInfo
}, recordFlow);
```
2. Start
```typescript
/** Start capture/render */
private _startKaraoke() {
  this.capture?.start(() => {
    // Only start playback once recording has started successfully
    this.render?.start();
  });
}
```
3. Stop
```typescript
/** Stop capture/render */
private _stopKaraoke() {
  this.capture?.stop();
  this.render?.stop();
}
```
4. Release
```typescript
onRelease(): void {
  this._stopKaraoke();
  this.capture?.release();
  this.capture = undefined;
  this.render?.release();
  this.render = undefined;
}
```
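The steps above assume `streamInfo`, `renderInfo`, and `captureInfo` are defined by the caller (step 1 references them). A minimal sketch of what they might look like; the concrete values here are illustrative, not taken from the original project:

```typescript
import { audio } from '@kit.AudioKit';

// PCM format shared by the capturer, the ring buffer, and the renderer.
const streamInfo: audio.AudioStreamInfo = {
  samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_48000,
  channels: audio.AudioChannel.CHANNEL_2,
  sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
  encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
};

// Playback side.
const renderInfo: audio.AudioRendererInfo = {
  usage: audio.StreamUsage.STREAM_USAGE_MUSIC,
  rendererFlags: 0
};

// Microphone side.
const captureInfo: audio.AudioCapturerInfo = {
  source: audio.SourceType.SOURCE_TYPE_MIC,
  capturerFlags: 0
};
```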