ios – 核心音频离线渲染GenericOutput

ios – 核心音频离线渲染GenericOutput,第1张

概述:有没有人用 Core Audio 成功完成过脱机(离线)渲染?我需要混合两个音频文件并施加混响(使用 2 个 AudioFilePlayer、MultiChannelMixer、Reverb2 和 RemoteIO)。目前在边播放边预览时(在 RemoteIO 的 renderCallback 中)可以保存结果,但我需要在不播放的情况下保存它(离线渲染)。提前致谢。——使用 GenericOutput AudioUnit 进行离线渲染对我有效。

我必须混合两个音频文件并应用混响(使用 2 个 AudioFilePlayer、MultiChannelMixer、Reverb2 和 RemoteIO)。
得到它的工作我可以保存它,而它的预览(在renderCallBack的RemoteIO).

我需要在不播放的情况下保存它(离线渲染)。
提前致谢.

解决方法 离线渲染使用GenericOutput AudioUnit为我工作.
我在这里分享工作代码.
核心音频框架似乎有一点.但是像ASBD这样的小事情,参数…等正在产生这些问题.尝试努力,它会工作.不要放弃:-).核心音频在处理低级音频时非常强大和有用.这是我从最近几周学到的东西.享受:-D ….

在.h中声明这些

// --- Declarations for the .h file ---
// NOTE(review): identifiers below keep the article's original spellings
// (mfilePlayer, mmixer, inputfile, ...) so the rest of the listing still
// matches; only SDK type names are restored to their real spellings.

// The AUGraph that hosts all of the AudioUnits.
AUGraph mGraph;

// Audio Unit references (filled in by AUGraphNodeInfo after the graph is opened).
AudioUnit mfilePlayer;   // file player for the voice track
AudioUnit mfilePlayer2;  // file player for the background-music track
AudioUnit mReverb;       // Reverb2 effect
AudioUnit mTone;         // Varispeed (format converter) unit
AudioUnit mmixer;        // MultiChannelMixer (2 input buses)
AudioUnit mGIO;          // GenericOutput unit — pulled manually for offline rendering

// Source audio files.
AudioFileID inputfile;
AudioFileID inputfile2;

// Destination file reference used while saving.
ExtAudioFileRef extAudiofile;

// Standard sample rate for the whole graph.
Float64 graphSampleRate;
AudioStreamBasicDescription stereoStreamFormat864;

// Longest scheduled region, in frames — drives the offline render loop length.
Float64 MaxSampleTime;

// in .m class

// Designated initializer: configures the audio session for playback and
// builds the processing graph.
// NOTE(review): Checkerror is a project-local assert-style helper (OSStatus,
// message) defined elsewhere in the project — confirm its spelling there.
- (instancetype)init
{
    self = [super init];
    if (self) {   // guard: [super init] may return nil
        graphSampleRate = 44100.0;
        MaxSampleTime   = 0.0;

        // MediaPlayback category so the graph can render while the screen locks.
        // (AudioSession C API is deprecated in favor of AVAudioSession, but is
        // kept here to match the rest of this listing.)
        UInt32 category = kAudioSessionCategory_MediaPlayback;
        Checkerror(AudioSessionSetProperty(kAudioSessionProperty_AudioCategory,
                                           sizeof(category),
                                           &category),
                   "Couldn't set category on audio session");

        [self initializeAUGraph];
    }
    return self;
}

// ASBD设置

// Fills stereoStreamFormat864 with the canonical AudioUnit sample format:
// linear PCM, stereo, noninterleaved, at the graph sample rate.
- (void)setupStereoStream864
{
    // AudioUnitSampleType is the recommended sample data type for audio units;
    // its byte size drives the ASBD fields below.
    size_t bytesPerSample = sizeof(AudioUnitSampleType);

    stereoStreamFormat864.mFormatID          = kAudioFormatLinearPCM;
    stereoStreamFormat864.mFormatFlags       = kAudioFormatFlagsAudioUnitCanonical;
    // Noninterleaved: each buffer holds one channel, so packet/frame sizes
    // count a single sample.
    stereoStreamFormat864.mBytesPerPacket    = bytesPerSample;
    stereoStreamFormat864.mFramesPerPacket   = 1;
    stereoStreamFormat864.mBytesPerFrame     = bytesPerSample;
    stereoStreamFormat864.mChannelsPerFrame  = 2;   // stereo
    stereoStreamFormat864.mBitsPerChannel    = 8 * bytesPerSample;
    stereoStreamFormat864.mSampleRate        = graphSampleRate;
}

// AUGraph设置

// Builds the offline-rendering graph:
//   filePlayer ─┐
//               ├─ mixer ─ reverb ─ tone(Varispeed) ─ GenericOutput
//   filePlayer2 ┘
// NOTE(review): the scraped article lost most argument lists here; node
// references, connections, and property scopes were reconstructed from the
// AUGraph/AudioUnit APIs and the surrounding text — verify against the
// original answer.
- (void)initializeAUGraph
{
    [self setupStereoStream864];

    // Create a new, empty AUGraph.
    Checkerror(NewAUGraph(&mGraph), "Couldn't create new graph");

    // AUNodes represent AudioUnits on the AUGraph and provide an easy means
    // for connecting them together.
    AUNode filePlayerNode;
    AUNode filePlayerNode2;
    AUNode mixerNode;
    AUNode reverbNode;
    AUNode toneNode;
    AUNode gOutputNode;

    // --- Component descriptions for every unit we want in the graph ---

    // File player (voice)
    AudioComponentDescription filePlayer_desc;
    filePlayer_desc.componentType         = kAudioUnitType_Generator;
    filePlayer_desc.componentSubType      = kAudioUnitSubType_AudioFilePlayer;
    filePlayer_desc.componentFlags        = 0;
    filePlayer_desc.componentFlagsMask    = 0;
    filePlayer_desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // File player 2 (background music)
    AudioComponentDescription filePlayer2_desc = filePlayer_desc;

    // Multichannel mixer
    AudioComponentDescription mixer_desc;
    mixer_desc.componentType         = kAudioUnitType_Mixer;
    mixer_desc.componentSubType      = kAudioUnitSubType_MultiChannelMixer;
    mixer_desc.componentFlags        = 0;
    mixer_desc.componentFlagsMask    = 0;
    mixer_desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Reverb effect
    AudioComponentDescription reverb_desc;
    reverb_desc.componentType         = kAudioUnitType_Effect;
    reverb_desc.componentSubType      = kAudioUnitSubType_Reverb2;
    reverb_desc.componentFlags        = 0;
    reverb_desc.componentFlagsMask    = 0;
    reverb_desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Tone unit (Varispeed; NewTimePitch is the commented-out alternative).
    AudioComponentDescription tone_desc;
    tone_desc.componentType         = kAudioUnitType_FormatConverter;
    //tone_desc.componentSubType    = kAudioUnitSubType_NewTimePitch;
    tone_desc.componentSubType      = kAudioUnitSubType_Varispeed;
    tone_desc.componentFlags        = 0;
    tone_desc.componentFlagsMask    = 0;
    tone_desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Generic output — the key to offline rendering: it is never started by
    // the system; we pull it manually with AudioUnitRender.
    AudioComponentDescription gOutput_desc;
    gOutput_desc.componentType         = kAudioUnitType_Output;
    gOutput_desc.componentSubType      = kAudioUnitSubType_GenericOutput;
    gOutput_desc.componentFlags        = 0;
    gOutput_desc.componentFlagsMask    = 0;
    gOutput_desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Add nodes to the graph.
    AUGraphAddNode(mGraph, &filePlayer_desc,  &filePlayerNode);
    AUGraphAddNode(mGraph, &filePlayer2_desc, &filePlayerNode2);
    AUGraphAddNode(mGraph, &mixer_desc,       &mixerNode);
    AUGraphAddNode(mGraph, &reverb_desc,      &reverbNode);
    AUGraphAddNode(mGraph, &tone_desc,        &toneNode);
    AUGraphAddNode(mGraph, &gOutput_desc,     &gOutputNode);

    // Open the graph early, initialize late: units are instantiated here but
    // no resource allocation occurs yet.
    Checkerror(AUGraphOpen(mGraph), "Couldn't Open the graph");

    // Fetch the AudioUnit references backing each node.
    AUGraphNodeInfo(mGraph, filePlayerNode,  NULL, &mfilePlayer);
    AUGraphNodeInfo(mGraph, filePlayerNode2, NULL, &mfilePlayer2);
    AUGraphNodeInfo(mGraph, reverbNode,      NULL, &mReverb);
    AUGraphNodeInfo(mGraph, toneNode,        NULL, &mTone);
    AUGraphNodeInfo(mGraph, mixerNode,       NULL, &mmixer);
    AUGraphNodeInfo(mGraph, gOutputNode,     NULL, &mGIO);

    // Wire the chain: players -> mixer buses 0/1 -> reverb -> tone -> output.
    AUGraphConnectNodeInput(mGraph, filePlayerNode,  0, mixerNode,   0);
    AUGraphConnectNodeInput(mGraph, filePlayerNode2, 0, mixerNode,   1);
    AUGraphConnectNodeInput(mGraph, mixerNode,       0, reverbNode,  0);
    AUGraphConnectNodeInput(mGraph, reverbNode,      0, toneNode,    0);
    AUGraphConnectNodeInput(mGraph, toneNode,        0, gOutputNode, 0);

    // Two input buses on the mixer, one per file player.
    UInt32 busCount = 2;
    Checkerror(AudioUnitSetProperty(mmixer,
                                    kAudioUnitProperty_ElementCount,
                                    kAudioUnitScope_Input,
                                    0,
                                    &busCount,
                                    sizeof(busCount)),
               "Couldn't set mixer unit's bus count");

    // Enable metering so input/output levels of the mixer can be viewed.
    UInt32 onValue = 1;
    Checkerror(AudioUnitSetProperty(mmixer,
                                    kAudioUnitProperty_MeteringMode,
                                    kAudioUnitScope_Input,
                                    0,
                                    &onValue,
                                    sizeof(onValue)),
               "error");

    // Larger max frames-per-slice lets the mixer cope with the bigger slice
    // size used when the screen is locked.
    UInt32 maximumFramesPerSlice = 4096;
    Checkerror(AudioUnitSetProperty(mmixer,
                                    kAudioUnitProperty_MaximumFramesPerSlice,
                                    kAudioUnitScope_Global,
                                    0,
                                    &maximumFramesPerSlice,
                                    sizeof(maximumFramesPerSlice)),
               "Couldn't set mixer units maximum framers per slice");

    // Set the canonical stereo format on the tone and reverb units.
    AudioUnitSetProperty(mTone,
                         kAudioUnitProperty_StreamFormat,
                         kAudioUnitScope_Output,
                         0,
                         &stereoStreamFormat864,
                         sizeof(AudioStreamBasicDescription));
    AudioUnitSetProperty(mReverb,
                         kAudioUnitProperty_StreamFormat,
                         kAudioUnitScope_Output,
                         0,
                         &stereoStreamFormat864,
                         sizeof(AudioStreamBasicDescription));

    // Initial reverb decay times (parameters 4 and 5 of Reverb2).
    AudioUnitParameterValue reverbTime = 2.5;
    AudioUnitSetParameter(mReverb, 4, kAudioUnitScope_Global, 0, reverbTime, 0);
    AudioUnitSetParameter(mReverb, 5, kAudioUnitScope_Global, 0, reverbTime, 0);

    // Copy the reverb unit's input format onto the mixer output so the
    // mixer -> reverb connection agrees on a format.
    AudioStreamBasicDescription auEffectStreamFormat;
    UInt32 asbdSize = sizeof(auEffectStreamFormat);
    memset(&auEffectStreamFormat, 0, sizeof(auEffectStreamFormat));
    Checkerror(AudioUnitGetProperty(mReverb,
                                    kAudioUnitProperty_StreamFormat,
                                    kAudioUnitScope_Input,
                                    0,
                                    &auEffectStreamFormat,
                                    &asbdSize),
               "Couldn't get aueffectunit ASBD");
    auEffectStreamFormat.mSampleRate = graphSampleRate;
    Checkerror(AudioUnitSetProperty(mmixer,
                                    kAudioUnitProperty_StreamFormat,
                                    kAudioUnitScope_Output,
                                    0,
                                    &auEffectStreamFormat,
                                    sizeof(auEffectStreamFormat)),
               "Couldn't set ASBD on mixer output");

    // Now allocate resources and initialize every unit in the graph.
    Checkerror(AUGraphInitialize(mGraph), "Couldn't Initialize the graph");

    [self setUpAUfilePlayer];
    [self setUpAUfilePlayer2];
}

//音频文件播放设置在这里我正在设置语音文件

// Schedules the voice file (testVoice.m4a) on the first file player unit and
// tracks the longest region in MaxSampleTime so the offline render loop knows
// how many frames to pull.
// NOTE(review): scope/element arguments were eaten by the scrape and have been
// reconstructed (ScheduledFile* properties live in the Global scope).
- (OSStatus)setUpAUfilePlayer
{
    NSString *songPath = [[NSBundle mainBundle] pathForResource:@"testVoice" ofType:@".m4a"];
    CFURLRef songURL = (CFURLRef)[NSURL fileURLWithPath:songPath];

    // Open the input audio file.
    Checkerror(AudioFileOpenURL(songURL, kAudioFileReadPermission, 0, &inputfile),
               "setUpAUfilePlayer AudiofileOpenURL Failed");

    // Get the audio data format from the file.
    AudioStreamBasicDescription fileASBD;
    UInt32 propSize = sizeof(fileASBD);
    Checkerror(AudioFileGetProperty(inputfile, kAudioFilePropertyDataFormat,
                                    &propSize, &fileASBD),
               "setUpAUfilePlayer Couldn't get file's data format");

    // Tell the file player unit which file to play.
    Checkerror(AudioUnitSetProperty(mfilePlayer,
                                    kAudioUnitProperty_ScheduledFileIDs,
                                    kAudioUnitScope_Global, 0,
                                    &inputfile, sizeof(inputfile)),
               "setUpAUfilePlayer AudioUnitSetProperty[kAudioUnitProperty_ScheduledfileIDs] Failed");

    UInt64 nPackets;
    UInt32 propsize = sizeof(nPackets);
    Checkerror(AudioFileGetProperty(inputfile, kAudioFilePropertyAudioDataPacketCount,
                                    &propsize, &nPackets),
               "setUpAUfilePlayer AudiofileGetProperty[kAudiofilePropertyAudioDataPacketCount] Failed");

    // Schedule the entire file as one region starting at sample time 0.
    ScheduledAudioFileRegion rgn;
    memset(&rgn.mTimeStamp, 0, sizeof(rgn.mTimeStamp));
    rgn.mTimeStamp.mFlags = kAudioTimeStampSampleTimeValid;
    rgn.mTimeStamp.mSampleTime = 0;
    rgn.mCompletionProc = NULL;
    rgn.mCompletionProcUserData = NULL;
    rgn.mAudioFile = inputfile;
    rgn.mLoopCount = -1;   // as in the original listing; the render loop is bounded by MaxSampleTime
    rgn.mStartFrame = 0;
    rgn.mFramesToPlay = nPackets * fileASBD.mFramesPerPacket;

    // Remember the longest file so pullGenericOutput renders the full mix.
    if (MaxSampleTime < rgn.mFramesToPlay) {
        MaxSampleTime = rgn.mFramesToPlay;
    }

    Checkerror(AudioUnitSetProperty(mfilePlayer,
                                    kAudioUnitProperty_ScheduledFileRegion,
                                    kAudioUnitScope_Global, 0,
                                    &rgn, sizeof(rgn)),
               "setUpAUfilePlayer1 AudioUnitSetProperty[kAudioUnitProperty_ScheduledfileRegion] Failed");

    // Prime the file player with default read-ahead.
    UInt32 defaultVal = 0;
    Checkerror(AudioUnitSetProperty(mfilePlayer,
                                    kAudioUnitProperty_ScheduledFilePrime,
                                    kAudioUnitScope_Global, 0,
                                    &defaultVal, sizeof(defaultVal)),
               "setUpAUfilePlayer AudioUnitSetProperty[kAudioUnitProperty_ScheduledfilePrime] Failed");

    // Start playing on the next render cycle (-1 sample time).
    AudioTimeStamp startTime;
    memset(&startTime, 0, sizeof(startTime));
    startTime.mFlags = kAudioTimeStampSampleTimeValid;
    startTime.mSampleTime = -1;
    Checkerror(AudioUnitSetProperty(mfilePlayer,
                                    kAudioUnitProperty_ScheduleStartTimeStamp,
                                    kAudioUnitScope_Global, 0,
                                    &startTime, sizeof(startTime)),
               "setUpAUfilePlayer AudioUnitSetProperty[kAudioUnitProperty_ScheduleStartTimeStamp]");
    return noErr;
}

//音频文件播放设置在这里我正在设置BGMusic文件

// Schedules the background-music file (BGmusic.mp3) on the second file player
// unit — mirrors setUpAUfilePlayer but targets mfilePlayer2/inputfile2.
// NOTE(review): most property IDs in this method were eaten by the scrape and
// were reconstructed from the voice-player method above.
- (OSStatus)setUpAUfilePlayer2
{
    NSString *songPath = [[NSBundle mainBundle] pathForResource:@"BGmusic" ofType:@".mp3"];
    CFURLRef songURL = (CFURLRef)[NSURL fileURLWithPath:songPath];

    // Open the input audio file.
    Checkerror(AudioFileOpenURL(songURL, kAudioFileReadPermission, 0, &inputfile2),
               "setUpAUfilePlayer2 AudiofileOpenURL Failed");

    // Get the audio data format from the file.
    AudioStreamBasicDescription fileASBD;
    UInt32 propSize = sizeof(fileASBD);
    Checkerror(AudioFileGetProperty(inputfile2, kAudioFilePropertyDataFormat,
                                    &propSize, &fileASBD),
               "setUpAUfilePlayer2 Couldn't get file's data format");

    // Tell the file player unit which file to play.
    Checkerror(AudioUnitSetProperty(mfilePlayer2,
                                    kAudioUnitProperty_ScheduledFileIDs,
                                    kAudioUnitScope_Global, 0,
                                    &inputfile2, sizeof(inputfile2)),
               "setUpAUfilePlayer2 AudioUnitSetProperty[kAudioUnitProperty_ScheduledfileIDs] Failed");

    UInt64 nPackets;
    UInt32 propsize = sizeof(nPackets);
    Checkerror(AudioFileGetProperty(inputfile2, kAudioFilePropertyAudioDataPacketCount,
                                    &propsize, &nPackets),
               "setUpAUfilePlayer2 AudiofileGetProperty[kAudiofilePropertyAudioDataPacketCount] Failed");

    // Schedule the entire file as one region starting at sample time 0.
    ScheduledAudioFileRegion rgn;
    memset(&rgn.mTimeStamp, 0, sizeof(rgn.mTimeStamp));
    rgn.mTimeStamp.mFlags = kAudioTimeStampSampleTimeValid;
    rgn.mTimeStamp.mSampleTime = 0;
    rgn.mCompletionProc = NULL;
    rgn.mCompletionProcUserData = NULL;
    rgn.mAudioFile = inputfile2;
    rgn.mLoopCount = -1;   // bounded in practice by MaxSampleTime in the render loop
    rgn.mStartFrame = 0;
    rgn.mFramesToPlay = nPackets * fileASBD.mFramesPerPacket;

    // Track the longest scheduled region across both players.
    if (MaxSampleTime < rgn.mFramesToPlay) {
        MaxSampleTime = rgn.mFramesToPlay;
    }

    Checkerror(AudioUnitSetProperty(mfilePlayer2,
                                    kAudioUnitProperty_ScheduledFileRegion,
                                    kAudioUnitScope_Global, 0,
                                    &rgn, sizeof(rgn)),
               "setUpAUfilePlayer2 AudioUnitSetProperty[kAudioUnitProperty_ScheduledfileRegion] Failed");

    // Prime the file player with default read-ahead.
    UInt32 defaultVal = 0;
    Checkerror(AudioUnitSetProperty(mfilePlayer2,
                                    kAudioUnitProperty_ScheduledFilePrime,
                                    kAudioUnitScope_Global, 0,
                                    &defaultVal, sizeof(defaultVal)),
               "setUpAUfilePlayer2 AudioUnitSetProperty[kAudioUnitProperty_ScheduledfilePrime] Failed");

    // Start playing on the next render cycle (-1 sample time).
    AudioTimeStamp startTime;
    memset(&startTime, 0, sizeof(startTime));
    startTime.mFlags = kAudioTimeStampSampleTimeValid;
    startTime.mSampleTime = -1;
    Checkerror(AudioUnitSetProperty(mfilePlayer2,
                                    kAudioUnitProperty_ScheduleStartTimeStamp,
                                    kAudioUnitScope_Global, 0,
                                    &startTime, sizeof(startTime)),
               "setUpAUfilePlayer2 AudioUnitSetProperty[kAudioUnitProperty_ScheduleStartTimeStamp]");
    return noErr;
}

//开始保存文件

// Creates the destination .m4a (AAC) file in Documents, configures the
// ExtAudioFile client format to match the GenericOutput unit's output, and
// kicks off the offline pull loop.
// NOTE(review): this listing uses manual retain/release ([... release]) —
// the original project is MRC, not ARC.
- (void)startRecordingAAC
{
    // Minimal AAC destination description; AudioFormatGetProperty fills in
    // the remaining fields from mFormatID/mChannelsPerFrame.
    AudioStreamBasicDescription destinationFormat;
    memset(&destinationFormat, 0, sizeof(destinationFormat));
    destinationFormat.mChannelsPerFrame = 2;
    destinationFormat.mFormatID = kAudioFormatMPEG4AAC;
    UInt32 size = sizeof(destinationFormat);
    OSStatus result = AudioFormatGetProperty(kAudioFormatProperty_FormatInfo,
                                             0, NULL, &size, &destinationFormat);
    if (result) printf("AudioFormatGetProperty %ld \n", (long)result);

    // Destination path: Documents/output.m4a
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *destinationfilePath = [[NSString alloc] initWithFormat:@"%@/output.m4a", documentsDirectory];
    CFURLRef destinationURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault,
                                                            (CFStringRef)destinationfilePath,
                                                            kCFURLPOSIXPathStyle,
                                                            false);
    [destinationfilePath release];

    // Create the output file (erasing any previous one), saving as .m4a.
    result = ExtAudioFileCreateWithURL(destinationURL,
                                       kAudioFileM4AType,
                                       &destinationFormat,
                                       NULL,
                                       kAudioFileFlags_EraseFile,
                                       &extAudiofile);
    if (result) printf("ExtAudiofileCreateWithURL %ld \n", (long)result);
    CFRelease(destinationURL);

    // Easiest way to get the correct client ASBD: read it straight off the
    // GenericOutput unit, then hand it to the ExtAudioFile so it converts
    // PCM -> AAC for us.
    AudioStreamBasicDescription clientFormat;
    UInt32 fSize = sizeof(clientFormat);
    memset(&clientFormat, 0, sizeof(clientFormat));
    Checkerror(AudioUnitGetProperty(mGIO,
                                    kAudioUnitProperty_StreamFormat,
                                    kAudioUnitScope_Output, 0,
                                    &clientFormat, &fSize),
               "AudioUnitGetProperty on Failed");
    Checkerror(ExtAudioFileSetProperty(extAudiofile,
                                       kExtAudioFileProperty_ClientDataFormat,
                                       sizeof(clientFormat), &clientFormat),
               "ExtAudiofileSetProperty kExtAudiofileProperty_ClIEntDataFormat Failed");

    // Prefer the hardware AAC codec.
    UInt32 codec = kAppleHardwareAudioCodecManufacturer;
    Checkerror(ExtAudioFileSetProperty(extAudiofile,
                                       kExtAudioFileProperty_CodecManufacturer,
                                       sizeof(codec), &codec),
               "ExtAudiofileSetProperty on extAudiofile Faild");

    // A 0-frame async write initializes the ExtAudioFile's async machinery.
    Checkerror(ExtAudioFileWriteAsync(extAudiofile, 0, NULL),
               "ExtAudiofileWriteAsync Failed");

    [self pullGenericOutput];
}

//从GenericOutput节点手动输入和获取数据/缓冲区.

// Offline render loop: manually pulls 512-frame slices out of the
// GenericOutput unit with AudioUnitRender and writes each slice to the
// ExtAudioFile, until MaxSampleTime frames have been rendered.
// Fixes over the original listing:
//  * totFrms is now always decremented — the original only decremented in the
//    else branch, so the final partial chunk was rendered and written twice.
//  * the render timestamp is advanced each iteration (required so successive
//    AudioUnitRender calls pull successive audio, not the same slice).
//  * the per-iteration AudioBufferList and its channel buffers are freed
//    (the original leaked them on every pass).
- (void)pullGenericOutput
{
    AudioUnitRenderActionFlags flags = 0;
    AudioTimeStamp inTimeStamp;
    memset(&inTimeStamp, 0, sizeof(AudioTimeStamp));
    inTimeStamp.mFlags = kAudioTimeStampSampleTimeValid;
    inTimeStamp.mSampleTime = 0;

    UInt32 busNumber = 0;
    UInt32 numberFrames = 512;          // slice size per pull
    int channelCount = 2;               // stereo, noninterleaved: one buffer per channel
    NSLog(@"Final numberFrames :%li", (long)numberFrames);

    int totFrms = MaxSampleTime;        // total frames still to render
    while (totFrms > 0) {
        // Clamp the last slice to the remaining frame count.
        if (totFrms < numberFrames) {
            numberFrames = totFrms;
            NSLog(@"Final numberFrames :%li", (long)numberFrames);
        }
        totFrms -= numberFrames;

        // One AudioBufferList with channelCount noninterleaved mono buffers.
        AudioBufferList *bufferList =
            (AudioBufferList *)malloc(sizeof(AudioBufferList) +
                                      sizeof(AudioBuffer) * (channelCount - 1));
        bufferList->mNumberBuffers = channelCount;
        for (int j = 0; j < channelCount; j++) {
            AudioBuffer buffer = {0};
            buffer.mNumberChannels = 1;
            buffer.mDataByteSize = numberFrames * sizeof(AudioUnitSampleType);
            buffer.mData = calloc(numberFrames, sizeof(AudioUnitSampleType));
            bufferList->mBuffers[j] = buffer;
        }

        // Pull one slice through the whole graph via the GenericOutput unit.
        Checkerror(AudioUnitRender(mGIO, &flags, &inTimeStamp, busNumber,
                                   numberFrames, bufferList),
                   "AudioUnitRender mGIO");

        // Synchronous write; the ExtAudioFile converts to AAC on the fly.
        Checkerror(ExtAudioFileWrite(extAudiofile, numberFrames, bufferList),
                   "extaudiofilewrite fail");

        // Advance the render timeline to the next slice.
        inTimeStamp.mSampleTime += numberFrames;

        // Release this slice's buffers (ExtAudioFileWrite has copied the data).
        for (int j = 0; j < channelCount; j++) {
            free(bufferList->mBuffers[j].mData);
        }
        free(bufferList);
    }

    [self filesSavingCompleted];
}

// filesSavingCompleted

// Finalizes and closes the destination file; ExtAudioFileDispose flushes any
// pending data before disposing. OSStatus is cast to long for the %ld format.
- (void)filesSavingCompleted
{
    OSStatus status = ExtAudioFileDispose(extAudiofile);
    printf("Osstatus(ExtAudiofiledispose): %ld\n", (long)status);
}
总结

以上是内存溢出为你收集整理的ios – 核心音频离线渲染GenericOutput全部内容,希望文章能够帮你解决ios – 核心音频离线渲染GenericOutput所遇到的程序开发问题。

如果觉得内存溢出网站内容还不错,欢迎将内存溢出网站推荐给程序员好友。

欢迎分享,转载请注明来源:内存溢出

原文地址:https://www.54852.com/web/1097034.html

(0)
打赏 微信扫一扫微信扫一扫 支付宝扫一扫支付宝扫一扫
上一篇 2022-05-28
下一篇2022-05-28

发表评论

登录后才能评论

评论列表(0条)

    保存