I have an AVPlayer that plays audio from a mono AVAsset, doing some processing along the way with an audio tap processor (MTAudioProcessingTap). How can I convert this asset to stereo before it reaches the tap? The second channel can be empty or a copy of the first channel (it will be filled manually in the tap).
I tried converting the mono signal to stereo inside the tap, but apparently we have no control over the ASBD or the AudioBufferList structure once we are inside the tap. I also tried a standalone conversion, but it comes with big drawbacks (it can be quite slow and is not suitable for web streaming).
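For reference, the standalone conversion was done along these lines, reading the track with AVAssetReader and asking AVAssetReaderAudioMixOutput for two output channels (a rough sketch, not my exact code; the settings and the loop body are abbreviated, and it only works once the whole file is available locally, which is exactly why it fails for web streaming):

NSError *error = nil;
AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
// Request linear PCM with two non-interleaved float channels.
NSDictionary *stereoSettings = @{AVFormatIDKey : @(kAudioFormatLinearPCM),
                                 AVNumberOfChannelsKey : @2,
                                 AVLinearPCMBitDepthKey : @32,
                                 AVLinearPCMIsFloatKey : @YES,
                                 AVLinearPCMIsBigEndianKey : @NO,
                                 AVLinearPCMIsNonInterleaved : @YES};
AVAssetReaderAudioMixOutput *output =
    [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:[self.asset tracksWithMediaType:AVMediaTypeAudio]
                                                            audioSettings:stereoSettings];
[reader addOutput:output];
[reader startReading];
CMSampleBufferRef sampleBuffer = NULL;
while ((sampleBuffer = [output copyNextSampleBuffer])) {
    // ...append the stereo samples to a new file, then play that file...
    CFRelease(sampleBuffer);
}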
Here is bare-bones (but complete) code that you can use with any mono file. You will see that by the time the audio hits the tap, it carries only one channel instead of the desired two. To use the code, add the MediaPlayer and TapProcessor classes below to an empty single-view application, replace the default ViewController code with the ViewController code below, and add a mono file of your own to the project. Thanks for reading.
MediaPlayer.h
#import <Foundation/Foundation.h>
@interface MediaPlayer : NSObject
@end
MediaPlayer.m
#import "MediaPlayer.h"
#import "TapProcessor.h"
#import <AVFoundation/AVFoundation.h>
@interface MediaPlayer()
@property (nonatomic, strong) AVAsset *asset;
@property (nonatomic, strong) AVPlayer *player;
@property (nonatomic, strong) TapProcessor *audioTapProcessor;
@end
@implementation MediaPlayer
- (id)init {
    if (self = [super init]) {
        NSString *path = [[NSBundle mainBundle] pathForResource:@"MonoSource"
                                                         ofType:@"mp3"];
        [self loadFileWithPath:path];
    }
    return self;
}
- (void)loadFileWithPath:(NSString *)path {
    NSURL *fileURL = [NSURL fileURLWithPath:path];
    NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
                                                        forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    self.asset = [AVURLAsset URLAssetWithURL:fileURL options:options];
    [self.asset loadValuesAsynchronouslyForKeys:@[@"tracks"] completionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            AVKeyValueStatus status = [self.asset statusOfValueForKey:@"tracks" error:nil];
            switch (status) {
                case AVKeyValueStatusLoaded:
                    [self setupPlayer];
                    break;
                default:
                    break;
            }
        });
    }];
}
- (void)setupPlayer {
    AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:self.asset];
    AVAssetTrack *audioTrack = [[self.asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [self printInfoForTrack:audioTrack];
    // Attach the tap to the player item through an AVAudioMix.
    TapProcessor *newProcessor = [[TapProcessor alloc] initWithTrack:audioTrack];
    AVAudioMix *audioMix = [newProcessor audioMix];
    item.audioMix = audioMix;
    self.player = [AVPlayer playerWithPlayerItem:item];
    [self.player play];
}
- (void)printInfoForTrack:(AVAssetTrack *)track {
    CMAudioFormatDescriptionRef item = (__bridge CMAudioFormatDescriptionRef)[track.formatDescriptions objectAtIndex:0];
    const AudioStreamBasicDescription *desc = CMAudioFormatDescriptionGetStreamBasicDescription(item);
    NSLog(@"Number of track channels: %u", (unsigned int)desc->mChannelsPerFrame);
}
@end
TapProcessor.h
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
@interface TapProcessor : NSObject
- (id)initWithTrack:(AVAssetTrack *)track;
@property (readonly, nonatomic) AVAssetTrack *track;
@property (readonly, nonatomic) AVAudioMix *audioMix;
@end
TapProcessor.m
#import "TapProcessor.h"
static void tap_InitCallback(MTAudioProcessingTapRef tap,
                             void *clientInfo,
                             void **tapStorageOut) {
}
static void tap_FinalizeCallback(MTAudioProcessingTapRef tap) {
}
static void tap_PrepareCallback(MTAudioProcessingTapRef tap,
                                CMItemCount maxFrames,
                                const AudioStreamBasicDescription *processingFormat) {
    // Logs 1 for a mono source, where 2 is wanted.
    NSLog(@"Number of tap channels: %u", (unsigned int)processingFormat->mChannelsPerFrame);
}
static void tap_UnprepareCallback(MTAudioProcessingTapRef tap) {
}
static void tap_ProcessCallback(MTAudioProcessingTapRef tap,
                                CMItemCount numberFrames,
                                MTAudioProcessingTapFlags flags,
                                AudioBufferList *bufferListInOut,
                                CMItemCount *numberFramesOut,
                                MTAudioProcessingTapFlags *flagsOut) {
    // Straight pass-through for now; the per-channel processing would go
    // here, which is why two buffers are needed.
    MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut, NULL, NULL, NULL);
    *numberFramesOut = numberFrames;
}
@implementation TapProcessor
- (id)initWithTrack:(AVAssetTrack *)track {
    self = [super init];
    if (self) {
        _track = track;
    }
    return self;
}
@synthesize audioMix = _audioMix;
- (AVAudioMix *)audioMix {
    if (!_audioMix) {
        AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
        if (audioMix) {
            AVMutableAudioMixInputParameters *audioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:self.track];
            if (audioMixInputParameters) {
                MTAudioProcessingTapCallbacks callbacks;
                callbacks.version = kMTAudioProcessingTapCallbacksVersion_0;
                callbacks.clientInfo = (__bridge void *)self;
                callbacks.init = tap_InitCallback;
                callbacks.finalize = tap_FinalizeCallback;
                callbacks.prepare = tap_PrepareCallback;
                callbacks.unprepare = tap_UnprepareCallback;
                callbacks.process = tap_ProcessCallback;
                MTAudioProcessingTapRef audioProcessingTap;
                if (noErr == MTAudioProcessingTapCreate(kCFAllocatorDefault,
                                                        &callbacks,
                                                        kMTAudioProcessingTapCreationFlag_PreEffects,
                                                        &audioProcessingTap)) {
                    audioMixInputParameters.audioTapProcessor = audioProcessingTap;
                    CFRelease(audioProcessingTap);
                    audioMix.inputParameters = @[audioMixInputParameters];
                    _audioMix = audioMix;
                }
            }
        }
    }
    return _audioMix;
}
@end
ViewController.h
#import <UIKit/UIKit.h>
@interface ViewController : UIViewController
@end
ViewController.m
#import "ViewController.h"
#import "MediaPlayer.h"
@interface ViewController ()
@property (nonatomic,strong) MediaPlayer *mediaPlayer;
@end
@implementation ViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    self.mediaPlayer = [[MediaPlayer alloc] init];
}
@end
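For completeness, the manual fill mentioned at the top would simply copy the first channel into the second inside tap_ProcessCallback, roughly like this (a sketch assuming the tap's usual non-interleaved float buffers, and assuming it ever receives two of them):

MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut, NULL, NULL, NULL);
if (bufferListInOut->mNumberBuffers == 2) {
    // Duplicate channel 0 into channel 1 before the real processing.
    memcpy(bufferListInOut->mBuffers[1].mData,
           bufferListInOut->mBuffers[0].mData,
           bufferListInOut->mBuffers[0].mDataByteSize);
}
*numberFramesOut = numberFrames;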