How to detect a blow into the microphone with Unity and iOS
I'm aware that there are several threads about the same problem, but I haven't been able to make their solutions work. I ended up creating this class:
MicroController.h
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreAudio/CoreAudioTypes.h>
#import <UIKit/UIKit.h>
@interface MicroController : UIView < UITextFieldDelegate, UIImagePickerControllerDelegate, UINavigationControllerDelegate, AVAudioSessionDelegate > {
AVAudioRecorder *recorder;
NSTimer *levelTimer;
double lowPassResults;
}
- (void)levelTimerCallback:(NSTimer *)timer;
@end
MicroController.mm
#import "MicroController.h"
@implementation MicroController
- (id)init
{
NSURL *url = [NSURL fileURLWithPath:@"/dev/null"];
NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
[NSNumber numberWithInt: kAudioFormatAppleLossless], AVFormatIDKey,
[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
[NSNumber numberWithInt: AVAudioQualityMax], AVEncoderAudioQualityKey,
nil];
NSError *error = nil;
recorder = [[AVAudioRecorder alloc] initWithURL:url settings:settings error:&error];
if (recorder) {
recorder.meteringEnabled = YES;
if ([recorder prepareToRecord]) {
[recorder record];
} else {
int errorCode = CFSwapInt32HostToBig([error code]);
NSLog(@"Error: %@ [%4.4s]", [error localizedDescription], (char *)&errorCode);
}
levelTimer = [NSTimer scheduledTimerWithTimeInterval: 0.03 target: self selector: @selector(levelTimerCallback:) userInfo: nil repeats: YES];
} else {
NSLog(@"%@", [error description]);
}
// input 'level' is in meter.mAveragePower
return self;
}
- (void)levelTimerCallback:(NSTimer *)timer {
[recorder updateMeters];
const double ALPHA = 1.0; // 0.05f
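// Convert the dB reading (0 = full scale, -160 = silence) to a linear 0..1 value,
// then smooth it with an exponential low-pass filter; ALPHA = 1.0 disables the smoothing.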
double peakPowerForChannel = pow(10, (0.05 * [recorder peakPowerForChannel:0]));
lowPassResults = ALPHA * peakPowerForChannel + (1.0 - ALPHA) * lowPassResults;
if (lowPassResults > 0.55)
NSLog(@"Mic blow detected");
NSLog(@"Average input: %f Peak input: %f", [recorder averagePowerForChannel:0], [recorder peakPowerForChannel:0]);
}
- (void)viewDidLoad {
[super viewDidLoad];
NSLog(@"initiated");
NSURL *url = [NSURL fileURLWithPath:@"/dev/null"];
NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
[NSNumber numberWithInt: kAudioFormatAppleLossless], AVFormatIDKey,
[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
[NSNumber numberWithInt: AVAudioQualityMax], AVEncoderAudioQualityKey,
nil];
NSError *error;
recorder = [[AVAudioRecorder alloc] initWithURL:url settings:settings error:&error];
if (recorder) {
[recorder prepareToRecord];
recorder.meteringEnabled = YES;
[recorder record];
levelTimer = [NSTimer scheduledTimerWithTimeInterval: 0.03 target: self selector: @selector(levelTimerCallback:) userInfo: nil repeats: YES];
} else
NSLog(@"%@", [error description]);
}
- (void)dealloc {
[levelTimer release];
[recorder release];
[super dealloc];
}
@end
But I can't get the microphone to work. I'm only getting this output, and the microphone never responds:
Average input: -120.000000 Peak input: -120.000000
Any idea what could be wrong?
Thanks for your help!
Well, I didn't think it was important to mention that I was working with Unity, but it turned out to be the culprit. Unity modifies some audio session settings, so it's important to do this when you load your view controller:
NSError *setCategoryError = nil;
[[AVAudioSession sharedInstance]
setCategory: AVAudioSessionCategoryPlayAndRecord
error: &setCategoryError];
if (setCategoryError) {
NSLog(@"%@", [setCategoryError description]);
}
Otherwise a perfectly written audio recorder won't work correctly. And that was it... problem solved!
This bug was a PAIN to track down; I hope this message helps anyone else in the same predicament.
OK, here is the solution. File "MicController.m":
#import "MicController.h"
static MicController *sharedListener = nil;
@implementation MicController
+ (MicController *)sharedListener {
@synchronized(self) {
if (sharedListener == nil)
[[self alloc] init];
}
return sharedListener;
}
- (void)dealloc {
//[sharedListener stop];
//[levelTimer release];
[recorder release];
[super dealloc];
}
#pragma mark -
#pragma mark Listening
- (void)listen {
[[AVAudioSession sharedInstance]
setCategory: AVAudioSessionCategoryPlayAndRecord
error: nil];
NSURL *url = [NSURL fileURLWithPath:@"/dev/null"];
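// Recording to /dev/null: we only care about the level meters, not the recorded audio.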
//kAudioFormatAppleIMA4
//kAudioFormatMPEG4AAC
/*
NSMutableDictionary *settings = [NSMutableDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
[NSNumber numberWithInt: kAudioFormatAppleLossless], AVFormatIDKey,
[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
[NSNumber numberWithInt: AVAudioQualityMax], AVEncoderAudioQualityKey,
nil];
*/
NSDictionary *settings = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
[NSNumber numberWithInt: kAudioFormatAppleLossless], AVFormatIDKey,
[NSNumber numberWithInt: 2], AVNumberOfChannelsKey,
[NSNumber numberWithInt: AVAudioQualityMax], AVEncoderAudioQualityKey,
nil];
/*
[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
[NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInt: 2], AVNumberOfChannelsKey,
[NSNumber numberWithInt: AVAudioQualityMax], AVEncoderAudioQualityKey,
[NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
nil];
*/
NSError *error;
recorder = [[AVAudioRecorder alloc] initWithURL:url settings:settings error:&error];
if (recorder) {
[recorder prepareToRecord];
recorder.meteringEnabled = YES;
[recorder record];
//levelTimer = [NSTimer scheduledTimerWithTimeInterval: 0.03 target: self selector: @selector(levelTimerCallback:) userInfo: nil repeats: YES];
} else
NSLog(@"%@", [error description]);
}
- (void)stop {
[recorder stop];
[recorder release];
recorder = nil;
}
/*
- (void)levelTimerCallback:(NSTimer *)timer {
[recorder updateMeters];
const double ALPHA = 0.05;
double peakPowerForChannel = pow(10, (0.05 * [recorder peakPowerForChannel:0]));
lowPassResults = ALPHA * peakPowerForChannel + (1.0 - ALPHA) * lowPassResults;
//if (lowPassResults < 0.95)
//NSLog(@"Mic blow detected");
NSLog(@"Average input: %f Peak input: %f Low pass results: %f", [recorder averagePowerForChannel:0], [recorder peakPowerForChannel:0], lowPassResults);
}
*/
#pragma mark -
#pragma mark Levels getters
- (Float32)averagePower {
[recorder updateMeters];
const double ALPHA = 0.7;
double peakPowerForChannel = pow(10, (0.05 * [recorder averagePowerForChannel:0]));
lowPassResults = ALPHA * peakPowerForChannel + (1.0 - ALPHA) * lowPassResults;
return [recorder averagePowerForChannel:0];
//return lowPassResults;
//NSLog(@"Average input: %f Peak input: %f Low pass results: %f", [recorder averagePowerForChannel:0], [recorder peakPowerForChannel:0], lowPassResults);
}
- (Float32)peakPower {
[recorder updateMeters];
const double ALPHA = 0.7;
double peakPowerForChannel = pow(10, (0.05 * [recorder peakPowerForChannel:0]));
lowPassResults = ALPHA * peakPowerForChannel + (1.0 - ALPHA) * lowPassResults;
return [recorder peakPowerForChannel:0];
//return lowPassResults;
//NSLog(@"Average input: %f Peak input: %f Low pass results: %f", [recorder averagePowerForChannel:0], [recorder peakPowerForChannel:0], lowPassResults);
}
#pragma mark -
#pragma mark Singleton Pattern
+ (id)allocWithZone:(NSZone *)zone {
@synchronized(self) {
if (sharedListener == nil) {
sharedListener = [super allocWithZone:zone];
return sharedListener;
}
}
return nil;
}
- (id)copyWithZone:(NSZone *)zone {
return self;
}
- (id)init {
if ((self = [super init]) == nil)
return nil;
return self;
}
- (id)retain {
return self;
}
- (unsigned)retainCount {
return UINT_MAX;
}
- (void)release {
// Do nothing.
}
- (id)autorelease {
return self;
}
@end
Just add the code
[[AVAudioSession sharedInstance]
setCategory: AVAudioSessionCategoryPlayAndRecord
error: nil];
at the top of the "- (void)listen" method.
Thanks. Bye
I have the same problem, except in my case I'm using Titanium Appcelerator. The class works correctly in the simulator, but not on the device: Average input = -120. Here is the code:
//
// MicController.h
// Mic
//
// Created by DekWilde on 10/26/11.
// Copyright 2011 DekWilde. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreAudio/CoreAudioTypes.h>
@interface MicController : NSObject {
AVAudioRecorder *recorder;
//NSTimer *levelTimer;
double lowPassResults;
}
+ (MicController *)sharedListener;
- (void)listen;
- (void)stop;
//- (void)levelTimerCallback:(NSTimer *)timer;
- (Float32)averagePower;
- (Float32)peakPower;
@end
and the MicController.m
#import "MicController.h"
static MicController *sharedListener = nil;
@implementation MicController
+ (MicController *)sharedListener {
@synchronized(self) {
if (sharedListener == nil)
[[self alloc] init];
}
return sharedListener;
}
- (void)dealloc {
//[sharedListener stop];
//[levelTimer release];
[recorder release];
[super dealloc];
}
#pragma mark -
#pragma mark Listening
- (void)listen {
NSURL *url = [NSURL fileURLWithPath:@"/dev/null"];
//kAudioFormatAppleIMA4
//kAudioFormatMPEG4AAC
/*
NSMutableDictionary *settings = [NSMutableDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
[NSNumber numberWithInt: kAudioFormatAppleLossless], AVFormatIDKey,
[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
[NSNumber numberWithInt: AVAudioQualityMax], AVEncoderAudioQualityKey,
nil];
*/
NSDictionary *settings = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
[NSNumber numberWithInt: kAudioFormatAppleLossless], AVFormatIDKey,
[NSNumber numberWithInt: 2], AVNumberOfChannelsKey,
[NSNumber numberWithInt: AVAudioQualityMax], AVEncoderAudioQualityKey,
nil];
/*
[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
[NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInt: 2], AVNumberOfChannelsKey,
[NSNumber numberWithInt: AVAudioQualityMax], AVEncoderAudioQualityKey,
[NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
nil];
*/
NSError *error;
recorder = [[AVAudioRecorder alloc] initWithURL:url settings:settings error:&error];
if (recorder) {
[recorder prepareToRecord];
recorder.meteringEnabled = YES;
[recorder record];
//levelTimer = [NSTimer scheduledTimerWithTimeInterval: 0.03 target: self selector: @selector(levelTimerCallback:) userInfo: nil repeats: YES];
} else
NSLog(@"%@", [error description]);
}
- (void)stop {
[recorder stop];
[recorder release];
recorder = nil;
}
/*
- (void)levelTimerCallback:(NSTimer *)timer {
[recorder updateMeters];
const double ALPHA = 0.05;
double peakPowerForChannel = pow(10, (0.05 * [recorder peakPowerForChannel:0]));
lowPassResults = ALPHA * peakPowerForChannel + (1.0 - ALPHA) * lowPassResults;
//if (lowPassResults < 0.95)
//NSLog(@"Mic blow detected");
NSLog(@"Average input: %f Peak input: %f Low pass results: %f", [recorder averagePowerForChannel:0], [recorder peakPowerForChannel:0], lowPassResults);
}
*/
#pragma mark -
#pragma mark Levels getters
- (Float32)averagePower {
[recorder updateMeters];
const double ALPHA = 0.7;
double peakPowerForChannel = pow(10, (0.05 * [recorder averagePowerForChannel:0]));
lowPassResults = ALPHA * peakPowerForChannel + (1.0 - ALPHA) * lowPassResults;
return [recorder averagePowerForChannel:0];
//return lowPassResults;
//NSLog(@"Average input: %f Peak input: %f Low pass results: %f", [recorder averagePowerForChannel:0], [recorder peakPowerForChannel:0], lowPassResults);
}
- (Float32)peakPower {
[recorder updateMeters];
const double ALPHA = 0.7;
double peakPowerForChannel = pow(10, (0.05 * [recorder peakPowerForChannel:0]));
lowPassResults = ALPHA * peakPowerForChannel + (1.0 - ALPHA) * lowPassResults;
return [recorder peakPowerForChannel:0];
//return lowPassResults;
//NSLog(@"Average input: %f Peak input: %f Low pass results: %f", [recorder averagePowerForChannel:0], [recorder peakPowerForChannel:0], lowPassResults);
}
#pragma mark -
#pragma mark Singleton Pattern
+ (id)allocWithZone:(NSZone *)zone {
@synchronized(self) {
if (sharedListener == nil) {
sharedListener = [super allocWithZone:zone];
return sharedListener;
}
}
return nil;
}
- (id)copyWithZone:(NSZone *)zone {
return self;
}
- (id)init {
if ((self = [super init]) == nil)
return nil;
return self;
}
- (id)retain {
return self;
}
- (unsigned)retainCount {
return UINT_MAX;
}
//- (void)release {
// Do nothing.
//}
- (id)autorelease {
return self;
}
@end
My question is: where do I need to put the code you posted as the solution to this problem? How can I solve it?
Now that Unity 3.5 has added support for Microphone input, I suggest you do it in Unity so that you don't have to maintain the Cocoa code, and so it's easier to port this to Android/Web/Native in the future.
There are already some useful posts about this:
http://forum.unity3d.com/threads/123036-iOS-Microphone-input
http://forum.unity3d.com/threads/118215-Blow-detection-(Using-iOS-Microphone)
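For reference, here is a minimal C# sketch along the lines of those threads, using Unity's Microphone API and the same low-pass filtering idea as the Objective-C code above. The class name, the 128-sample window, and the 0.1 threshold are illustrative guesses rather than a definitive implementation; you'll have to tune them on a real device.

// BlowDetector.cs -- attach to any GameObject.
// Samples the default microphone every frame, computes the RMS of the most
// recent window, smooths it with a low-pass filter, and logs when it crosses
// a threshold. The constants are guesses; tune them on a device.
using UnityEngine;

public class BlowDetector : MonoBehaviour
{
    const float ALPHA = 0.05f;          // low-pass factor, like the Obj-C code
    const float BLOW_THRESHOLD = 0.1f;  // illustrative; adjust for your mic and game
    const int SAMPLE_WINDOW = 128;      // samples examined per frame

    AudioClip micClip;
    float lowPassResults;

    void Start()
    {
        // null = default microphone; 10-second looping buffer at 44.1 kHz
        micClip = Microphone.Start(null, true, 10, 44100);
    }

    void Update()
    {
        if (micClip == null) return;

        int micPosition = Microphone.GetPosition(null) - SAMPLE_WINDOW;
        if (micPosition < 0) return; // not enough data captured yet

        float[] samples = new float[SAMPLE_WINDOW];
        micClip.GetData(samples, micPosition);

        // RMS level of the latest window
        float sum = 0f;
        for (int i = 0; i < samples.Length; i++)
            sum += samples[i] * samples[i];
        float rms = Mathf.Sqrt(sum / SAMPLE_WINDOW);

        // Same exponential low-pass filter as the AVAudioRecorder version
        lowPassResults = ALPHA * rms + (1f - ALPHA) * lowPassResults;

        if (lowPassResults > BLOW_THRESHOLD)
            Debug.Log("Mic blow detected");
    }

    void OnDestroy()
    {
        Microphone.End(null);
    }
}

A simple level threshold like this will also fire on loud speech; a blow typically shows up as a sustained rise in level, so requiring the threshold to be exceeded for several consecutive frames helps cut down false positives.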