Correctly stopping AVAudioPlayer. Currently runs in background - objective-c

I've been struggling with this problem for quite a while.
I'm trying to make a player that contains a playlist and plays only the music files stored inside the application.
Now, I'm using AVAudioPlayer to play the mp3 files. My problem is that when I load the playlist I can't stop the previous mp3 from playing, and therefore I get a mixture of sounds between the first audio file and all the other audio files that I load from the playlist.
So, here is my code:
#import "MainViewController.h"
#import <Foundation/Foundation.h>
#pragma mark Audio session callbacks_______________________
@implementation MainViewController
@synthesize artworkItem;
@synthesize userMediaItemCollection;
@synthesize playBarButton;
@synthesize pauseBarButton;
@synthesize musicPlayer;
@synthesize navigationBar;
@synthesize noArtworkImage;
@synthesize backgroundColorTimer;
@synthesize nowPlayingLabel, AudFile;
@synthesize appSoundButton;
@synthesize addOrShowMusicButton;
@synthesize appSoundPlayer;
@synthesize soundFileURL;
@synthesize interruptedOnPlayback;
@synthesize playedMusicOnce;
@synthesize playing;
#pragma mark Music control________________________________
- (IBAction) playOrPauseMusic: (id)sender {
}
- (IBAction) AddMusicOrShowMusic: (id) sender {
[audioPlayer stop];
// Load the playlist directly
MusicTableViewController *controller = [[MusicTableViewController alloc] initWithNibName: @"MusicTableView" bundle: nil];
controller.delegate = self;
controller.modalTransitionStyle = UIModalTransitionStyleCoverVertical;
[self presentModalViewController: controller animated: YES];
[controller release];
}
#pragma mark Application playback control_________________
- (IBAction) playAppSound: (id) sender {
// [appSoundPlayer play];
// playing = YES;
// [appSoundButton setEnabled: NO];
}
// delegate method for the audio route change alert view; follows the protocol specified
// in the UIAlertViewDelegate protocol.
- (void) alertView: routeChangeAlertView clickedButtonAtIndex: buttonIndex {
if ((NSInteger) buttonIndex == 1) {
[appSoundPlayer play];
} else {
[appSoundPlayer setCurrentTime: 0];
[appSoundButton setEnabled: YES];
}
[routeChangeAlertView release];
}
#pragma mark AV Foundation delegate methods____________
- (void) audioPlayerDidFinishPlaying: (AVAudioPlayer *) appSoundPlayer successfully: (BOOL) flag {
}
- (void) audioPlayerBeginInterruption: player {
}
- (void) audioPlayerEndInterruption: player {
}
#pragma mark Table view delegate methods________________
// Invoked when the user taps the Done button in the table view.
- (void) musicTableViewControllerDidFinish: (MusicTableViewController *) controller {
[self dismissModalViewControllerAnimated: YES];
}
#pragma mark Application setup____________________________
#if TARGET_IPHONE_SIMULATOR
#warning *** Simulator mode: iPod library access works only when running on a device.
#endif
//play audio 2
- (void)playAudio2:(NSString*) name type:(NSString*)type
{
[self stopAudio];
[audioPlayer stop];
[audioPlayer release];
NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle]
pathForResource:name
ofType:type]];
NSError *error;
audioPlayer = [[AVAudioPlayer alloc]
initWithContentsOfURL:url
error:&error];
if (error)
{
NSLog(#"Error in audioPlayer: %#",
[error localizedDescription]);
} else {
audioPlayer.delegate = self;
[audioPlayer prepareToPlay];
}
[audioPlayer play];
}
-(void)playAudio
{
[appSoundPlayer play];
}
- (void)stopAudio
{
[appSoundPlayer stop];
}
// Configure the application.
- (void) viewDidLoad {
[super viewDidLoad];
[self setPlayedMusicOnce: NO];
[self setNoArtworkImage: [UIImage imageNamed: @"no_artwork.png"]];
[self setPlayBarButton: [[UIBarButtonItem alloc] initWithBarButtonSystemItem: UIBarButtonSystemItemPlay
target: self
action: @selector (playOrPauseMusic:)]];
[self setPauseBarButton: [[UIBarButtonItem alloc] initWithBarButtonSystemItem: UIBarButtonSystemItemPause
target: self
action: @selector (playOrPauseMusic:)]];
[addOrShowMusicButton setTitle: NSLocalizedString (@"Add Music", @"Title for 'Add Music' button, before user has chosen some music")
forState: UIControlStateNormal];
[appSoundButton setTitle: NSLocalizedString (@"Play App Sound", @"Title for 'Play App Sound' button")
forState: UIControlStateNormal];
[nowPlayingLabel setText: NSLocalizedString (@"Instructions", @"Brief instructions to user, shown at launch")];
// Configure a timer to change the background color. The changing color represents an
// application that is doing something else while iPod music is playing.
[self setBackgroundColorTimer: [NSTimer scheduledTimerWithTimeInterval: 3.5
target: self
selector: @selector (updateBackgroundColor)
userInfo: nil
repeats: YES]];
[self setupAudioSession];
}
// Invoked by the backgroundColorTimer.
- (void) updateBackgroundColor {
[UIView beginAnimations: nil context: nil];
[UIView setAnimationDuration: 3.0];
CGFloat redLevel = rand() / (float) RAND_MAX;
CGFloat greenLevel = rand() / (float) RAND_MAX;
CGFloat blueLevel = rand() / (float) RAND_MAX;
self.view.backgroundColor = [UIColor colorWithRed: redLevel
green: greenLevel
blue: blueLevel
alpha: 1.0];
[UIView commitAnimations];
}
#pragma mark Application state management_____________
- (void) didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
- (void) viewDidUnload {
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
-(void) setupAudioSession
{
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
NSError *setCategoryError = nil;
[audioSession setCategory:AVAudioSessionCategoryPlayback error:&setCategoryError];
if (setCategoryError){ /* Handle error condition */ }
NSError *activationError = nil;
[audioSession setActive:YES error:&activationError];
if (activationError){
/*handle error*/
}
}
// just for testing, delete later
-(void) myInstanceMethod: (id)sender{
[self setupAudioSession];
[audioPlayer stop];
[audioPlayer release];
AudFile = sender;
NSLog(#"My Instance metthod called ok with item : %#", sender);
[self playAudio2:[NSString stringWithFormat:#"%#",AudFile] type:#"mp3"];
//[self setupApplicationAudio: sender];
NSLog(#"New Audfile is: %#", AudFile);
// [self setupApplicationAudio];
}
+(void) myClassMethod{
NSLog(#"Class Called ok");
}
- (void)dealloc {
/*
// This sample doesn't use library change notifications; this code is here to show how
// it's done if you need it.
[[NSNotificationCenter defaultCenter] removeObserver: self
name: MPMediaLibraryDidChangeNotification
object: musicPlayer];
[[MPMediaLibrary defaultMediaLibrary] endGeneratingLibraryChangeNotifications];
*/
[[NSNotificationCenter defaultCenter] removeObserver: self
name: MPMusicPlayerControllerNowPlayingItemDidChangeNotification
object: musicPlayer];
[[NSNotificationCenter defaultCenter] removeObserver: self
name: MPMusicPlayerControllerPlaybackStateDidChangeNotification
object: musicPlayer];
[musicPlayer endGeneratingPlaybackNotifications];
[musicPlayer release];
[artworkItem release];
[backgroundColorTimer invalidate];
[backgroundColorTimer release];
[navigationBar release];
[noArtworkImage release];
[nowPlayingLabel release];
[pauseBarButton release];
[playBarButton release];
[soundFileURL release];
[userMediaItemCollection release];
[super dealloc];
}
I am sending the file name from my playlist to my player through:
-(void) myInstanceMethod: (id)sender{
[self setupAudioSession];
[audioPlayer stop];
[audioPlayer release];
AudFile = sender;
NSLog(#"My Instance metthod called ok with item : %#", sender);
[self playAudio2:[NSString stringWithFormat:#"%#",AudFile] type:#"mp3"];
//[self setupApplicationAudio: sender];
NSLog(#"New Audfile is: %#", AudFile);
// [self setupApplicationAudio];
}
and I load my playlist from my MainViewController using a modal transition in the method:
- (IBAction) AddMusicOrShowMusic: (id) sender {
[audioPlayer stop];
// Load the playlist directly
MusicTableViewController *controller = [[MusicTableViewController alloc] initWithNibName: @"MusicTableView" bundle: nil];
controller.delegate = self;
controller.modalTransitionStyle = UIModalTransitionStyleCoverVertical;
[self presentModalViewController: controller animated: YES];
[controller release];
}
As you can see, when I load my playlist I tell the player to stop playing via
[audioPlayer stop];
but for some reason it fails to stop playing, therefore creating an overlap of sounds.
This is the problem I'm trying to figure out.
Thank you
and best regards!

Related

Take a picture in iOS without UIImagePicker and without previewing it

Do you know any way/method to take a photo in iOS and save it to the Camera Roll with a simple button press, without showing any preview?
I already know how to show the camera view, but it shows the preview of the image and the user needs to tap the take-photo button to take the photo.
In a few words: the user taps the button, the picture is taken, without previews or double checks to take/save the photo.
I already found the takePicture method of the UIImagePickerController class: http://developer.apple.com/library/ios/documentation/uikit/reference/UIImagePickerController_Class/UIImagePickerController/UIImagePickerController.html#//apple_ref/occ/instm/UIImagePickerController/takePicture
Set the showsCameraControls property to NO.
poc = [[UIImagePickerController alloc] init];
[poc setTitle:#"Take a photo."];
[poc setDelegate:self];
[poc setSourceType:UIImagePickerControllerSourceTypeCamera];
poc.showsCameraControls = NO;
You also have to add your own controls as a custom view on top of poc.view. That is very simple, and you can apply your own UI style that way.
You receive the image data as usual within imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:
To take the photo, you call
[poc takePicture];
from your custom button.
Hope, that works for you.
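For reference, here's a minimal sketch of that delegate method, assuming you just want to write the captured frame straight to the Camera Roll:
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
    // Grab the captured frame and write it to the Camera Roll.
    UIImage *image = [info objectForKey:UIImagePickerControllerOriginalImage];
    UIImageWriteToSavedPhotosAlbum(image, nil, nil, NULL);
    // Dismiss the picker or leave it up, depending on your flow.
}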
Assuming you want a point-and-shoot method, you can create an AVCaptureSession and just call the UIImageWriteToSavedPhotosAlbum function.
Here is a link that goes into that exact process: http://www.musicalgeometry.com/?p=1297
It's also worth noting that your users need to have given the app access to their camera roll or you may experience issues saving the images.
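A rough sketch of the capture-and-save step, assuming stillImageOutput is an AVCaptureStillImageOutput already attached to a running AVCaptureSession:
AVCaptureConnection *connection = [stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[stillImageOutput captureStillImageAsynchronouslyFromConnection:connection
                                               completionHandler:^(CMSampleBufferRef sampleBuffer, NSError *error) {
    if (sampleBuffer) {
        // Convert the sample buffer to JPEG data, then to a UIImage, and save it.
        NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:jpegData];
        UIImageWriteToSavedPhotosAlbum(image, nil, nil, NULL); // lands in the Camera Roll
    }
}];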
You need to design your own custom preview according to your size; when the capture button is pressed, call the buttonPressed: method and do whatever you want:
- (void)buttonPressed:(UIButton *)sender {
NSLog(@"Capture clicked");
[self.imagePicker takePicture];
//[NSTimer scheduledTimerWithTimeInterval:3.0f target:self
// selector:@selector(timerFired:) userInfo:nil repeats:NO];
}
The following is code that will take a photo without showing a preview screen. When I tried the accepted answer, which uses UIImagePickerController, the preview screen showed, then auto-disappeared. With the code below, the user taps the 'takePhoto' button, and the device takes the photo with zero change to the UI (in my app, I add a green check mark next to the take-photo button). The code below is from Apple, https://developer.apple.com/LIBRARY/IOS/samplecode/AVCam/Introduction/Intro.html, with the 'extra functions' (that do not relate to taking a still photo) commented out. Thank you incmiko for suggesting this code in your answer iOS take photo from camera without modalViewController.
Updated code, 26 Mar 2015:
To trigger the photo snap:
[self snapStillImage:sender];
in .h file:
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
// include code below in the header file, after the #import statements and before @interface
// avfoundation copy paste code
static void * CapturingStillImageContext = &CapturingStillImageContext;
static void * RecordingContext = &RecordingContext;
static void * SessionRunningAndDeviceAuthorizedContext = &SessionRunningAndDeviceAuthorizedContext;
// avfoundation, include code below after @interface
// avf - Session management.
@property (nonatomic) dispatch_queue_t sessionQueue; // Communicate with the session and other session objects on this queue.
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
// avf - Utilities.
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;
@property (nonatomic, getter = isDeviceAuthorized) BOOL deviceAuthorized;
@property (nonatomic, readonly, getter = isSessionRunningAndDeviceAuthorized) BOOL sessionRunningAndDeviceAuthorized;
@property (nonatomic) BOOL lockInterfaceRotation;
@property (nonatomic) id runtimeErrorHandlingObserver;
in .m file:
#pragma mark - AV Foundation
- (BOOL)isSessionRunningAndDeviceAuthorized
{
return [[self session] isRunning] && [self isDeviceAuthorized];
}
+ (NSSet *)keyPathsForValuesAffectingSessionRunningAndDeviceAuthorized
{
return [NSSet setWithObjects:@"session.running", @"deviceAuthorized", nil];
}
// call following method from viewDidLoad
- (void)CreateAVCaptureSession
{
// Create the AVCaptureSession
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[self setSession:session];
// Check for device authorization
[self checkDeviceAuthorizationStatus];
// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
// Why not do all of this on the main queue?
// -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue so that the main queue isn't blocked (which keeps the UI responsive).
dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
[self setSessionQueue:sessionQueue];
dispatch_async(sessionQueue, ^{
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
NSError *error = nil;
AVCaptureDevice *videoDevice = [ViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionFront];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (error)
{
NSLog(#"%#", error);
}
if ([session canAddInput:videoDeviceInput])
{
[session addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
dispatch_async(dispatch_get_main_queue(), ^{
// Why are we dispatching this to the main queue?
// Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView can only be manipulated on main thread.
// Note: As an exception to the above rule, it is not necessary to serialize video orientation changes on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
});
}
/* AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (error)
{
NSLog(#"%#", error);
}
if ([session canAddInput:audioDeviceInput])
{
[session addInput:audioDeviceInput];
}
*/
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([session canAddOutput:movieFileOutput])
{
[session addOutput:movieFileOutput];
AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoStabilizationSupported])
[connection setEnablesVideoStabilizationWhenAvailable:YES];
[self setMovieFileOutput:movieFileOutput];
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([session canAddOutput:stillImageOutput])
{
[stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[session addOutput:stillImageOutput];
[self setStillImageOutput:stillImageOutput];
}
});
}
// call method below from viewWillAppear
- (void)AVFoundationStartSession
{
dispatch_async([self sessionQueue], ^{
[self addObserver:self forKeyPath:#"sessionRunningAndDeviceAuthorized" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:SessionRunningAndDeviceAuthorizedContext];
[self addObserver:self forKeyPath:#"stillImageOutput.capturingStillImage" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:CapturingStillImageContext];
[self addObserver:self forKeyPath:#"movieFileOutput.recording" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:RecordingContext];
[[NSNotificationCenter defaultCenter] addObserver:self selector:#selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
__weak ViewController *weakSelf = self;
[self setRuntimeErrorHandlingObserver:[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification object:[self session] queue:nil usingBlock:^(NSNotification *note) {
ViewController *strongSelf = weakSelf;
dispatch_async([strongSelf sessionQueue], ^{
// Manually restarting the session since it must have been stopped due to an error.
[[strongSelf session] startRunning];
});
}]];
[[self session] startRunning];
});
}
// call method below from viewDidDisappear
- (void)AVFoundationStopSession
{
dispatch_async([self sessionQueue], ^{
[[self session] stopRunning];
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
[[NSNotificationCenter defaultCenter] removeObserver:[self runtimeErrorHandlingObserver]];
[self removeObserver:self forKeyPath:#"sessionRunningAndDeviceAuthorized" context:SessionRunningAndDeviceAuthorizedContext];
[self removeObserver:self forKeyPath:#"stillImageOutput.capturingStillImage" context:CapturingStillImageContext];
[self removeObserver:self forKeyPath:#"movieFileOutput.recording" context:RecordingContext];
});
}
- (BOOL)prefersStatusBarHidden
{
return YES;
}
- (BOOL)shouldAutorotate
{
// Disable autorotation of the interface when recording is in progress.
return ![self lockInterfaceRotation];
}
- (NSUInteger)supportedInterfaceOrientations
{
return UIInterfaceOrientationMaskAll;
}
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration
{
// [[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:(AVCaptureVideoOrientation)toInterfaceOrientation];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
if (context == CapturingStillImageContext)
{
BOOL isCapturingStillImage = [change[NSKeyValueChangeNewKey] boolValue];
if (isCapturingStillImage)
{
[self runStillImageCaptureAnimation];
}
}
else if (context == RecordingContext)
{
BOOL isRecording = [change[NSKeyValueChangeNewKey] boolValue];
dispatch_async(dispatch_get_main_queue(), ^{
if (isRecording)
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setTitle:NSLocalizedString(#"Stop", #"Recording button stop title") forState:UIControlStateNormal];
// [[self recordButton] setEnabled:YES];
}
else
{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setTitle:NSLocalizedString(#"Record", #"Recording button record title") forState:UIControlStateNormal];
// [[self recordButton] setEnabled:YES];
}
});
}
else if (context == SessionRunningAndDeviceAuthorizedContext)
{
BOOL isRunning = [change[NSKeyValueChangeNewKey] boolValue];
dispatch_async(dispatch_get_main_queue(), ^{
if (isRunning)
{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setEnabled:YES];
// [[self stillButton] setEnabled:YES];
}
else
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setEnabled:NO];
// [[self stillButton] setEnabled:NO];
}
});
}
else
{
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
}
#pragma mark Actions
- (IBAction)toggleMovieRecording:(id)sender
{
// [[self recordButton] setEnabled:NO];
dispatch_async([self sessionQueue], ^{
if (![[self movieFileOutput] isRecording])
{
[self setLockInterfaceRotation:YES];
if ([[UIDevice currentDevice] isMultitaskingSupported])
{
// Setup background task. This is needed because the captureOutput:didFinishRecordingToOutputFileAtURL: callback is not received until AVCam returns to the foreground unless you request background execution time. This also ensures that there will be time to write the file to the assets library when AVCam is backgrounded. To conclude this background execution, -endBackgroundTask is called in -recorder:recordingDidFinishToOutputFileURL:error: after the recorded file has been saved.
[self setBackgroundRecordingID:[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil]];
}
// Update the orientation on the movie file output video connection before starting recording.
// [[[self movieFileOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] videoOrientation]];
// Turning OFF flash for video recording
[ViewController setFlashMode:AVCaptureFlashModeOff forDevice:[[self videoDeviceInput] device]];
// Start recording to a temporary file.
NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]];
[[self movieFileOutput] startRecordingToOutputFileURL:[NSURL fileURLWithPath:outputFilePath] recordingDelegate:self];
}
else
{
[[self movieFileOutput] stopRecording];
}
});
}
- (IBAction)changeCamera:(id)sender
{
// [[self cameraButton] setEnabled:NO];
// [[self recordButton] setEnabled:NO];
// [[self stillButton] setEnabled:NO];
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *currentVideoDevice = [[self videoDeviceInput] device];
AVCaptureDevicePosition preferredPosition = AVCaptureDevicePositionUnspecified;
AVCaptureDevicePosition currentPosition = [currentVideoDevice position];
switch (currentPosition)
{
case AVCaptureDevicePositionUnspecified:
preferredPosition = AVCaptureDevicePositionBack;
break;
case AVCaptureDevicePositionBack:
preferredPosition = AVCaptureDevicePositionFront;
break;
case AVCaptureDevicePositionFront:
preferredPosition = AVCaptureDevicePositionBack;
break;
}
AVCaptureDevice *videoDevice = [ViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
[[self session] beginConfiguration];
[[self session] removeInput:[self videoDeviceInput]];
if ([[self session] canAddInput:videoDeviceInput])
{
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice];
[ViewController setFlashMode:AVCaptureFlashModeAuto forDevice:videoDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:videoDevice];
[[self session] addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
}
else
{
[[self session] addInput:[self videoDeviceInput]];
}
[[self session] commitConfiguration];
dispatch_async(dispatch_get_main_queue(), ^{
// [[self cameraButton] setEnabled:YES];
// [[self recordButton] setEnabled:YES];
// [[self stillButton] setEnabled:YES];
});
});
}
- (IBAction)snapStillImage:(id)sender
{
dispatch_async([self sessionQueue], ^{
// Update the orientation on the still image output video connection before capturing.
// [[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] videoOrientation]];
// Flash set to Auto for Still Capture
[ViewController setFlashMode:AVCaptureFlashModeAuto forDevice:[[self videoDeviceInput] device]];
// Capture a still image.
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
// do something good with the saved image
[self saveImageToParse:image];
}
}];
});
}
- (IBAction)focusAndExposeTap:(UIGestureRecognizer *)gestureRecognizer
{
// CGPoint devicePoint = [(AVCaptureVideoPreviewLayer *)[[self previewView] layer] captureDevicePointOfInterestForPoint:[gestureRecognizer locationInView:[gestureRecognizer view]]];
// [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeAutoExpose atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
}
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake(.5, .5);
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
#pragma mark File Output Delegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
if (error)
NSLog(#"%#", error);
[self setLockInterfaceRotation:NO];
// Note the backgroundRecordingID for use in the ALAssetsLibrary completion handler to end the background task associated with this recording. This allows a new recording to be started, associated with a new UIBackgroundTaskIdentifier, once the movie file output's -isRecording is back to NO — which happens sometime after this method returns.
UIBackgroundTaskIdentifier backgroundRecordingID = [self backgroundRecordingID];
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
[[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error)
NSLog(#"%#", error);
[[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
if (backgroundRecordingID != UIBackgroundTaskInvalid)
[[UIApplication sharedApplication] endBackgroundTask:backgroundRecordingID];
}];
}
#pragma mark Device Configuration
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self videoDeviceInput] device];
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
{
[device setFocusMode:focusMode];
[device setFocusPointOfInterest:point];
}
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
{
[device setExposureMode:exposureMode];
[device setExposurePointOfInterest:point];
}
[device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
[device unlockForConfiguration];
}
else
{
NSLog(#"%#", error);
}
});
}
+ (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
if ([device hasFlash] && [device isFlashModeSupported:flashMode])
{
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
[device setFlashMode:flashMode];
[device unlockForConfiguration];
}
else
{
NSLog(#"%#", error);
}
}
}
+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = [devices firstObject];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
{
captureDevice = device;
break;
}
}
return captureDevice;
}
#pragma mark UI
- (void)runStillImageCaptureAnimation
{
/*
dispatch_async(dispatch_get_main_queue(), ^{
[[[self previewView] layer] setOpacity:0.0];
[UIView animateWithDuration:.25 animations:^{
[[[self previewView] layer] setOpacity:1.0];
}];
});
*/
}
- (void)checkDeviceAuthorizationStatus
{
NSString *mediaType = AVMediaTypeVideo;
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
if (granted)
{
//Granted access to mediaType
[self setDeviceAuthorized:YES];
}
else
{
//Not granted access to mediaType
dispatch_async(dispatch_get_main_queue(), ^{
[[[UIAlertView alloc] initWithTitle:#"AVCam!"
message:#"AVCam doesn't have permission to use Camera, please change privacy settings"
delegate:self
cancelButtonTitle:#"OK"
otherButtonTitles:nil] show];
[self setDeviceAuthorized:NO];
});
}
}];
}
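Putting the pieces together, the view-controller hookup would look roughly like this (the take-photo button action name is my own; everything else follows the comments above):
- (void)viewDidLoad {
    [super viewDidLoad];
    [self CreateAVCaptureSession];
}
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    [self AVFoundationStartSession];
}
- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    [self AVFoundationStopSession];
}
// Hypothetical action wired to a "take photo" UIButton in the xib/storyboard.
- (IBAction)takePhotoTapped:(id)sender {
    [self snapStillImage:sender];
}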

MPMoviePlayerController doesn't work with remoteControlReceivedWithEvent

I want to allow the controls from my keyboard to work in my app. The controls use Apple's Remote Control events (beginReceivingRemoteControlEvents, endReceivingRemoteControlEvents, and remoteControlReceivedWithEvent); however I cannot seem to get this to work with MPMoviePlayerController.
I do not see any events at the start of the program, even though beginReceivingRemoteControlEvents is called at the start.
I do not see any events during the playback of a video.
I do see events after I close the video.
From the above, it seems that the audio stream of MPMoviePlayerController disables the controls. However I do not know how to change this. I tried using [moviePlayer setUseApplicationAudioSession:NO]; to change the audio to use the system session, yet it does nothing.
Here is my setup. My app delegate is a UIViewController. I set the main window's root view controller to the app delegate, add views to that view controller, and handle the video-related parts inside it.
- (BOOL)canBecomeFirstResponder {
return YES;
}
- (void)tableView:(UITableView *)theTableView didSelectRowAtIndexPath:(NSIndexPath *)theIndexPath {
NSString *file = [[MGMFilesPath stringByExpandingTildeInPath] stringByAppendingPathComponent:[files objectAtIndex:[theIndexPath indexAtPosition:1]]];
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryPlayback error:nil];
[audioSession setActive:YES error:nil];
NSLog(#"%d", [self isFirstResponder]);
moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:[NSURL fileURLWithPath:file]];
[[NSNotificationCenter defaultCenter] addObserver:self selector:#selector(exitedFullscreen:) name:MPMoviePlayerDidExitFullscreenNotification object:moviePlayer];
[[NSNotificationCenter defaultCenter] addObserver:self selector:#selector(playbackFinished:) name:MPMoviePlayerPlaybackDidFinishNotification object:moviePlayer];
if ([moviePlayer respondsToSelector:#selector(setFullscreen:animated:)]) {
[[self view] addSubview:[moviePlayer view]];
[moviePlayer setFullscreen:YES animated:YES];
[moviePlayer play];
} else {
[moviePlayer play];
}
[fileView deselectRowAtIndexPath:theIndexPath animated:NO];
}
- (void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
[[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
[self becomeFirstResponder];
}
- (void)viewWillDisappear:(BOOL)animated {
[super viewWillDisappear:animated];
[[UIApplication sharedApplication] endReceivingRemoteControlEvents];
[self resignFirstResponder];
}
- (void)remoteControlReceivedWithEvent:(UIEvent *)event {
NSLog(#"remoteControlReceivedWithEvent: %#", event);
if (event.type==UIEventTypeRemoteControl) {
if (event.subtype==UIEventSubtypeRemoteControlPlay) {
NSLog(#"Play");
} else if (event.subtype==UIEventSubtypeRemoteControlPause) {
NSLog(#"Pause");
} else if (event.subtype==UIEventSubtypeRemoteControlTogglePlayPause) {
NSLog(#"Play Pause");
}
}
}
- (void)exitedFullscreen:(NSNotification*)notification {
[[moviePlayer view] removeFromSuperview];
[moviePlayer stop];
[moviePlayer release];
moviePlayer = nil;
[[AVAudioSession sharedInstance] setActive:NO error:nil];
}
- (void)playbackFinished:(NSNotification*)theNotification {
[[NSNotificationCenter defaultCenter] removeObserver:self name:MPMoviePlayerDidExitFullscreenNotification object:moviePlayer];
[[NSNotificationCenter defaultCenter] removeObserver:self name:MPMoviePlayerPlaybackDidFinishNotification object:moviePlayer];
NSNumber *reason = [[theNotification userInfo] objectForKey:MPMoviePlayerPlaybackDidFinishReasonUserInfoKey];
if ([reason intValue]!=MPMovieFinishReasonUserExited) {
[moviePlayer setFullscreen:NO animated:YES];
[[moviePlayer view] removeFromSuperview];
[moviePlayer stop];
[moviePlayer release];
moviePlayer = nil;
[[AVAudioSession sharedInstance] setActive:NO error:nil];
}
NSLog(#"%d", [self isFirstResponder]);
}
As you can see in the code above, I verified that the controller was the first responder (and it was), so I know it's not a first-responder issue.
Can someone help me get this working?
Thanks
Apparently, MPMoviePlayerController isn't the way to go for this. What I ended up doing was using MPMoviePlayerViewController and overriding remoteControlReceivedWithEvent: to customize the controls. Below is the code I am currently using.
@interface MGMMoviePlayerViewController : MPMoviePlayerViewController
- (void)remoteControlReceivedWithEvent:(UIEvent *)event;
@end
@implementation MGMMoviePlayerViewController
- (void)remoteControlReceivedWithEvent:(UIEvent *)theEvent {
if (theEvent.type==UIEventTypeRemoteControl) {
if (theEvent.subtype==UIEventSubtypeRemoteControlPlay) {
[[self moviePlayer] play];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlPause) {
[[self moviePlayer] pause];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlTogglePlayPause) {
if ([[self moviePlayer] playbackState]==MPMoviePlaybackStatePlaying) {
[[self moviePlayer] pause];
} else {
[[self moviePlayer] play];
}
} else if (theEvent.subtype==UIEventSubtypeRemoteControlStop) {
[[self moviePlayer] stop];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlNextTrack) {
NSTimeInterval currentTime = [[self moviePlayer] currentPlaybackTime];
currentTime += 10;
if (currentTime>[[self moviePlayer] duration])
currentTime = [[self moviePlayer] duration];
[[self moviePlayer] setCurrentPlaybackTime:currentTime];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlPreviousTrack) {
NSTimeInterval currentTime = [[self moviePlayer] currentPlaybackTime];
currentTime -= 10;
if (currentTime<0)
currentTime = 0;
[[self moviePlayer] setCurrentPlaybackTime:currentTime];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlBeginSeekingBackward) {
[[self moviePlayer] beginSeekingBackward];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlBeginSeekingForward) {
[[self moviePlayer] beginSeekingForward];
} else if (theEvent.subtype==UIEventSubtypeRemoteControlEndSeekingBackward || theEvent.subtype==UIEventSubtypeRemoteControlEndSeekingForward) {
[[self moviePlayer] endSeeking];
}
}
}
@end
- (void)tableView:(UITableView *)theTableView didSelectRowAtIndexPath:(NSIndexPath *)theIndexPath {
NSString *file = [[MGMFilesPath stringByExpandingTildeInPath] stringByAppendingPathComponent:[files objectAtIndex:[theIndexPath indexAtPosition:1]]];
moviePlayerView = [[MGMMoviePlayerViewController alloc] initWithContentURL:[NSURL fileURLWithPath:file]];
[self presentMoviePlayerViewControllerAnimated:moviePlayerView];
[[moviePlayerView moviePlayer] play];
[fileView deselectRowAtIndexPath:theIndexPath animated:NO];
}
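Note that remote-control events are still delivered only to a first responder that has called beginReceivingRemoteControlEvents, so the subclass (or whichever controller presents it) still needs the usual wiring the question already uses; a sketch, assuming the movie player subclass itself should receive the events:
- (BOOL)canBecomeFirstResponder {
    return YES;
}
- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    // Remote-control events only arrive while registered and first responder.
    [[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
    [self becomeFirstResponder];
}
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [[UIApplication sharedApplication] endReceivingRemoteControlEvents];
    [self resignFirstResponder];
}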

No known class method for selector 'sharedStore'

I have a singleton class for BNRItemStore, but when I try to call it, I get the above error, which causes an ARC issue. I have marked the error with a comment in the code below.
DetailViewController.m
#import "DetailViewController.h"
#import "BNRItem.h"
#import "BNRImageStore.h"
#import "BNRItemStore.h"
@implementation DetailViewController
@synthesize item;
-(id)initForNewItem:(BOOL)isNew
{
self = [super initWithNibName:@"DetailViewController" bundle:nil];
if(self){
if (isNew) {
UIBarButtonItem *doneItem = [[UIBarButtonItem alloc]
initWithBarButtonSystemItem:UIBarButtonSystemItemDone
target:self
action:@selector(save:)];
[[self navigationItem] setRightBarButtonItem:doneItem];
UIBarButtonItem *cancelItem = [[UIBarButtonItem alloc]
initWithBarButtonSystemItem:UIBarButtonSystemItemCancel
target:self
action:@selector(cancel:)];
[[self navigationItem] setLeftBarButtonItem:cancelItem];
}
}
return self;
}
-(id)initWithNibName:(NSString *)nibName bundle:(NSBundle *)bundle
{
@throw [NSException exceptionWithName:@"Wrong initializer"
reason:@"Use initForNewItem:"
userInfo:nil];
return nil;
}
-(void)viewDidLoad
{
[super viewDidLoad];
UIColor *clr = nil;
if ([[UIDevice currentDevice]userInterfaceIdiom]== UIUserInterfaceIdiomPad) {
clr = [UIColor colorWithRed:0.875 green:0.88 blue:0.91 alpha:1];
} else {
clr = [UIColor groupTableViewBackgroundColor];
}
[[self view]setBackgroundColor:clr];
}
- (void)viewDidUnload {
nameField = nil;
serialNumberField = nil;
valueField = nil;
dateLabel = nil;
imageView = nil;
[super viewDidUnload];
}
-(void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
[nameField setText:[item itemName]];
[serialNumberField setText:[item serialNumber]];
[valueField setText:[NSString stringWithFormat:@"%d", [item valueInDollars]]];
// Create a NSDateFormatter that will turn a date into a simple date string
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc]init];
[dateFormatter setDateStyle:NSDateFormatterMediumStyle];
[dateFormatter setTimeStyle:NSDateFormatterNoStyle];
// Use filtered NSDate object to set dateLabel contents
[dateLabel setText:[dateFormatter stringFromDate:[item dateCreated]]];
NSString *imageKey = [item imageKey];
if (imageKey) {
// Get image for image key from image store
UIImage *imageToDisplay = [[BNRImageStore sharedStore]imageForKey:imageKey];
// Use that image to put on the screen in imageview
[imageView setImage:imageToDisplay];
} else {
// Clear the imageview
[imageView setImage:nil];
}
}
-(void)viewWillDisappear:(BOOL)animated
{
[super viewWillDisappear:animated];
// Clear first responder
[[self view]endEditing:YES];
// "Save" changes to item
[item setItemName:[nameField text]];
[item setSerialNumber:[serialNumberField text]];
[item setValueInDollars:[[valueField text] intValue]];
}
-(BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)io
{
if ([[UIDevice currentDevice]userInterfaceIdiom]==UIUserInterfaceIdiomPad) {
return YES;
} else {
return (io==UIInterfaceOrientationPortrait);
}
}
-(void)setItem:(BNRItem *)i
{
item = i;
[[self navigationItem] setTitle:[item itemName]];
}
- (IBAction)takePicture:(id)sender {
if ([imagePickerPopover isPopoverVisible]) {
// If the popover is already up, get rid of it
[imagePickerPopover dismissPopoverAnimated:YES];
imagePickerPopover = nil;
return;
}
UIImagePickerController *imagePicker =
[[UIImagePickerController alloc]init];
// If our device has a camera, we want to take a picture, otherwise, we
// just pick from the photo library
if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
[imagePicker setSourceType:UIImagePickerControllerSourceTypeCamera];
} else {
[imagePicker setSourceType:UIImagePickerControllerSourceTypePhotoLibrary];
// This line of code will generate a warning right now, ignore it
[imagePicker setDelegate:self];
//Place image picker on the screen
// Check for iPad device before instantiating the popover controller
if ([[UIDevice currentDevice]userInterfaceIdiom]==UIUserInterfaceIdiomPad) {
// Create a new popover controller that will display the imagepicker
imagePickerPopover = [[UIPopoverController alloc]initWithContentViewController:imagePicker];
[imagePickerPopover setDelegate:self];
// Display the popover controller; sender
// is the camera bar button item
[imagePickerPopover presentPopoverFromBarButtonItem:sender permittedArrowDirections:UIPopoverArrowDirectionAny animated:YES];
} else {
[self presentViewController:imagePicker animated:YES completion:nil];
}
}
}
-(void)popoverControllerDidDismissPopover:(UIPopoverController *)popoverController
{
NSLog(#"User dismissed popover");
imagePickerPopover = nil;
}
- (IBAction)backgroundTapped:(id)sender {
[[self view]endEditing:YES];
}
-(void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
NSString *oldKey = [item imageKey];
// Did the item already have an image?
if (oldKey) {
// Delete the old image
[[BNRImageStore sharedStore]deleteImageForKey:oldKey];
}
UIImage *image = [info objectForKey:UIImagePickerControllerOriginalImage];
// Create a CFUUID object - it knows how to create unique identifier strings
CFUUIDRef newUniqueID = CFUUIDCreate(kCFAllocatorDefault);
// Create a string from unique identifier
CFStringRef newUniqueIDString = CFUUIDCreateString(kCFAllocatorDefault, newUniqueID); // Incompatible integer to pointer conversion initializing
// Use that unique ID to set our item's imageKey
NSString *key = (__bridge NSString *)newUniqueIDString;
[item setImageKey:key];
// Store image in the BNRImageStore with this key
[[BNRImageStore sharedStore] setImage:image forKey:[item imageKey]];
CFRelease(newUniqueIDString);
CFRelease(newUniqueID);
[imageView setImage:image];
if ([[UIDevice currentDevice]userInterfaceIdiom]==UIUserInterfaceIdiomPad) {
// If on the phone, the image picker is presented modally. Dismiss it.
[self dismissViewControllerAnimated:YES completion:nil];
} else {
// If on the pad, the image picker is in the popover. Dismiss the popover.
[imagePickerPopover dismissPopoverAnimated:YES];
imagePickerPopover = nil;
}
}
-(BOOL)textFieldShouldReturn:(UITextField *)textField
{
[textField resignFirstResponder];
return YES;
}
-(void)save:(id)sender
{
[[self presentingViewController]dismissViewControllerAnimated:YES
completion:nil];
}
-(void)cancel:(id)sender
{
// If the user cancelled, then remove the BNRItem from the store
[[BNRItemStore sharedStore]removeItem:item]; // No known class method for selector 'sharedStore'
[[self presentingViewController]dismissViewControllerAnimated:YES completion:nil];
}
DetailViewController.h
#import <UIKit/UIKit.h>
@class BNRItem;
@interface DetailViewController : UIViewController <UINavigationControllerDelegate, UIImagePickerControllerDelegate,UITextFieldDelegate, UIPopoverControllerDelegate>
{
__weak IBOutlet UITextField *nameField;
__weak IBOutlet UITextField *serialNumberField;
__weak IBOutlet UITextField *valueField;
__weak IBOutlet UILabel *dateLabel;
__weak IBOutlet UIImageView *imageView;
UIPopoverController *imagePickerPopover;
}
@property (nonatomic, strong) BNRItem *item;
-(id)initForNewItem:(BOOL)isNew;
- (IBAction)takePicture:(id)sender;
- (IBAction)backgroundTapped:(id)sender;
@end
BNRItemStore.m
#import "BNRItemStore.h"
#import "BNRItem.h"
@implementation BNRItemStore
+ (BNRItemStore *)defaultStore
{
static BNRItemStore *defaultStore = nil;
if(!defaultStore)
defaultStore = [[super allocWithZone:nil] init];
return defaultStore;
}
+ (id)allocWithZone:(NSZone *)zone
{
return [self defaultStore];
}
- (id)init
{
self = [super init];
if(self) {
allItems = [[NSMutableArray alloc] init];
}
return self;
}
- (void)removeItem:(BNRItem *)p
{
[allItems removeObjectIdenticalTo:p];
}
- (NSArray *)allItems
{
return allItems;
}
- (void)moveItemAtIndex:(int)from
toIndex:(int)to
{
if (from == to) {
return;
}
// Get pointer to object being moved so we can re-insert it
BNRItem *p = [allItems objectAtIndex:from];
// Remove p from array
[allItems removeObjectAtIndex:from];
// Insert p in array at new location
[allItems insertObject:p atIndex:to];
}
- (BNRItem *)createItem
{
BNRItem *p = [BNRItem randomItem];
[allItems addObject:p];
return p;
}
@end
BNRItemStore.h
#import <Foundation/Foundation.h>
@class BNRItem;
@interface BNRItemStore : NSObject
{
NSMutableArray *allItems;
}
+ (BNRItemStore *)defaultStore;
- (void)removeItem:(BNRItem *)p;
- (NSArray *)allItems;
- (BNRItem *)createItem;
- (void)moveItemAtIndex:(int)from
toIndex:(int)to;
@end
Your error occurs because you are calling +sharedStore on BNRItemStore, and according to the code you posted that method really does not exist.
All of the other +sharedStore calls use the one presumably made available by BNRImageStore, which you didn't provide. I assume it exists in that class? :)
In short, change:
[[BNRItemStore sharedStore]removeItem:item];
to
[[BNRImageStore sharedStore]removeItem:item];
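Alternatively, since the BNRItemStore code you posted defines +defaultStore rather than +sharedStore, keeping the item store and calling that method would also compile:
[[BNRItemStore defaultStore] removeItem:item];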

download multiple images with ASIHttpRequest

I have a UITableView which contains several cells. When I tap a cell, I push another controller which shows the detail of that cell. In the detail view I have a scroll view which contains several UIImageViews. I would like to download the images for these views from the web using ASIHTTPRequest when the detail view is loaded.
You can subclass UIView. Each view owns an ASIHTTPRequest; when its download has finished, you can use drawRect: to draw the downloaded image.
Here is a demo:
MyImageView.h
#import <UIKit/UIKit.h>
#import "ASIHTTPRequest.h"
@interface MyImageView : UIView <ASIHTTPRequestDelegate>
{
ASIHTTPRequest *httpRequest;
UIImage *image;
}
- (void)startRequest:(NSString *)_url;
@end
MyImageView.m
#import "MyImageView.h"
@implementation MyImageView
- (id)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
// Initialization code
}
return self;
}
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect
{
[super drawRect:rect];
// Drawing code
if (image != nil) {
[image drawInRect:self.bounds];
}
}
- (void)dealloc
{
[httpRequest clearDelegatesAndCancel];
[httpRequest release];
[image release];
[super dealloc];
}
-(void)startRequest:(NSString *)_url
{
if (httpRequest != nil) {
[httpRequest clearDelegatesAndCancel];
[httpRequest release];
}
httpRequest = [[ASIHTTPRequest alloc] initWithURL:[NSURL URLWithString:_url]];
[httpRequest setTimeOutSeconds:30];
[httpRequest setDelegate:self];
[httpRequest startAsynchronous];
}
- (void)requestFinished:(ASIHTTPRequest *)request
{
if ([request responseStatusCode] == 200) {
image = [[UIImage alloc] initWithData:[request responseData]];
[self setNeedsDisplay];
}
}
- (void)requestFailed:(ASIHTTPRequest *)request
{
NSLog(@"request failed");
}
@end
Usage:
MyImageView *imageView = [[MyImageView alloc] initWithFrame:CGRectMake(100, 100, 50, 50)];
[imageView startRequest:#"http://imageUrl"];
[self.view addSubview:imageView];
[imageView release];
Here's a class derived from UIImageView:
Header file, UIHTTPImageView.h:
#import "ASIHTTPRequest.h"
@interface UIHTTPImageView : UIImageView {
ASIHTTPRequest *request;
}
- (void)setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder;
@end
and UIHTTPImageView.m:
#import "UIHTTPImageView.h"
@implementation UIHTTPImageView
- (void)setImageWithURL:(NSURL *)url placeholderImage:(UIImage *)placeholder {
[request setDelegate:nil];
[request cancel];
[request release];
request = [[ASIHTTPRequest requestWithURL:url] retain];
[request setCacheStoragePolicy:ASICachePermanentlyCacheStoragePolicy];
if (placeholder)
self.image = placeholder;
[request setDelegate:self];
[request startAsynchronous];
}
- (void)dealloc {
[request setDelegate:nil];
[request cancel];
[request release];
[super dealloc];
}
- (void)requestFinished:(ASIHTTPRequest *)req
{
if (request.responseStatusCode != 200)
return;
self.image = [UIImage imageWithData:request.responseData];
}
@end
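Usage is analogous to MyImageView above (the URL and placeholder image name here are just stand-ins):
UIHTTPImageView *thumb = [[UIHTTPImageView alloc] initWithFrame:CGRectMake(0, 0, 50, 50)];
[thumb setImageWithURL:[NSURL URLWithString:@"http://example.com/image.jpg"]
      placeholderImage:[UIImage imageNamed:@"placeholder.png"]];
[self.view addSubview:thumb];
[thumb release];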
Note that there is no error reporting; you may want a requestFailed: method if you want to report a problem to the user.
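A minimal sketch of such a method, assuming you only want to log the failure (you could also set a fallback image here):
- (void)requestFailed:(ASIHTTPRequest *)req
{
    // Hypothetical handler: just log; ASIHTTPRequest exposes the underlying NSError.
    NSLog(@"image request failed: %@", [req error]);
}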

Loading presentModalViewController in viewDidAppear causes EXC_BAD_ACCESS

In the following view controller class I get EXC_BAD_ACCESS when trying to call presentModalViewController: in the viewDidAppear: method.
#import "SounderViewController.h"
#import "ASIFormDataRequest.h"
#import "ASIHTTPRequest.h"
#import "JSON.h"
#import "InfoViewController.h"
@implementation SounderViewController
@synthesize ipod;
@synthesize ivc;
@synthesize title_lb, artist_lb, check;
-(IBAction)showCurrentSongInfo{
MPMediaItem * song = [ipod nowPlayingItem];
NSString * title = [song valueForProperty:MPMediaItemPropertyTitle];
NSString * artist = [song valueForProperty:MPMediaItemPropertyArtist];
title_lb.text = title;
artist_lb.text = artist;
}
-(void)playbackStateChanged: (NSNotification*) notification{
[self showCurrentSongInfo];
NSLog(#"Playback state: %#",[notification name]);
if (ipod.playbackState != MPMusicPlaybackStatePlaying) {
NSLog(#"Is not playing");
[self presentModalViewController:self.ivc animated:YES];
}else if (ipod.playbackState == MPMusicPlaybackStatePlaying) {
NSLog(#"Is playing");
[self dismissModalViewControllerAnimated:YES];
}
}
-(void)nowPlayingItemChanged: (NSNotification*) notification{
[self showCurrentSongInfo];
NSLog(#"Playing item changed: %#",[notification name]);
}
- (void)viewDidLoad {
[super viewDidLoad];
self.ivc = [[InfoViewController alloc] initWithNibName:@"InfoViewController" bundle:nil];
self.ipod = [MPMusicPlayerController iPodMusicPlayer];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector (playbackStateChanged:)
name:@"MPMusicPlayerControllerPlaybackStateDidChangeNotification"
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector (nowPlayingItemChanged:)
name:@"MPMusicPlayerControllerNowPlayingItemDidChangeNotification"
object:nil];
[[MPMusicPlayerController iPodMusicPlayer] beginGeneratingPlaybackNotifications];
}
-(void)viewDidAppear:(BOOL)animated{
[super viewDidAppear:animated];
if (ipod.playbackState != MPMusicPlaybackStatePlaying) {
[self presentModalViewController:self.ivc animated:YES];
}else{
[self showCurrentSongInfo];
}
}
-(IBAction)showInfoView{
[self presentModalViewController:self.ivc animated:YES];
}
#pragma mark View Methods
- (void)didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
- (void)viewDidUnload {
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (void)dealloc {
[super dealloc];
}
@end
Method call
[self presentModalViewController:self.ivc animated:YES];
in viewDidAppear: causes EXC_BAD_ACCESS.
I have tried debugging it with NSZombieEnabled but got only a stack trace into main.
The thing that drives me crazy is that if the same code is run from the playbackStateChanged: method, it works fine.
If any of you can help, I won't go bald as quickly. Thanks.
I finally made it work! But I think it's just a quick fix.
So I found out that to make my ivc show up I need to delay the call to presentModalViewController:
[self performSelector:@selector(showWaitingMessageView:) withObject:self.ivc afterDelay:1];
That's it. It works.
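For reference, showWaitingMessageView: is just a thin wrapper; a sketch, assuming it does nothing more than present whatever controller it is handed:
- (void)showWaitingMessageView:(UIViewController *)controller {
    // Hypothetical helper invoked via performSelector:withObject:afterDelay:.
    [self presentModalViewController:controller animated:YES];
}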
I don't know why this helped, so if one of you gurus knows more about it please enlighten me.