I was wondering why I don't get the notification when my beacon proximity is Immediate while the app is in the background. In the foreground, everything works fine.
#import "ESTViewController.h"
#import <ESTBeaconManager.h>
@interface ESTViewController () <ESTBeaconManagerDelegate>
@property (nonatomic, strong) ESTBeaconManager* beaconManager;
@property (nonatomic, strong) ESTBeacon* selectedBeacon;
@property (weak, nonatomic) IBOutlet UILabel *outputLabel;
@end
@implementation ESTViewController
- (void)viewDidLoad
{
[super viewDidLoad];
/////////////////////////////////////////////////////////////
// setup Estimote beacon manager
// create manager instance
self.beaconManager = [[ESTBeaconManager alloc] init];
self.beaconManager.delegate = self;
self.beaconManager.avoidUnknownStateBeacons = YES;
// create sample region with major value defined
ESTBeaconRegion* region = [[ESTBeaconRegion alloc] initWithProximityUUID:ESTIMOTE_PROXIMITY_UUID identifier: @"EstimoteSampleRegion"];
NSLog(@"TODO: Update the ESTBeaconRegion with your major / minor number and enable background app refresh in the Settings on your device for the NotificationDemo to work correctly.");
// start looking for estimote beacons in region
[self.beaconManager startMonitoringForRegion:region];
[self.beaconManager startRangingBeaconsInRegion:region];
[self.beaconManager requestStateForRegion:region];
// setup view
self.outputLabel.text = @"Searching for region";
}
-(void)beaconManager:(ESTBeaconManager *)manager
didDetermineState:(CLRegionState)state
forRegion:(ESTBeaconRegion *)region
{
// NSLog(@"Region: %@", region);
if(state == CLRegionStateInside)
{
NSLog(@"State: CLRegionStateInside");
}
else
{
NSLog(@"State: CLRegionStateOutside");
}
}
-(void)beaconManager:(ESTBeaconManager *)manager
didRangeBeacons:(NSArray *)beacons
inRegion:(ESTBeaconRegion *)region
{
if([beacons count] > 0)
{
self.selectedBeacon = [beacons firstObject]; // get the closest
NSString* labelText = [NSString stringWithFormat:
#"Major: %i, Minor: %i\nRegion: ",
[self.selectedBeacon.major unsignedShortValue],
[self.selectedBeacon.minor unsignedShortValue]];
// calculate and set new y position
switch (self.selectedBeacon.proximity)
{
case CLProximityUnknown:
labelText = [labelText stringByAppendingString: @"Unknown"];
break;
case CLProximityImmediate:
labelText = [labelText stringByAppendingString: @"Immediate"];
[self fireNotification];
break;
case CLProximityNear:
labelText = [labelText stringByAppendingString: @"Near"];
break;
case CLProximityFar:
labelText = [labelText stringByAppendingString: @"Far"];
break;
default:
break;
}
self.outputLabel.text = labelText;
}
}
-(void)fireNotification {
NSLog(#"Fire Notification from background");
// present local notification
UILocalNotification *notification = [[UILocalNotification alloc] init];
notification.alertBody = #"You are very close to the beacon";
notification.soundName = UILocalNotificationDefaultSoundName;
[[UIApplication sharedApplication] presentLocalNotificationNow:notification];
// Request a server...
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
With iOS, ranging for iBeacons generally only works in the foreground. The code that checks for CLProximityImmediate is in a ranging callback method.
If your app is in the background, it will only receive ranging callbacks for about five seconds after entering or exiting a monitored iBeacon region.
This limitation comes from the standard iOS Core Location APIs. Although you are using the proprietary Estimote SDK, the same restrictions apply, because it is a wrapper around those standard APIs.
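If you mainly need a notification when the user gets near the beacon, a common workaround is to fire it from the monitoring callback instead, since monitoring does run in the background. A minimal sketch, assuming your version of the Estimote SDK forwards Core Location's monitoring callback as beaconManager:didEnterRegion: (the alert text here is a placeholder):
// Hedged sketch: monitoring (unlike ranging) wakes the app briefly in the
// background, so the notification can be posted on region entry.
-(void)beaconManager:(ESTBeaconManager *)manager
didEnterRegion:(ESTBeaconRegion *)region
{
UILocalNotification *notification = [[UILocalNotification alloc] init];
notification.alertBody = @"You are near the beacon";
notification.soundName = UILocalNotificationDefaultSoundName;
[[UIApplication sharedApplication] presentLocalNotificationNow:notification];
}
The trade-off is that region entry is not the same as Immediate proximity; you lose the proximity filter, but the notification fires while backgrounded.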
I have been experimenting with AudioKit and have made a sample app to plot the waveform of recorded audio and of playback audio. I am seeing an issue, though: when I record or play back audio, the rolling waveform doesn't show up in the view on a device, yet it shows up perfectly fine in the simulator (11.4). I've provided the recording view controller code below for context on how I'm trying to implement this while recording audio.
Any help or being pointed in the general direction would be greatly appreciated.
RecordingVC.m code:
#import "FirstViewController.h"
@interface FirstViewController ()
@end
@implementation FirstViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[self setupConfig];
[self setupUI];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (void) setupUI
{
//Configure waveform view
self.recordingPlotView.gain = 2;
self.recordingPlotView.backgroundColor = [UIColor colorWithRed: .10 green: .10 blue: .10 alpha: 1];
self.recordingPlotView.color = [UIColor colorWithRed: .44 green: .44 blue: .44 alpha: 1];
self.recordingPlotView.plotType = EZPlotTypeRolling;
self.recordingPlotView.shouldFill = YES;
self.recordingPlotView.shouldMirror = YES;
[self.view addSubview: self.recordingPlotView];
}
- (void) setupConfig
{
self.isRecording = NO;
[AKSettings setAudioInputEnabled: true];
[AKSettings setPlaybackWhileMuted: true];
// Note: recording requires the PlayAndRecord category, and DefaultToSpeaker is a
// category option (kAudioSessionProperty_OverrideCategoryDefaultToSpeaker is an
// old AudioSession property ID, not a valid option here).
[AVAudioSession.sharedInstance setCategory: AVAudioSessionCategoryPlayAndRecord withOptions: AVAudioSessionCategoryOptionDefaultToSpeaker error: nil];
self.mic = [[EZMicrophone alloc] initWithMicrophoneDelegate: self];
}
#pragma mark - EZMicrophone Delegate methods
- (void) microphone:(EZMicrophone *)microphone
hasAudioReceived:(float **)buffer
withBufferSize:(UInt32)bufferSize
withNumberOfChannels:(UInt32)numberOfChannels
{
__weak typeof (self) weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
[weakSelf.recordingPlotView updateBuffer:buffer[0]
withBufferSize:bufferSize];
});
}
- (void) microphone:(EZMicrophone *)microphone
hasBufferList:(AudioBufferList *)bufferList
withBufferSize:(UInt32)bufferSize
withNumberOfChannels:(UInt32)numberOfChannels
{
if (self.isRecording)
{
[self.recorder appendDataFromBufferList:bufferList
withBufferSize:bufferSize];
}
}
#pragma mark - EZRecorder Delegate methods
- (void)recorderDidClose:(EZRecorder *)recorder
{
self.recorder.delegate = nil;
}
#pragma mark - Utils
- (NSArray *)applicationDocuments
{
return NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
}
- (NSString *)applicationDocumentsDirectory
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
return basePath;
}
- (NSURL *)testFilePathURL
{
return [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/%@",
[self applicationDocumentsDirectory],
@"test2.m4a"]];
}
#pragma mark - user Interaction
- (IBAction)playButtonTapped:(id)sender {
if (self.isRecording)
{
self.isRecording = NO;
self.playButton.titleLabel.text = #"Record";
[self.mic stopFetchingAudio];
}
else
{
self.isRecording = YES;
self.playButton.titleLabel.text = #"Pause";
[self.mic startFetchingAudio];
self.recorder = [EZRecorder recorderWithURL: [self testFilePathURL] clientFormat: [self.mic audioStreamBasicDescription] fileType: EZRecorderFileTypeM4A delegate: self];
}
}
- (IBAction)stopButtonTapped:(id)sender {
if (self.isRecording)
{
self.isRecording = NO;
self.playButton.titleLabel.text = #"Record";
[self.mic stopFetchingAudio];
[self.recorder closeAudioFile];
}
[self.recordingPlotView clear];
self.recorder = nil;
}
@end
RecordingVC.h code:
#import <UIKit/UIKit.h>
@import AudioKit;
@import AudioKitUI;
@interface FirstViewController : UIViewController <EZMicrophoneDelegate, EZRecorderDelegate>
@property (strong, nonatomic) IBOutlet EZAudioPlot *recordingPlotView;
@property (nonatomic, strong) EZMicrophone* mic;
@property (nonatomic, strong) EZRecorder* recorder;
@property (nonatomic, assign) BOOL isRecording;
@property (strong, nonatomic) IBOutlet UIButton *playButton;
@end
Small Update:
I've managed to get the playback waveform displaying on device by setting the gain in Interface Builder, even though I was already setting it in code during viewDidLoad.
I've tried doing the same (setting the gain for the plot in Interface Builder) for the recording VC (the code above), but that did not solve it the way it did for the playback VC.
I ran your project and it works on the device the same as in the simulator, except that the simulator's microphone is the computer's and seems much more sensitive than the device's, so I had to set the gain higher:
self.recordingPlotView.gain = 20;
before I noticed the waveform.
Excuse me, but I'm a total noob, not a programmer. I based a photo editing app on a template and customised it heavily with help from Google searches, tutorials, etc.
Using Xcode 7.3.1, iOS 9.3, the newer Photos framework, and only Objective-C.
I've got the app to a point that I'm happy with, except that I noticed that on first launch the app hangs (the debugger reports semaphore_wait_trap()).
The app can't get to the next step, the "request to access photos" alert pop-up in iOS 9.3; the only way to reach it is to hit the home button, see the grant-access alert, then switch back to the app. After quitting and relaunching, the app runs fine every time after that. This is of course not an ideal user experience.
If I pause in debug mode, I see it's hanging on semaphore_wait_trap().
I've googled and searched for days and can't find a solution to get the permissions alert pop-up to show on top of my app window.
It's beyond me. Any ideas would be greatly appreciated.
See screen shot of the launch image that remains on top of the alert pop up.
If you press the "Home" button, the alert to grant access to photos appears.
The app delegate:
@implementation AppDelegate
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
if ([UIApplication instancesRespondToSelector:@selector(registerUserNotificationSettings:)]){
[application registerUserNotificationSettings:[UIUserNotificationSettings settingsForTypes:UIUserNotificationTypeAlert|UIUserNotificationTypeBadge|UIUserNotificationTypeSound categories:nil]];
}
UILocalNotification *locationNotification = [launchOptions objectForKey:UIApplicationLaunchOptionsLocalNotificationKey];
if (locationNotification) {
// Sets icon badge number to zero
application.applicationIconBadgeNumber = 0;
}
// END Local Notification ==========================
return YES;
}
-(void)application:(UIApplication *)application didReceiveLocalNotification:(UILocalNotification *)notification {
// Resets icon's badge number to zero
application.applicationIconBadgeNumber = 0;
}
Here is a snippet of the main view controller (I hope it's not too long; I'm not sure where the problem lies).
HomeVC.m:
#import "HomeVC.h"
#import "Configs.h"
#import "AAPLGridViewCell2.h"
#import "NSIndexSet+Convenience.h"
#import "UICollectionView+Convenience.h"
#import "AAPLRootListViewController.h"
#import "Configs.h"
#import "ImageEditorTheme.h"
#import "ImageEditorTheme+Private.h"
@import PhotosUI;
@import UIKit;
@interface HomeVC()
<
PHPhotoLibraryChangeObserver,
UICollectionViewDelegateFlowLayout,
UICollectionViewDataSource,
UICollectionViewDelegate
>
@property (nonatomic, strong) NSArray *sectionFetchResults;
@property (nonatomic, strong) NSArray *sectionLocalizedTitles;
@property (nonatomic, strong) PHCachingImageManager *imageManager;
@property CGRect previousPreheatRect;
@property (nonatomic, strong) IBOutlet UICollectionViewFlowLayout *flowLayout;
@property (nonatomic, assign) CGSize lastTargetSize;
@end
@implementation HomeVC
{
UIActivityIndicatorView *_indicatorView;
}
static NSString * const AllPhotosReuseIdentifier = @"AllPhotosCell";
static NSString * const CollectionCellReuseIdentifier = @"CollectionCell";
static NSString * const CellReuseIdentifier = @"Cell";
static CGSize AssetGridThumbnailSize;
- (void)awakeFromNib {
self.imageManager = [[PHCachingImageManager alloc] init];
[self resetCachedAssets];
[[PHPhotoLibrary sharedPhotoLibrary] registerChangeObserver:self];
}
- (void)dealloc {
[[PHPhotoLibrary sharedPhotoLibrary] unregisterChangeObserver:self];
}
- (void)viewWillAppear:(BOOL)animated {
[super viewWillAppear:animated];
_logoImage.layer.cornerRadius = 30;
[self loadPhotos];
[_libraryOutlet addTarget:self action:@selector(touchUp:) forControlEvents:UIControlEventTouchUpInside];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handle_data) name:@"reload_data" object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(hideMenu) name:@"hide_menu" object:nil];
}
- (void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
// Begin caching assets in and around collection view's visible rect.
[self updateCachedAssets];
}
-(void)handle_data {
//[self.collectionView2 layoutIfNeeded];
//[self resetCachedAssets];
[self.collectionView2 reloadData];
[self updateCachedAssets];
NSLog(#"did it work?");
}
- (void)viewDidLayoutSubviews
{
NSInteger section = [self.collectionView2 numberOfSections] - 1;
NSInteger item = [self.collectionView2 numberOfItemsInSection:section] - 1;
NSIndexPath *indexPath = [NSIndexPath indexPathForItem:item inSection:section];
[self.collectionView2 scrollToItemAtIndexPath:indexPath atScrollPosition:(UICollectionViewScrollPositionTop) animated:NO];
//[self loadPhotos];
}
-(void) loadPhotos {
PHFetchOptions *allPhotosOptions = [[PHFetchOptions alloc] init];
allPhotosOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:YES]];
PHFetchResult *allPhotos = [PHAsset fetchAssetsWithOptions:allPhotosOptions];
if (self.assetsFetchResults == nil) {
self.assetsFetchResults = allPhotos;
}
}
#pragma mark - PHPhotoLibraryChangeObserver
- (void)photoLibraryDidChange:(PHChange *)changeInstance {
// Check if there are changes to the assets we are showing.
PHFetchResultChangeDetails *collectionChanges = [changeInstance changeDetailsForFetchResult:self.assetsFetchResults];
if (collectionChanges == nil) {
return;
}
/*
Change notifications may be made on a background queue. Re-dispatch to the
main queue before acting on the change as we'll be updating the UI.
*/
dispatch_async(dispatch_get_main_queue(), ^{
// Get the new fetch result.
self.assetsFetchResults = [collectionChanges fetchResultAfterChanges];
UICollectionView *collectionView = self.collectionView;
if (![collectionChanges hasIncrementalChanges] || [collectionChanges hasMoves]) {
// Reload the collection view if the incremental diffs are not available
[collectionView reloadData];
} else {
/*
Tell the collection view to animate insertions and deletions if we
have incremental diffs.
*/
[collectionView performBatchUpdates:^{
NSIndexSet *removedIndexes = [collectionChanges removedIndexes];
if ([removedIndexes count] > 0) {
[collectionView deleteItemsAtIndexPaths:[removedIndexes aapl_indexPathsFromIndexesWithSection:0]];
}
NSIndexSet *insertedIndexes = [collectionChanges insertedIndexes];
if ([insertedIndexes count] > 0) {
[collectionView insertItemsAtIndexPaths:[insertedIndexes aapl_indexPathsFromIndexesWithSection:0]];
}
NSIndexSet *changedIndexes = [collectionChanges changedIndexes];
if ([changedIndexes count] > 0) {
[collectionView reloadItemsAtIndexPaths:[changedIndexes aapl_indexPathsFromIndexesWithSection:0]];
}
} completion:NULL];
}
[self resetCachedAssets];
});
}
#pragma mark - UICollectionViewDataSource
- (NSInteger)collectionView:(UICollectionView *)collectionView numberOfItemsInSection:(NSInteger)section {
return self.assetsFetchResults.count;
}
- (CGSize)collectionView:(UICollectionView *)collectionView layout:(UICollectionViewLayout*)collectionViewLayout sizeForItemAtIndexPath:(NSIndexPath *)indexPath {
CGFloat colum = 3.0, spacing = 0.0;
CGFloat value = floorf((CGRectGetWidth(self.view.bounds) - (colum - 1) * spacing) / colum);
UICollectionViewFlowLayout *layout = [[UICollectionViewFlowLayout alloc] init];
layout.itemSize = CGSizeMake(value, value);
layout.sectionInset = UIEdgeInsetsMake(0, 0, 0, 0);
layout.minimumInteritemSpacing = spacing;
layout.minimumLineSpacing = spacing;
return CGSizeMake(value, value);
//return self.collectionView.frame.size;
}
- (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath {
PHAsset *asset = self.assetsFetchResults[indexPath.item];
// Dequeue an AAPLGridViewCell.
AAPLGridViewCell2 *cell = [collectionView dequeueReusableCellWithReuseIdentifier:CellReuseIdentifier forIndexPath:indexPath];
cell.representedAssetIdentifier = asset.localIdentifier;
// Request an image for the asset from the PHCachingImageManager.
[self.imageManager requestImageForAsset:asset
targetSize:CGSizeMake(130, 130)
contentMode:PHImageContentModeAspectFill
options:nil
resultHandler:^(UIImage *result, NSDictionary *info) {
// Set the cell's thumbnail image if it's still showing the same asset.
if ([cell.representedAssetIdentifier isEqualToString:asset.localIdentifier]) {
cell.thumbnailImage = result;
}
}];
CGPoint bottomOffset = CGPointMake(0, self.collectionView.contentSize.height - self.collectionView.bounds.size.height + self.collectionView.contentInset.bottom);
[self.collectionView setContentOffset:bottomOffset animated:NO];
return cell;
}
- (void) collectionView:(UICollectionView *)collectionView didSelectItemAtIndexPath:(NSIndexPath *)indexPath
{
// Prepare the options to pass when fetching the live photo.
PHAsset *asset = self.assetsFetchResults[indexPath.item];
PHImageRequestOptions *options = [[PHImageRequestOptions alloc] init];
options.deliveryMode = PHImageRequestOptionsDeliveryModeHighQualityFormat;
options.networkAccessAllowed = NO;
dispatch_async(dispatch_get_main_queue(), ^{
_indicatorView = [ImageEditorTheme indicatorView];
_indicatorView.center = self.containerView.center;
[self.containerView addSubview:_indicatorView];
[_indicatorView startAnimating];
UIStoryboard *storyboard = [UIStoryboard storyboardWithName:@"Main" bundle:nil];
PreviewVC *prevVC = (PreviewVC *)[storyboard instantiateViewControllerWithIdentifier:@"PreviewVC"];
[[PHImageManager defaultManager] requestImageForAsset:asset targetSize:PHImageManagerMaximumSize contentMode:PHImageContentModeAspectFit options:options resultHandler:^(UIImage *result, NSDictionary *info) {
// Show the UIImageView and use it to display the requested image.
passedImage = result;
prevVC.modalTransitionStyle = UIModalTransitionStyleCrossDissolve;
[self presentViewController:prevVC animated:true completion:nil];
[_indicatorView stopAnimating];
}];
});
}
#pragma mark - UIScrollViewDelegate
- (void)scrollViewDidScroll:(UIScrollView *)scrollView {
// Update cached assets for the new visible area.
[self updateCachedAssets];
}
I managed to solve the issue. It was as simple as removing the call to "[self resetCachedAssets];" in "awakeFromNib".
Works great now.
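For anyone hitting the same hang: another option is to drive the permission prompt yourself before any Photos fetch runs, so nothing on the launch path blocks waiting on authorization. A minimal sketch using the stock Photos requestAuthorization API (the loadPhotos and collectionView2 names come from the code above):
// Hedged sketch: ask for Photos access explicitly, then load once answered.
[PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus status) {
// The handler may arrive on a background thread; hop to the main queue for UI work.
dispatch_async(dispatch_get_main_queue(), ^{
if (status == PHAuthorizationStatusAuthorized) {
[self loadPhotos];
[self.collectionView2 reloadData];
}
});
}];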
I have a watch app that is being updated for watchOS 2. sendMessage does not wake the parent app. According to the transition documentation, this is how you would wake the parent app in the background:
"The iOS app is always considered reachable, and calling this method from your Watch app wakes up the iOS app in the background as needed."
Has anyone had this problem? The only way to get data is to have the parent app already open.
Another weird thing is that the watch app changes the UITableView of the parent app. When -(IBAction)yesterdaySales:(id)sender is called on the watch, it changes the parent app's UITableView instead of the watch table view.
InterfaceController.m
#import "InterfaceController.h"
#import "MyRowController.h"
#import "ftDateParser.h"
@import WatchKit;
#import <WatchConnectivity/WatchConnectivity.h>
@interface InterfaceController() <WCSessionDelegate>
{
IBOutlet WKInterfaceDevice *it;
BOOL tday;
IBOutlet WKInterfaceLabel *lblCompany;
}
@end
@implementation InterfaceController
@synthesize myTable = _myTable;
- (void)awakeWithContext:(id)context {
[super awakeWithContext:context];
// Configure interface objects here.
if([WCSession isSupported]){
WCSession *session = [WCSession defaultSession];
session.delegate = self;
[session activateSession];
}
//[self requestInfoPhone];
[self getToday];
}
- (void)willActivate {
// This method is called when watch view controller is about to be visible to user
[super willActivate];
}
- (void)didDeactivate {
// This method is called when watch view controller is no longer visible
[super didDeactivate];
}
-(void)requestInfoPhone{
NSDictionary *dic = @{@"request":@"ySales"};
[[WCSession defaultSession] sendMessage:dic
replyHandler:^(NSDictionary *replyInfo){
NSLog(#"The Reply: %#", replyInfo);
NSDictionary *location = replyInfo;
NSString *name = location[#"label"];
NSString *totalSales = location[#"totalSales"];
// NSString *test2 = location[#"rowText"];
NSMutableArray *sales = [[NSMutableArray alloc]init];
NSMutableArray *storeNames = [[NSMutableArray alloc]init];
sales = location[#"rowText"];
storeNames = location[#"storeNames"];
[self loadTable:sales names:storeNames company:name];
[_labelName setText:name];
[_labelTotalSales setText:totalSales];
tday = YES;
}
errorHandler:^(NSError *error){
NSLog(#"%#", error);
}
];
}
-(void)loadTable:(NSMutableArray*)tester names:(NSMutableArray*)names company:(NSString *)company{
[_myTable setNumberOfRows:[tester count] withRowType:@"row"];
[_labelName setText:company];
for (int i = 0; i < [tester count]; i++) {
MyRowController *vc = [_myTable rowControllerAtIndex:i];
[vc.testLabel setText:[ftDateParser currencyFormat: tester[i]]];
[vc.nameLabel setText:[ftDateParser parseName:names[i]]];
}
[_myTable scrollToRowAtIndex:(0)];
}
-(IBAction)yesterdaySales:(id)sender{
if (tday) {
[_ydaySales setTitle:#"Today Sales"];
[self requestInfoPhone];
}
else{
[_ydaySales setTitle:#"Yesterday Sales"];
[self getToday];
}
}
-(void)getToday{
NSDictionary *dic = @{@"request":@"todaySales"};
[[WCSession defaultSession] sendMessage:dic
replyHandler:^(NSDictionary *replyInfo){
NSDictionary *location = replyInfo;
NSString *name = location[@"label"];
NSString *totalSales = location[@"totalSales"];
// NSString *test2 = location[@"rowText"];
NSMutableArray *sales = [[NSMutableArray alloc]init];
NSMutableArray *storeNames = [[NSMutableArray alloc]init];
sales = location[@"rowText"];
storeNames = location[@"storeNames"];
[self loadTable:sales names:storeNames company:name];
[_labelName setText:name];
[_labelTotalSales setText:totalSales];
tday = YES;
}
errorHandler:^(NSError *error){
NSLog(#"%#", error);
}
];
}
@end
Parent.m
-(void)setUpAppForWatch{
done = NO;
if([WCSession isSupported]){
WCSession *session = [WCSession defaultSession];
session.delegate = self;
[session activateSession];
}
}
-(void)session:(WCSession *)session didReceiveMessage:(NSDictionary<NSString *,id> *)message replyHandler:(void (^)(NSDictionary<NSString *,id> * _Nonnull))replyHandler{
/*UIApplication *application = [UIApplication sharedApplication];
__block UIBackgroundTaskIdentifier identifier = UIBackgroundTaskInvalid;
dispatch_block_t endBlock = ^ {
if (identifier != UIBackgroundTaskInvalid) {
[application endBackgroundTask:identifier];
}
identifier = UIBackgroundTaskInvalid;
};
identifier = [application beginBackgroundTaskWithExpirationHandler:endBlock];*/
[self setUpAppForWatch];
[self getTheDate];
startDate = todayDay;
endDate = tomorrow;
//[self getTodaySalesforWatch];
NSString *currency = [ftDateParser currencyFormat:totalSales];
NSDictionary *dic = @{@"label": [NSString stringWithFormat:@"%@", @"Town Crier, Inc."],
@"totalSales": currency,
@"rowText": storeSalesData,//[NSString stringWithFormat:@"%@", currency]
@"storeNames":storeNames
};
NSString *request = [message objectForKey:@"request"];
if ([request isEqualToString:@"todaySales"]) {
[self getTodaySalesforWatch];
}
else if ([request isEqualToString:@"ySales"]){
[self connectToWebService];
}
if (done) {
replyHandler(dic);
}
}
Edit:
Maybe the changes to the parent app were happening before, but I didn't notice because the app was running in the background. I still can't get it to wake the parent app.
You don't link to the source of the quote at the top of your question, but it must be referring to the openParentApplication method of WatchKit 1. Devices running watchOS 2.0 cannot call openParentApplication.
The method you're implementing in the code in your question is for a WCSession, which only works for immediate communication between a WatchKit app extension and an iOS app that are both running at the same time. This method does not cause your iOS app to launch, neither in the background nor in the foreground. Other asynchronous communication methods must be used if both apps are not running at the time.
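If the data doesn't need an immediate reply, the queued WCSession transfers are the usual alternative, since they are delivered opportunistically even when the counterpart app isn't running. A minimal sketch reusing the @"request" key from the code above:
// Queued, background-safe transfer: the system delivers the dictionary
// when it can, without requiring the counterpart app to be running now.
NSError *error = nil;
BOOL queued = [[WCSession defaultSession] updateApplicationContext:@{@"request": @"todaySales"}
error:&error];
if (!queued) {
NSLog(@"updateApplicationContext failed: %@", error);
}
transferUserInfo: behaves similarly, but it queues every dictionary in order instead of replacing the previously sent one.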
I expect that once I tap a toolbar button like "cafe", cafes will show in the visible map region. Data are fetched from the Google Places API. Everything works fine except that the pins don't display after tapping the button.
Some latency is expected here, for sure. But I do the fetch on a background queue and put up a spinning wheel while waiting, and the spinning wheel hides when fetching and parsing are done. So I am quite sure the data is there at the moment the spinning wheel disappears. Yet the pins don't show up until I scroll the map.
I figure that scrolling the map only triggers mapView:regionDidChangeAnimated:, but I can't figure out how that relates to the problem. Can anyone help?
The source code of ViewController.m, where pretty much everything happens.
#import "ViewController.h"
#import "MapPoint.h"
#import "MBProgressHUD.h"
#define kGOOGLE_API_KEY @"AIzaSyCHqbAoY7WCL3l7x188ZM4ciiTixejzQ4Y"
#define kBgQueue dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
@interface ViewController () <CLLocationManagerDelegate, MKMapViewDelegate>
@property (weak, nonatomic) IBOutlet MKMapView *mapView;
@property (strong, nonatomic) CLLocationManager *locationManager;
@property int currentDist;
@property CLLocationCoordinate2D currentCentre;
@end
@implementation ViewController
@synthesize mapView = _mapView;
@synthesize locationManager = _locationManager;
@synthesize currentDist = _currentDist;
@synthesize currentCentre = _currentCentre;
- (void)viewDidLoad{
[super viewDidLoad];
}
- (void)viewDidUnload{
[self setMapView:nil];
[super viewDidUnload];
}
// set the map region after launching
-(void)viewWillAppear:(BOOL)animated
{
//Instantiate a location object.
self.locationManager = [[CLLocationManager alloc] init];
//Make this controller the delegate for the location manager.
self.locationManager.delegate = self;
//Set some parameters for the location object.
[self.locationManager setDistanceFilter:kCLDistanceFilterNone];
[self.locationManager setDesiredAccuracy:kCLLocationAccuracyBest];
// order: latitude, longitude
CLLocationCoordinate2D center = self.locationManager.location.coordinate;
// the span is measured in degrees
MKCoordinateSpan span = MKCoordinateSpanMake(0.03, 0.03);
MKCoordinateRegion region = MKCoordinateRegionMake(center, span);
[self.mapView setRegion:region animated:YES];
// NSLog(#"currentCentre is (%f , %f)", self.currentCentre.latitude, self.currentCentre.longitude);
}
// Get place tpye from button title
// All buttons share this one method
- (IBAction)toolbarButtonPressed:(id)sender
{
UIBarButtonItem *button = (UIBarButtonItem *)sender;
NSString *buttonTitle = [button.title lowercaseString];
//Use this title text to build the URL query and get the data from Google.
[self queryGooglePlaces:buttonTitle];
}
// Parse response JSON data
-(void)parseData:(NSData *)responseData {
NSError* error;
NSDictionary* json = [NSJSONSerialization JSONObjectWithData:responseData
options:kNilOptions
error:&error];
//The results from Google will be an array obtained from the NSDictionary object with the key "results".
NSArray* places = [json objectForKey:@"results"];
[self plotPositions:places];
NSLog(#"Plot is done");
}
// Format query string
-(void) queryGooglePlaces: (NSString *) googleType {
// query string
NSString *url = [NSString stringWithFormat:@"https://maps.googleapis.com/maps/api/place/search/json?location=%f,%f&radius=%@&types=%@&sensor=true&key=%@", self.currentCentre.latitude, self.currentCentre.longitude, [NSString stringWithFormat:@"%i", _currentDist], googleType, kGOOGLE_API_KEY];
//string to URL
NSURL *googleRequestURL=[NSURL URLWithString:url];
// Retrieve data from the query URL by GCD
dispatch_async(kBgQueue, ^{
NSData* data = [NSData dataWithContentsOfURL: googleRequestURL];
[self parseData:data];
[MBProgressHUD hideHUDForView:self.view animated:YES];
});
MBProgressHUD *hud = [MBProgressHUD showHUDAddedTo:self.view animated:YES];
hud.labelText = @"Please Wait...";
}
#pragma mark - Map View Delegate
// called many times when map scrolling or zooming
// Use this to get currentCentre and currentDist (radius)
-(void)mapView:(MKMapView *)mapView regionDidChangeAnimated:(BOOL)animated {
//Get the east and west points on the map so you can calculate the distance (zoom level) of the current map view.
MKMapRect mRect = self.mapView.visibleMapRect;
MKMapPoint eastMapPoint = MKMapPointMake(MKMapRectGetMinX(mRect), MKMapRectGetMidY(mRect));
MKMapPoint westMapPoint = MKMapPointMake(MKMapRectGetMaxX(mRect), MKMapRectGetMidY(mRect));
//Set your current distance instance variable.
self.currentDist = MKMetersBetweenMapPoints(eastMapPoint, westMapPoint);
//Set your current center point on the map instance variable.
self.currentCentre = self.mapView.centerCoordinate;
// NSLog(#"currentCentre is (%f , %f)", self.currentCentre.latitude, self.currentCentre.longitude);
}
// Setup annotation objects
-(void)plotPositions:(NSArray *)data {
// 1 - Remove any existing custom annotations but not the user location blue dot.
for (id<MKAnnotation> annotation in self.mapView.annotations) {
if ([annotation isKindOfClass:[MapPoint class]]) {
[self.mapView removeAnnotation:annotation];
}
}
// 2 - Loop through the array of places returned from the Google API.
for (int i=0; i<[data count]; i++) {
//Retrieve the NSDictionary object in each index of the array.
NSDictionary* place = [data objectAtIndex:i];
// 3 - There is a specific NSDictionary object that gives us the location info.
NSDictionary *geo = [place objectForKey:@"geometry"];
// Get the lat and long for the location.
NSDictionary *loc = [geo objectForKey:@"location"];
// 4 - Get your name and address info for adding to a pin.
NSString *name=[place objectForKey:@"name"];
NSString *vicinity=[place objectForKey:@"vicinity"];
// Create a special variable to hold this coordinate info.
CLLocationCoordinate2D placeCoord;
// Set the lat and long.
placeCoord.latitude=[[loc objectForKey:@"lat"] doubleValue];
placeCoord.longitude=[[loc objectForKey:@"lng"] doubleValue];
// 5 - Create a new annotation.
MapPoint *placeObject = [[MapPoint alloc] initWithName:name address:vicinity coordinate:placeCoord];
[self.mapView addAnnotation:placeObject];
}
NSLog(#"addAnnotation is done");
}
// Setup annotation view
-(MKAnnotationView *)mapView:(MKMapView *)mapView viewForAnnotation:(id <MKAnnotation>)annotation {
// Define your reuse identifier.
static NSString *identifier = @"MapPoint";
if ([annotation isKindOfClass:[MapPoint class]]) {
MKPinAnnotationView *annotationView = (MKPinAnnotationView *) [self.mapView dequeueReusableAnnotationViewWithIdentifier:identifier];
if (annotationView == nil) {
annotationView = [[MKPinAnnotationView alloc] initWithAnnotation:annotation reuseIdentifier:identifier];
} else {
annotationView.annotation = annotation;
}
annotationView.enabled = YES;
annotationView.canShowCallout = YES;
annotationView.animatesDrop = YES;
// NSLog(#"annotation view is added");
return annotationView;
}
return nil;
}
@end
A couple of things:
Move your removeAnnotation and addAnnotation code to run on the UI thread, e.g.:
dispatch_async(dispatch_get_main_queue(), ^
{
[self.mapView addAnnotation:placeObject];
});
Move your viewWillAppear initialization code to viewDidLoad; viewWillAppear may be called multiple times during the lifetime of your view controller.
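Applied to the fetch in queryGooglePlaces:, the first fix could look like this sketch, which keeps the download on the background queue but moves parsing/plotting and the HUD dismissal back to the main queue:
dispatch_async(kBgQueue, ^{
NSData *data = [NSData dataWithContentsOfURL:googleRequestURL];
dispatch_async(dispatch_get_main_queue(), ^{
// plotPositions (called by parseData) now adds annotations on the main thread
[self parseData:data];
[MBProgressHUD hideHUDForView:self.view animated:YES];
});
});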
What is the simplest way to play a video programmatically with Objective-C in Mac OS X 10.7 (Lion)? And if I want to support OS X 10.6 (Snow Leopard) too?
I noticed that iOS AV Foundation was introduced to OS X 10.7. Unfortunately the documentation seems to be written for iOS and I found it confusing.
Here's an NSView subclass that plays a video given a URL, using AV Foundation (thus Mac OS X 10.7 upwards only). It is based on the AVSimplePlayer sample code.
Header:
@interface RMVideoView : NSView
@property (nonatomic, readonly, strong) AVPlayer* player;
@property (nonatomic, readonly, strong) AVPlayerLayer* playerLayer;
@property (nonatomic, retain) NSURL* videoURL;
- (void) play;
@end
Implementation:
static void *RMVideoViewPlayerLayerReadyForDisplay = &RMVideoViewPlayerLayerReadyForDisplay;
static void *RMVideoViewPlayerItemStatusContext = &RMVideoViewPlayerItemStatusContext;
@interface RMVideoView()
- (void)onError:(NSError*)error;
- (void)onReadyToPlay;
- (void)setUpPlaybackOfAsset:(AVAsset *)asset withKeys:(NSArray *)keys;
@end
@implementation RMVideoView
@synthesize player = _player;
@synthesize playerLayer = _playerLayer;
@synthesize videoURL = _videoURL;
- (id)initWithFrame:(NSRect)frame {
self = [super initWithFrame:frame];
if (self) {
self.wantsLayer = YES;
_player = [[AVPlayer alloc] init];
[self addObserver:self forKeyPath:#"player.currentItem.status" options:NSKeyValueObservingOptionNew context:RMVideoViewPlayerItemStatusContext];
}
return self;
}
- (void) dealloc {
[self.player pause];
[self removeObserver:self forKeyPath:@"player.currentItem.status"];
[self removeObserver:self forKeyPath:@"playerLayer.readyForDisplay"];
[_player release];
[_playerLayer release];
[_videoURL release];
[super dealloc];
}
- (void) setVideoURL:(NSURL *)videoURL {
if (_videoURL != videoURL) {
[_videoURL release];
_videoURL = [videoURL retain]; // retain under MRC, since dealloc releases it
}
[self.player pause];
[self.playerLayer removeFromSuperlayer];
AVURLAsset *asset = [AVAsset assetWithURL:self.videoURL];
NSArray *assetKeysToLoadAndTest = [NSArray arrayWithObjects:@"playable", @"hasProtectedContent", @"tracks", @"duration", nil];
[asset loadValuesAsynchronouslyForKeys:assetKeysToLoadAndTest completionHandler:^(void) {
dispatch_async(dispatch_get_main_queue(), ^(void) {
[self setUpPlaybackOfAsset:asset withKeys:assetKeysToLoadAndTest];
});
}];
}
#pragma mark - KVO
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if (context == RMVideoViewPlayerItemStatusContext) {
AVPlayerItemStatus status = [[change objectForKey:NSKeyValueChangeNewKey] integerValue];
switch (status) {
case AVPlayerItemStatusUnknown:
break;
case AVPlayerItemStatusReadyToPlay:
[self onReadyToPlay];
break;
case AVPlayerItemStatusFailed:
[self onError:nil];
break;
}
} else if (context == RMVideoViewPlayerLayerReadyForDisplay) {
if ([[change objectForKey:NSKeyValueChangeNewKey] boolValue]) {
self.playerLayer.hidden = NO;
}
} else {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
}
#pragma mark - Private
- (void)onError:(NSError*)error {
// Notify delegate
}
- (void)onReadyToPlay {
// Notify delegate
}
- (void)setUpPlaybackOfAsset:(AVAsset *)asset withKeys:(NSArray *)keys {
for (NSString *key in keys) {
NSError *error = nil;
if ([asset statusOfValueForKey:key error:&error] == AVKeyValueStatusFailed) {
[self onError:error];
return;
}
}
if (!asset.isPlayable || asset.hasProtectedContent) {
[self onError:nil];
return;
}
if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) { // Asset has video tracks
_playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
self.playerLayer.frame = self.layer.bounds;
self.playerLayer.autoresizingMask = kCALayerWidthSizable | kCALayerHeightSizable;
self.playerLayer.hidden = YES;
[self.layer addSublayer:self.playerLayer];
[self addObserver:self forKeyPath:#"playerLayer.readyForDisplay" options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew context:RMVideoViewPlayerLayerReadyForDisplay];
}
// Create a new AVPlayerItem and make it our player's current item.
AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:asset];
[self.player replaceCurrentItemWithPlayerItem:playerItem];
}
#pragma mark - Public
- (void) play {
[self.player play];
}
@end
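Usage could look something like this sketch (the movie path is a placeholder; play can also be deferred until the ready-to-play callback fires):
RMVideoView *videoView = [[RMVideoView alloc] initWithFrame:[window.contentView bounds]];
videoView.autoresizingMask = NSViewWidthSizable | NSViewHeightSizable;
videoView.videoURL = [NSURL fileURLWithPath:@"/path/to/movie.mov"]; // placeholder
[window.contentView addSubview:videoView];
[videoView play]; // AVPlayer begins playback once the item is ready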
"Simplest" depends on exactly what you're trying to do. If you want more control (e.g., rendering the movie as an OpenGL texture) or less (e.g., a completely independent window that you can just pop up and ignore), there might be different answers.
But for most use cases, if you want 10.6+ support, the simplest way to show a movie is QTKit. See the article "Using QTKit for Media Playback" in the Xcode documentation for a good starting point.
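For reference, a minimal QTKit sketch (it assumes a QTMovieView outlet named movieView in your nib; the file path is a placeholder):
#import <QTKit/QTKit.h>
// Load and play a movie with QTKit (works on 10.6).
NSError *error = nil;
QTMovie *movie = [QTMovie movieWithURL:[NSURL fileURLWithPath:@"/path/to/movie.mov"]
error:&error];
if (movie) {
[self.movieView setMovie:movie];
[movie play];
} else {
NSLog(@"Could not load movie: %@", error);
}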