How to detect if the UIMotionEffect is supported? - ios7

Is there a way to detect if the UIMotionEffect is supported on the device my app is running on?

You can test for it like this:
if ([UIMotionEffect class]) {
    // do stuff with UIMotionEffect
} else {
    // UIMotionEffect does not exist
}
If your deployment target is below iOS 4.2 (where weakly linked classes are not available), do it like this:
Class theClass = NSClassFromString(@"UIMotionEffect");
if (theClass) {
    // do stuff with UIMotionEffect
} else {
    // UIMotionEffect does not exist
}
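For reference, a minimal sketch of what the "do stuff" branch might look like, using UIInterpolatingMotionEffect (the concrete UIMotionEffect subclass that shipped in iOS 7) to add a small parallax effect; someView is a placeholder for your own view:
if ([UIMotionEffect class]) {
    // Tilt-driven horizontal parallax of +/- 10 points.
    UIInterpolatingMotionEffect *effect =
        [[UIInterpolatingMotionEffect alloc]
            initWithKeyPath:@"center.x"
                       type:UIInterpolatingMotionEffectTypeTiltAlongHorizontalAxis];
    effect.minimumRelativeValue = @(-10);
    effect.maximumRelativeValue = @(10);
    [someView addMotionEffect:effect];
}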

Related

How does WorldEdit handle brushes?

I'm trying to find out how the Bukkit version of WorldEdit handles brushes. I've been looking at the source code on GitHub, but I couldn't find anything useful. I've tried to recreate the effect, but I can only get it to work when I'm within interaction reach of the target block.
This is about as close as it gets in the source code:
} else if (action == Action.RIGHT_CLICK_AIR) {
    if (we.handleRightClick(player)) {
        event.setCancelled(true);
    }
}
(WorldEdit/worldedit-bukkit/src/main/java/com/sk89q/worldedit/bukkit/WorldEditListener.java, line 143-147)
There are some other parts of the code that get very close. I've also looked in /worldedit-core, but there's nothing there either.
Could someone help me here?
Edit: This is how I try to do it:
public static void onRightClick(PlayerInteractEvent event) {
    if (event.getAction() == Action.RIGHT_CLICK_BLOCK) {
        Location location = event.getClickedBlock().getLocation();
        if (event.getItem() != null) {
            if (event.getItem().getItemMeta().equals(ItemManager.wand.getItemMeta())) {
                Player player = event.getPlayer();
                player.getWorld().doStuff(location);
            }
        }
    }
}
Edit #2: what I'm most curious about is: How does WE select the location to apply the brush if you are outside of interaction reach?
I needed to use a BlockIterator for this. The final code looks like this:
public class BoomWandEvent implements Listener {
    @EventHandler
    public void onRightClick(PlayerInteractEvent event) {
        Player player = event.getPlayer();
        if (event.getAction() == Action.RIGHT_CLICK_BLOCK || event.getAction() == Action.RIGHT_CLICK_AIR) {
            if (player.getInventory().getItemInMainHand().equals(ItemManager.explosionWand)) {
                Location eyePos = player.getEyeLocation();
                BlockIterator raytracer = new BlockIterator(eyePos, 0.0D, player.getClientViewDistance() * 16);
                while (raytracer.hasNext()) {
                    Location location = raytracer.next().getLocation();
                    Material type = player.getWorld().getBlockAt(location).getType();
                    if (type != Material.AIR && type != Material.CAVE_AIR && type != Material.VOID_AIR) {
                        player.getWorld().createExplosion(location, 4f);
                        return;
                    }
                }
            }
        }
    }
}
Thanks to Rogue for helping!
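As an aside, if your server API is recent enough, Bukkit can do the ray trace for you via LivingEntity#getTargetBlock, which avoids the manual BlockIterator loop. A minimal sketch, assuming a 1.13+ API where CAVE_AIR and VOID_AIR exist (the class name and the 100-block range are just illustrative):
import java.util.EnumSet;
import java.util.Set;

import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.entity.Player;

public final class TargetBlockExample {

    // Materials the ray trace should pass through.
    private static final Set<Material> TRANSPARENT =
            EnumSet.of(Material.AIR, Material.CAVE_AIR, Material.VOID_AIR);

    public static void explodeAtTarget(Player player) {
        // Walks the player's line of sight up to 100 blocks and
        // returns the first block that isn't in TRANSPARENT.
        Block target = player.getTargetBlock(TRANSPARENT, 100);
        if (target != null && !TRANSPARENT.contains(target.getType())) {
            player.getWorld().createExplosion(target.getLocation(), 4f);
        }
    }
}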

ios: how to detect if voice dictation was used for UITextView? Or microphone button was tapped on keyboard

How can I detect whether voice dictation was used for a UITextView, or whether the microphone button on the keyboard was tapped?
You can use the Speech framework, which is what Siri uses for speech recognition. First import the Speech framework and conform to SFSpeechRecognizerDelegate. Here is a Swift version; it may be helpful.
import Speech

class ViewController: UIViewController, SFSpeechRecognizerDelegate {
    private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
    private var recognitionTask: SFSpeechRecognitionTask?
    private let audioEngine = AVAudioEngine()
    private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "en-US"))

    override func viewDidLoad() {
        super.viewDidLoad()
        self.authorizeSpeech()
    }

    private func authorizeSpeech() {
        SFSpeechRecognizer.requestAuthorization { (authStatus) in
            var isButtonEnabled = false
            switch authStatus {
            case .authorized:
                isButtonEnabled = true
            case .denied:
                isButtonEnabled = false
                print("User denied access to speech recognition")
            case .restricted:
                isButtonEnabled = false
                print("Speech recognition restricted on this device")
            case .notDetermined:
                isButtonEnabled = false
                print("Speech recognition not yet authorized")
            }
            OperationQueue.main.addOperation {
                print(isButtonEnabled) // whether speech recognition is authorized
            }
        }
    }
}
Now add usage-description messages to your Info.plist:
<key>NSMicrophoneUsageDescription</key>
<string>Your microphone will be used to record your speech when you press the Start Recording button.</string>
<key>NSSpeechRecognitionUsageDescription</key>
<string>Speech recognition will be used to determine which words you speak into this device's microphone.</string>
Now create a new function called startRecording():
func startRecording() {
    if recognitionTask != nil {
        recognitionTask?.cancel()
        recognitionTask = nil
    }
    let audioSession = AVAudioSession.sharedInstance()
    do {
        try audioSession.setCategory(AVAudioSessionCategoryRecord)
        try audioSession.setMode(AVAudioSessionModeMeasurement)
        try audioSession.setActive(true, with: .notifyOthersOnDeactivation)
    } catch {
        print("audioSession properties weren't set because of an error.")
    }
    recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
    guard let inputNode = audioEngine.inputNode else {
        fatalError("Audio engine has no input node")
    }
    guard let recognitionRequest = recognitionRequest else {
        fatalError("Unable to create an SFSpeechAudioBufferRecognitionRequest object")
    }
    recognitionRequest.shouldReportPartialResults = true
    recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest, resultHandler: { (result, error) in
        var isFinal = false
        if let result = result {
            // your_text_view is a placeholder for your UITextView outlet
            self.your_text_view.text = result.bestTranscription.formattedString
            isFinal = result.isFinal
        }
        if error != nil || isFinal {
            self.audioEngine.stop()
            inputNode.removeTap(onBus: 0)
            self.recognitionRequest = nil
            self.recognitionTask = nil
        }
    })
    let recordingFormat = inputNode.outputFormat(forBus: 0)
    inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer, when) in
        self.recognitionRequest?.append(buffer)
    }
    audioEngine.prepare()
    do {
        try audioEngine.start()
    } catch {
        print("audioEngine couldn't start because of an error.")
    }
}
Then implement the SFSpeechRecognizerDelegate method:
func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
    if available {
        startRecording()
    } else {
        // recognizer is unavailable; disable your recording UI here
    }
}
If you are looking at this to hide a placeholder label, the snippet below will help you:
public func textViewDidChange(_ textView: UITextView) {
    if textView.text.isEmpty {
        lblPlaceholder?.isHidden = false
    } else {
        lblPlaceholder?.isHidden = true
    }
}
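Note that the Speech framework drives your own recording session; it does not tell you when the user taps the keyboard's built-in microphone (dictation) button. If that is what you need, one widely used approach is to observe input-mode changes. This is only a sketch, and it assumes dictation surfaces as an input mode whose primaryLanguage is "dictation" (commonly reported behavior, not a documented contract):
import UIKit

final class DictationAwareTextView: UITextView {

    override init(frame: CGRect, textContainer: NSTextContainer?) {
        super.init(frame: frame, textContainer: textContainer)
        observeInputModeChanges()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
        observeInputModeChanges()
    }

    private func observeInputModeChanges() {
        // Fires whenever the active keyboard/input mode changes,
        // including a switch to the dictation "keyboard".
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(inputModeChanged),
            name: UITextInputMode.currentInputModeDidChangeNotification,
            object: nil)
    }

    @objc private func inputModeChanged() {
        // Assumption: while dictating, this responder's input mode
        // reports "dictation" as its primary language.
        if textInputMode?.primaryLanguage == "dictation" {
            print("Microphone button tapped; user is dictating")
        }
    }
}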

"matchMedia" support in Dart

How to use window.matchMedia in Dart?
I have found the corresponding method:
MediaQueryList matchMedia(String query)
And the "MediaQueryList" method:
void addListener(MediaQueryListListener listener)
But MediaQueryListListener has no constructor and looks like some sort of generated stub.
I have a JS example:
// media query event handler
if (matchMedia) {
    var mq = window.matchMedia("(min-width: 500px)");
    mq.addListener(WidthChange);
    WidthChange(mq);
}

// media query change
function WidthChange(mq) {
    if (mq.matches) {
        // window width is at least 500px
    } else {
        // window width is less than 500px
    }
}
And it has good support http://caniuse.com/#feat=matchmedia
As pointed out in a comment, it doesn't seem to be implemented in Dart for now.
However, you can use dart:js to do it like this:
import 'dart:js';

main() {
  if (context['matchMedia'] != null) {
    final mq = context.callMethod('matchMedia', ['(min-width: 500px)']);
    mq.callMethod('addListener', [widthChange]);
    widthChange(mq);
  }
}

widthChange(mq) {
  if (mq['matches']) {
    print('window width is at least 500px');
  } else {
    print('window width is less than 500px');
  }
}
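For what it's worth, later Dart SDKs expose this directly in dart:html, so the dart:js detour is no longer needed there. A minimal sketch, assuming a reasonably recent SDK where MediaQueryList and its addListener are bound:
import 'dart:html';

void main() {
  final mq = window.matchMedia('(min-width: 500px)');
  widthChange(mq);
  // MediaQueryList is an EventTarget in dart:html, so we can
  // register a listener for its change events.
  mq.addListener((Event e) => widthChange(mq));
}

void widthChange(MediaQueryList mq) {
  print(mq.matches
      ? 'window width is at least 500px'
      : 'window width is less than 500px');
}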

How can you detect the connection and disconnection of external monitors on the Mac?

Do you have any idea how I can detect additional screens being plugged in / unplugged in a Cocoa application?
I want to detect the moment when the user plugs another screen into their Mac or unplugs it. How could I do this?
Your answer lies in Quartz.
#include <ApplicationServices/ApplicationServices.h>

CGError CGDisplayRegisterReconfigurationCallback(
    CGDisplayReconfigurationCallBack proc,
    void *userInfo
);
And then your proc looks like:
void MyCGDisplayReconfigurationCallBack(
    CGDirectDisplayID display,
    CGDisplayChangeSummaryFlags flags,
    void *userInfo)
{
    if (flags & kCGDisplayAddFlag || flags & kCGDisplayRemoveFlag) {
        DoStuff(display, flags, userInfo);
    }
}
In Swift 5:
extension ScreenDetector {
    static let callback: CGDisplayReconfigurationCallBack = { (displayId, flags, userInfo) in
        guard let opaque = userInfo else {
            return
        }
        let mySelf = Unmanaged<ScreenDetector>.fromOpaque(opaque).takeUnretainedValue()
        if flags.contains(.addFlag) {
            // Added display...
        } else if flags.contains(.removeFlag) {
            // Removed display...
        }
    }

    func addObservers() {
        let userData = Unmanaged<ScreenDetector>.passUnretained(self).toOpaque()
        CGDisplayRegisterReconfigurationCallback(ScreenDetector.callback, userData)
    }

    func removeObservers() {
        let userData = Unmanaged<ScreenDetector>.passUnretained(self).toOpaque()
        CGDisplayRemoveReconfigurationCallback(ScreenDetector.callback, userData)
    }
}
If someone is interested in doing this in Swift 2.3, I scratched my head for a little while to translate @iluvcapra's code:
let userData = UnsafeMutablePointer<ViewController>(Unmanaged.passUnretained(self).toOpaque()) // use the class name of your "self" for future reference inside the callback
CGDisplayRegisterReconfigurationCallback({ (display: UInt32, flags: CGDisplayChangeSummaryFlags, userInfo: UnsafeMutablePointer<Swift.Void>) in
    let mySelf = Unmanaged<ViewController>.fromOpaque(COpaquePointer(userInfo)).takeUnretainedValue() // change here to your class name
    if flags.rawValue & CGDisplayChangeSummaryFlags.AddFlag.rawValue > 0 {
        // do stuff on connect
        mySelf.someFunction()
    } else if flags.rawValue & CGDisplayChangeSummaryFlags.RemoveFlag.rawValue > 0 {
        // do stuff on disconnect
    }
}, userData)
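If you don't need the per-display detail the Quartz callback gives you, a higher-level option is AppKit's screen-parameters notification. A sketch, assuming you're happy to diff NSScreen.screens yourself to tell connects from disconnects:
import AppKit

final class ScreenWatcher {
    private var screenCount = NSScreen.screens.count
    private var token: NSObjectProtocol?

    init() {
        // Posted whenever the display configuration changes
        // (screens added or removed, resolution changed, ...).
        token = NotificationCenter.default.addObserver(
            forName: NSApplication.didChangeScreenParametersNotification,
            object: nil,
            queue: .main
        ) { [weak self] _ in
            guard let self = self else { return }
            let newCount = NSScreen.screens.count
            if newCount > self.screenCount {
                print("Display connected")
            } else if newCount < self.screenCount {
                print("Display disconnected")
            }
            self.screenCount = newCount
        }
    }

    deinit {
        if let token = token {
            NotificationCenter.default.removeObserver(token)
        }
    }
}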

Pause application in QML when app is in background Symbian

I want to know of any pure QML way to find out whether the application is in the background, and then stop or play music accordingly. On MeeGo an alternative way to do this is through the PlatformWindow element, but it does not exist in Symbian QML. Help needed, please.
Finally I got it working :) and I did it the Qt way... here are the steps.
1) Create a class myEventFilter:
class myEventFilter : public QObject
{
    Q_OBJECT

public:
    bool bis_foreground;

protected:
    bool eventFilter(QObject *obj, QEvent *event) {
        switch (event->type()) {
        case QEvent::WindowActivate:
            emit qmlvisiblechange(true);
            qDebug() << "Window activated";
            bis_foreground = true;
            return true;
        case QEvent::WindowDeactivate:
            emit qmlvisiblechange(false);
            qDebug() << "Window deactivated";
            bis_foreground = false;
            return true;
        default:
            return false;
        }
    }

public slots:
    Q_INVOKABLE QString checkvisibility() {
        if (bis_foreground == true) return "true";
        else return "false";
    }

signals:
    void qmlvisiblechange(bool is_foreground);
};
2) Then in main.cpp, include this header, instantiate the filter, install it on the application, and expose it to QML with setContextProperty (ef is an instance of myEventFilter; see the sketch below):
context->setContextProperty("myqmlobject", &ef);
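For completeness, a minimal main.cpp sketch. This assumes the Qt Quick 1 qmlapplicationviewer template that Symbian projects typically used (QmlApplicationViewer, setMainQmlFile and showExpanded come from that template); the important parts are installEventFilter and setContextProperty:
#include <QtGui/QApplication>
#include <QtDeclarative/QDeclarativeContext>
#include "qmlapplicationviewer.h"
#include "myeventfilter.h"

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);

    myEventFilter ef;
    app.installEventFilter(&ef);   // receive WindowActivate/Deactivate events

    QmlApplicationViewer viewer;
    QDeclarativeContext *context = viewer.rootContext();
    context->setContextProperty("myqmlobject", &ef);

    viewer.setMainQmlFile(QLatin1String("qml/main.qml"));
    viewer.showExpanded();

    return app.exec();
}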
3) In the QML file, connect to it like this:
Item {
    id: name
    Connections {
        target: myqmlobject
        onQmlvisiblechange: {
            if (is_foreground) {
                // don't do anything...
            } else {
                playSound.stop()
            }
        }
    }
}
Enjoy :)
Why do you need a pure QML way?
You can detect if an application has been sent to the background by installing an event filter.
Check: http://www.developer.nokia.com/Community/Wiki/Detecting_when_a_Qt_application_has_been_switched_to_the_background_and_when_resumed
For a "pure" QML way, there is the Symbian QML element:
http://doc.qt.nokia.com/qt-components-symbian/qml-symbian.html
It has a foreground property that indicates whether the app is in the foreground or in the background. You can try connecting to onForegroundChanged.
From the documentation, the Symbian element is not "creatable". It exists as a context property named symbian. So a sample usage would be:
import QtQuick 1.1
import com.nokia.symbian 1.1

PageStackWindow {
    id: window
    initialPage: MainPage { tools: toolBarLayout }
    showStatusBar: true
    showToolBar: true

    function appForegroundChanged() {
        console.log("Foreground: " + symbian.foreground)
    }

    function appCurrentTimeChanged() {
        console.log("Current time: " + symbian.currentTime)
    }

    Component.onCompleted: {
        symbian.currentTimeChanged.connect(appCurrentTimeChanged)
        symbian.foregroundChanged.connect(appForegroundChanged)
    }

    ToolBarLayout {
        id: toolBarLayout
        ToolButton {
            flat: true
            iconSource: "toolbar-back"
            onClicked: window.pageStack.depth <= 1 ? Qt.quit() : window.pageStack.pop()
        }
    }
}