Video streaming on PlayBook - AIR

I'm trying to play a video via RTSP using Adobe AIR. I have these permissions:
<action>access_internet</action>
<action>set_audio_volume</action>
<action>access_shared</action>
<action>play_audio</action>
but when I call play(), nothing happens.
Is it possible to do RTSP streaming on the PlayBook? If not, is any other kind of video streaming possible? WebWorks? The HTML5 video tag?
The preinstalled native YouTube app does stream video.
Thanks in advance.
Here is the sample code:
package{
import flash.display.Sprite;
import flash.filesystem.File;
import flash.events.Event;
import qnx.events.MediaPlayerEvent;
import qnx.media.MediaPlayer;
import qnx.media.VideoDisplay;
import qnx.ui.events.MediaControlEvent;
import qnx.ui.media.*;
import qnx.dialog.AlertDialog;
import qnx.dialog.DialogSize;
/**
* ...
* @author Fernando Franco Giraldez
*/
// The following metadata specifies the size and properties of the canvas that
// this application should occupy on the BlackBerry PlayBook screen.
[SWF(width="1024", height="600", backgroundColor="#cccccc", frameRate="30")]
public class Main extends Sprite
{
private var _myPlayer:MediaPlayer;
private var _myVD:VideoDisplay;
private var _myMediaControl:MediaControl;
private var alert:AlertDialog;
public function Main()
{
try {
initializeUI();
initializePlayer();
}catch (ex:Error) {
showAlertDialog("Initialize error", ex.name + " - " + ex.message + "\n"+ex.getStackTrace());
}
}
private function initializePlayer():void
{
_myVD = new VideoDisplay();
_myVD.setPosition(1024/2 - 800/2, 600/2 - 480/2);
_myVD.setSize(800, 480);
_myVD.backgroundColor = 0xFFFFFF;
addChild(_myVD);
_myPlayer = new MediaPlayer();
_myPlayer.addEventListener(MediaPlayerEvent.INFO_CHANGE, infoChange);
var file:File = File.userDirectory.resolvePath("shared/videos/Wildlife.wmv");
//_myPlayer.url = file.nativePath; // using the local file I can see the video
_myPlayer.url = "http://devimages.apple.com/iphone/samples/bipbop/bipbopall.m3u8";
//_myPlayer.url = "rtsp://stream.the.sk/live/joj/joj-hm.3gp";
// but with any of the internet URLs I can't see anything
_myPlayer.videoDisplay = _myVD;
_myPlayer.prepare();
showAlertDialog("Preparando video", "acabo de meteer: " + _myPlayer.url);
}
private function initializeUI():void
{
_myMediaControl = new MediaControl();
_myMediaControl.width = 900;
_myMediaControl.x = Math.round((stage.stageWidth - _myMediaControl.width) / 2);
_myMediaControl.y = stage.stageHeight - _myMediaControl.height;
_myMediaControl.setOption( MediaControlOption.VOLUME, true );
_myMediaControl.setOption( MediaControlOption.PLAY_PAUSE, true );
_myMediaControl.setOption( MediaControlOption.NEXT, true );
_myMediaControl.setOption( MediaControlOption.PREVIOUS, true );
_myMediaControl.setOption( MediaControlOption.STOP, true );
_myMediaControl.setOption( MediaControlOption.SEEKBAR, true );
_myMediaControl.setOption( MediaControlOption.DURATION, true );
_myMediaControl.setOption( MediaControlOption.POSITION, true );
_myMediaControl.setOption( MediaControlOption.BACKGROUND, true);
_myMediaControl.setProperty( MediaControlProperty.VOLUME, 80 );
_myMediaControl.addEventListener( MediaControlEvent.STATE_CHANGE, mediaControlStateChange );
_myMediaControl.addEventListener( MediaControlEvent.PROPERTY_CHANGE, mediaControlPropChange );
addChild(_myMediaControl);
}
private function infoChange(event:MediaPlayerEvent):void {
if (event.what.position) {
_myMediaControl.setProperty(MediaControlProperty.POSITION, _myPlayer.position);
}
if (event.what.duration) {
_myMediaControl.setProperty(MediaControlProperty.DURATION, _myPlayer.duration);
}
if (event.what.state) {
_myMediaControl.setState(_myPlayer.isPlaying ? MediaControlState.PLAY : MediaControlState.PAUSE);
}
}
private function mediaControlStateChange(mediaControlEvent:MediaControlEvent):void
{
var state:String = _myMediaControl.getState();
switch( state )
{
case MediaControlState.PLAY:
if (!_myPlayer.isPlaying)
{
try {
_myPlayer.play();
showAlertDialog("Play", "play detectado");
}catch (ex:Error) {
showAlertDialog("play() error", ex.name + " - " + ex.message + "\n"+ex.getStackTrace());
}
}
else
{
_myPlayer.speed = 1000;
}
break;
case MediaControlState.PAUSE:
_myPlayer.pause();
break;
case MediaControlState.STOP:
_myPlayer.stop();
break;
case MediaControlState.SEEK_START:
_myPlayer.pause();
break;
case MediaControlState.SEEK_END:
_myPlayer.play();
break;
default:
break;
}
}
private function mediaControlPropChange(event:MediaControlEvent):void {
switch (event.property) {
case MediaControlProperty.POSITION:
{
_myPlayer.seek(uint( _myMediaControl.getProperty(MediaControlProperty.POSITION)));
}
break;
case MediaControlProperty.DURATION:
break;
case MediaControlProperty.FULLSCREEN:
break;
case MediaControlProperty.VOLUME:
break;
default:
break;
}
}
private function showAlertDialog(title:String,message:String):void
{
alert = new AlertDialog();
alert.title = title;
alert.message = message;
alert.addButton("OK");
alert.addButton("CANCEL");
alert.dialogSize= DialogSize.SIZE_MEDIUM;
alert.addEventListener(Event.SELECT, alertButtonClicked);
alert.show();
}
private function alertButtonClicked(event:Event):void
{
trace("Button Clicked Index: " + event.target.selectedIndex);
trace("Button properties Object"+event.target.getItemAt(event.target.selectedIndex));
}
}
}

It seems that RTSP wasn't supported as of 6/2011 (a year ago), according to the release notes for the NDK updates. The native YouTube app may not be using RTSP for streaming (there are other ways to stream video). Check out this link for more information.
On the WebWorks side, yes: the HTML5 video tag is fully implemented and will use the built-in video player. You can find more information here: https://bdsc.webapps.blackberry.com/html5/apis/HTMLVideoElement.html. Just note that this will not work on BB10.
Update: The HTML5 video tag is now also supported on the BB10 platform.
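For reference, a WebWorks page can simply embed the tag and let the built-in player handle playback. A minimal sketch; the MP4 URL is a placeholder, not a tested stream:
<!-- minimal HTML5 video element for a WebWorks app; src is a placeholder -->
<video width="800" height="480" controls>
    <source src="http://example.com/path/to/video.mp4" type="video/mp4" />
    Your device does not support the HTML5 video tag.
</video>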

Related

Adjust Brightness, Contrast using Camera.Parameters

I was trying to make a camera application, and I'm unable to find a way to change the camera brightness or contrast using Camera.Parameters.
So my question is how to add a brightness and contrast feature. For example, if I increase the SeekBar it should increase the brightness, and if I decrease the SeekBar it should decrease the brightness.
Please edit my code or post a separate answer to help me.
package com.example.beautymaker;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.hardware.Camera;
import android.hardware.camera2.CameraCharacteristics;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.zomato.photofilters.imageprocessors.Filter;
import com.zomato.photofilters.imageprocessors.subfilters.BrightnessSubFilter;
import java.io.IOException;
import java.util.List;
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraPreview(Context context, Camera camera) {
super(context);
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(this);
/*Camera.Parameters params = camera.getParameters();
final int[] previewFpsRange = new int[2];
params.getPreviewFpsRange(previewFpsRange);
if (previewFpsRange[0] == previewFpsRange[1]) {
final List<int[]> supportedFpsRanges = params.getSupportedPreviewFpsRange();
for (int[] range : supportedFpsRanges) {
if (range[0] != range[1]) {
params.setPreviewFpsRange(range[0], range[1]);
break;
}
}
}*/
// deprecated setting, but required on Android versions prior to 3.0
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
public void surfaceCreated(SurfaceHolder holder) {
try {
// create the surface and start camera preview
if (mCamera != null) {
mCamera.setPreviewDisplay(holder);
mCamera.startPreview();
}
} catch (IOException e) {
Log.d(VIEW_LOG_TAG, "Error setting camera preview: " + e.getMessage());
}
}
public void refreshCamera(Camera camera) {
if (mHolder.getSurface() == null) {
// preview surface does not exist
return;
}
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e) {
// ignore: tried to stop a non-existent preview
}
// set preview size and make any resize, rotate or
// reformatting changes here
// start preview with new settings
setCamera(camera);
try {
mCamera.setPreviewDisplay(mHolder);
mCamera.startPreview();
} catch (Exception e) {
Log.d(VIEW_LOG_TAG, "Error starting camera preview: " + e.getMessage());
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
refreshCamera(mCamera);
}
public void setCamera(Camera camera) {
//method to set a camera instance
mCamera = camera;
Camera.Parameters parameters = mCamera.getParameters();
// parameters.setPreviewFpsRange(1500,3000);
parameters.setExposureCompensation(parameters.getMaxExposureCompensation());
if(parameters.isAutoExposureLockSupported())
{
parameters.setAutoExposureLock(false);
}
// parameters.setColorEffect(Camera.Parameters.WHITE_BALANCE_INCANDESCENT);
parameters.getAutoExposureLock();
parameters.set("iso",50);
// parameters.setWhiteBalance();
parameters.setAutoWhiteBalanceLock(true);
parameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_SHADE);
/*Filter filter = new Filter();
filter.addSubFilter(new BrightnessSubFilter(parameters));*/
mCamera.setParameters(parameters);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
// mCamera.release();
}
//for brightness
public static Bitmap doBrightness(Bitmap src, int value) {
// image size
int width = src.getWidth();
int height = src.getHeight();
// create output bitmap
Bitmap bmOut = Bitmap.createBitmap(width, height, src.getConfig());
// color information
int A, R, G, B;
int pixel;
// scan through all pixels
for(int x = 0; x < width; ++x) {
for(int y = 0; y < height; ++y) {
// get pixel color
pixel = src.getPixel(x, y);
A = Color.alpha(pixel);
R = Color.red(pixel);
G = Color.green(pixel);
B = Color.blue(pixel);
// increase/decrease each channel
R += value;
if(R > 255) { R = 255; }
else if(R < 0) { R = 0; }
G += value;
if(G > 255) { G = 255; }
else if(G < 0) { G = 0; }
B += value;
if(B > 255) { B = 255; }
else if(B < 0) { B = 0; }
// apply new pixel color to output bitmap
bmOut.setPixel(x, y, Color.argb(A, R, G, B));
}
}
// return final image
return bmOut;
}
}
There is no method in Camera.Parameters to achieve this directly. You have to read the documentation for that class to check the available parameters, and note that the class is deprecated in API 21 and above.
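As a partial workaround, the closest built-in control in the legacy Camera API is exposure compensation, which brightens or darkens the live preview. Below is a minimal sketch, assuming an already-opened Camera instance and a SeekBar in your layout; the bindBrightnessSeekBar helper is hypothetical and not part of the original code.
// Sketch: map a SeekBar onto the camera's exposure-compensation range.
// Requires: import android.hardware.Camera; import android.widget.SeekBar;
private void bindBrightnessSeekBar(SeekBar seekBar, final Camera camera) {
    Camera.Parameters params = camera.getParameters();
    final int min = params.getMinExposureCompensation(); // e.g. -12
    final int max = params.getMaxExposureCompensation(); // e.g. +12
    if (min == 0 && max == 0) {
        return; // exposure compensation is not supported on this device
    }
    seekBar.setMax(max - min);
    seekBar.setProgress(-min); // start at 0 EV
    seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
        @Override
        public void onProgressChanged(SeekBar bar, int progress, boolean fromUser) {
            Camera.Parameters p = camera.getParameters();
            p.setExposureCompensation(min + progress);
            camera.setParameters(p); // the preview brightens or darkens immediately
        }
        @Override
        public void onStartTrackingTouch(SeekBar bar) { }
        @Override
        public void onStopTrackingTouch(SeekBar bar) { }
    });
}
True contrast control is not exposed by Camera.Parameters; for that you have to post-process frames (as your doBrightness() method does for bitmaps) or move to the camera2 API, which exposes more tuning controls.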

TreeView QueueDraw doesn't render the current row?

I'm working with a TreeView that contains several columns, one of which displays a pixbuf indicating whether audio is playing or paused. When the user double-clicks a row, audio playback starts and the row needs to be re-rendered to display the pixbuf icon. I used QueueDraw for this, but it only works once the cursor leaves the current row. How can I update the pixbuf directly?
CODE:
protected void trvMainCuesheetRowActivated (object o, RowActivatedArgs args)
{
log.debug("trvMainCuesheetRowActivated called");
TreeIter ti = TreeIter.Zero;
this.lsCuesheetData.GetIter(out ti,args.Path);
if (this.lsCuesheetData.GetValue(ti,0) != null)
{
Track tCurTrack = (Track)this.lsCuesheetData.GetValue(ti,0);
if (this.objProgram.getAudioManager().getPlayState() == AudioCuesheetEditor.AudioBackend.PlayState.Stopped)
{
this.objProgram.getAudioManager().play(tCurTrack);
this.refresh();
}
else
{
if (this.objProgram.getAudioManager().getPlayState() == AudioCuesheetEditor.AudioBackend.PlayState.Playing)
{
this.objProgram.getAudioManager().seek(tCurTrack);
this.refresh();
}
}
}
}
private void renderPlaying(TreeViewColumn _tvcColumn, CellRenderer _crCell, TreeModel _tmModel, TreeIter _tiIter)
{
Track tCurTrack = (Track)_tmModel.GetValue (_tiIter, 0);
//Just display an icon, if we are playing
if (this.objProgram.getAudioManager().getPlayState() == AudioCuesheetEditor.AudioBackend.PlayState.Playing)
{
if (this.objProgram.getAudioManager().getCurrentlyPlayingTrack() == tCurTrack)
{
Gdk.Pixbuf icon = this.RenderIcon(Stock.MediaPlay, IconSize.SmallToolbar, null);
(_crCell as CellRendererPixbuf).Pixbuf = icon;
}
else
{
(_crCell as CellRendererPixbuf).Pixbuf = null;
}
}
else
{
if (this.objProgram.getAudioManager().getPlayState() == AudioCuesheetEditor.AudioBackend.PlayState.Paused)
{
if (this.objProgram.getAudioManager().getCurrentlyPlayingTrack() == tCurTrack)
{
Gdk.Pixbuf icon = this.RenderIcon(Stock.MediaPause, IconSize.SmallToolbar, null);
(_crCell as CellRendererPixbuf).Pixbuf = icon;
}
else
{
(_crCell as CellRendererPixbuf).Pixbuf = null;
}
}
else
{
(_crCell as CellRendererPixbuf).Pixbuf = null;
}
}
}
//Purpose: Function used to refresh the MainWindow depending on new options set.
public void refresh()
{
//QueueDraw is needed since it fires a signal to cellrenderers to update
this.trvMainCuesheet.QueueDraw();
this.sbMainWindow.Visible = this.objProgram.getObjOption().getBShowStatusbar();
this.mwToolbar.Visible = this.objProgram.getObjOption().getBToolbarVisible();
}
Greetings
Sven
I found the error myself:
this.objProgram.getAudioManager().getCurrentlyPlayingTrack()
didn't always return a track where I expected one, so the renderer was actually working correctly. The bug is fixed, thanks anyway ;).
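For anyone who still hits the original symptom (a cell renderer only refreshing after the pointer leaves the row), one way to force a single row to re-render without a full QueueDraw is to emit a row-changed signal on the model. A minimal sketch against the ListStore from the question; the RefreshRow helper name is hypothetical:
// Sketch: re-run the cell-data functions for one row by signalling that its data changed.
private void RefreshRow(Gtk.TreePath path)
{
    Gtk.TreeIter iter;
    if (this.lsCuesheetData.GetIter(out iter, path))
    {
        // EmitRowChanged tells the TreeView this row changed, so renderPlaying()
        // runs again for it immediately, without waiting for a pointer move.
        this.lsCuesheetData.EmitRowChanged(path, iter);
    }
}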

ActionScript 3 coding error

I need a little bit of help with a code issue. I am trying to connect to the server on a button click. When I execute the code I receive this error in my output: 1120: Access of undefined property nc.close
This is strange because I already declared the nc variable in the btnStart function. I have been tweaking the code but I keep getting the same annoying error. If you have any suggestions on how to resolve this, that would be great!
Thanks
Code is below:
btn_One.addEventListener(MouseEvent.CLICK, btnStart);
btn_Two.addEventListener(MouseEvent.CLICK, btnClose);
function btnStart(event:MouseEvent):void{
trace("Connecting...");
var nc:NetConnection = new NetConnection();
nc.addEventListener(NetStatusEvent.NET_STATUS, netStatusHandler);
nc.connect("rtmfp://localhost/streamingLive");
}
//// ERROR LINE NC.CLOSE();
function btnClose(event:MouseEvent):void{
trace("Closing time");
nc.close();
}
function netStatusHandler(event:NetStatusEvent):void{
switch(event.info.code){
case "NetConnection.Connect.Success":
trace("Awesome connection");
break;
case "NetConnection.Connect.Failed":
trace("Unable to connect");
break;
case "NetConnection.Connect.Rejected":
trace("WHoooops");
break;
case "NetGroup.Connect.Success":
trace("GroupConnection");
break;
case "NetGroup.Connect.Failed":
trace("Group failed");
break;
case "NetGroup.Connect.Rejected":
trace("Ouch!!!");
break;
var ns:NetStream = new NetStream();
ns.publish("live", "streaming");
ns.attachCamera();
ns.attachAudio();
ns.connect(nc);
var ng:NetGroup = new NetGroup(nc, groupspec.groupspecWithAuthorizations());
ng.addEventListener(NetStatusEvent.NET_STATUS, netStatusHandler);
}
}
var cam:Camera = Camera.getCamera();
cam.setMode(420, 320, 15);
cam.setQuality(0, 85);
cam.addEventListener(StatusEvent.STATUS, statusHandler);
var vid:Video = new Video();
vid.width = cam.width;
vid.height = cam.height;
vid.x = 100;
vid.y = 100;
vid.attachCamera(cam);
addChild(vid);
var mic:Microphone = Microphone.getMicrophone();
mic.gain = 50;
mic.framesPerPacket = 1;
mic.setSilenceLevel(0, 2000);
mic.codec = SoundCodec.SPEEX;
//camera access permissions
function statusHandler(event:StatusEvent):void
{
switch (event.code)
{
case "Camera.Muted":
trace("User clicked Deny.");
break;
case "Camera.Unmuted":
trace("User clicked Accept.");
break;
}
}
//audio access permissions
function micStatus(event:StatusEvent):void
{
if (event.code == "Microphone.Unmuted")
{
trace("Microphone access was allowed.");
}
else if (event.code == "Microphone.Muted")
{
trace("Microphone access was denied.");
}
}
//array of camera names
var cameraA:Array = Camera.names;
for ( var i : int = 0; i < cameraA.length; i++){
trace ( "Camera: ", cameraA[i] );
}
var groupspec:GroupSpecifier = new GroupSpecifier("groupone");
groupspec.multicastEnabled = true;
//group postings
groupspec.postingEnabled = true;
//specific peer posting
groupspec.routingEnabled = true;
//automatic peer discovery
groupspec.serverChannelEnabled = true;
nc is declared inside one function, but you're trying to use it inside another function. Sometimes you can just pass the variable around as an argument, but in this case that won't work out well. So declare it once at the top level, outside both functions:
var nc:NetConnection = new NetConnection();
function btnStart(event:MouseEvent):void{
trace("Connecting...");
// var nc:NetConnection = new NetConnection(); // removed
nc.addEventListener(NetStatusEvent.NET_STATUS, netStatusHandler);
nc.connect("rtmfp://localhost/streamingLive");
}
//// ERROR LINE NC.CLOSE();
function btnClose(event:MouseEvent):void{
trace("Closing time");
nc.close();
}

How to write a custom FindElement routine in Selenium?

I'm trying to figure out how to write a custom FindElement routine in Selenium 2.0 WebDriver. The idea would be something like this:
driver.FindElement(By.Method( (ISearchContext) => {
/* examine search context logic here... */ }));
The anonymous method would examine the ISearchContext and return true if it matches, false otherwise.
I'm digging through the Selenium code and getting a bit lost. It looks like the actual By.* logic is carried out server-side, not client-side, which seems to be complicating matters.
Any suggestions?
I do a multi-stage search: I have a method that performs a try/catch and then a method that gets the element. In theory you could do a try/catch inside a wait-until loop instead, but I like this way better because of my setup.
public bool CheckUntil(IWebDriver driver, string selectorType, string selectorInfo)
{
int Timer = 160;
bool itemFound = false;
for (int i = 0; i < Timer; i++)
if (itemFound)
{
break; // element found, stop polling
}
else
{
Thread.Sleep(500);
if(selectorType.ToLower() == "id" && TryCatch(driver, selectorType, selectorInfo))
{
if (driver.FindElement(By.Id(selectorInfo)).Displayed)
{
itemFound = true;
}
}
else if(selectorType.ToLower() == "tagname" && TryCatch(driver, selectorType, selectorInfo))
{
if (driver.FindElement(By.TagName(selectorInfo)).Displayed)
{
itemFound = true;
}
}
}
return itemFound;
}
Here's my try/catch method. You can add as many selector types as you want: id, cssselector, xpath, tagname, classname, etc.
public bool TryCatch(IWebDriver driver, string selectorType, string selectorInfo)
{
bool ElementFound = false;
try
{
switch(selectorType)
{
case "id":
driver.FindElement(By.Id(selectorInfo));
break;
case "tagname":
driver.FindElement(By.TagName(selectorInfo));
break;
}
ElementFound = true;
}
catch
{
ElementFound = false;
}
return ElementFound;
}
OK, I figured out how to do this. I'm leveraging driver.ExecuteScript() to run custom JS through the WebDriver. It looks a bit like this:
function elementFound(elem) {
var nodeType = navigator.appName == "Microsoft Internet Explorer" ? document.ELEMENT_NODE : Node.ELEMENT_NODE;
if(elem.nodeType == nodeType)
{
/* Element identification logic here */
}
else { return false; }
}
function traverseElement(elem) {
if (elementFound(elem) == true) {
return elem;
}
else {
for (var i = 0; i < elem.childNodes.length; i++) {
var ret = traverseElement(elem.childNodes[i]);
if(ret != null) { return ret; }
}
}
}
return traverseElement(document);
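On the C# side, the script can be run through IJavaScriptExecutor, and a returned DOM node comes back as an IWebElement. A minimal sketch, assuming driver is your IWebDriver and customFindScript is a string holding the JavaScript above (both names are placeholders):
// Sketch: execute the custom traversal script and cast the result to IWebElement.
// Requires: using System; using OpenQA.Selenium;
IJavaScriptExecutor js = (IJavaScriptExecutor)driver;
object result = js.ExecuteScript(customFindScript);
IWebElement match = result as IWebElement; // null if the script found nothing
if (match != null)
{
    Console.WriteLine("Found element with tag: " + match.TagName);
}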

air process adt flex

I have two AIR applications installed on the desktop. When I run them, two AIR processes are listed in the Task Manager. Now, how can I call a method of one AIR application from the other AIR application?
Use LocalConnection.
You can host a connection in one AIR application and connect to it from the other AIR application. From there, you can call methods.
BEWARE: LocalConnection can be a little tricky and odd (for example, connection names are global and can't overlap).
From the example doc listed above:
// Code in LocalConnectionSenderExample.as
package {
import flash.display.Sprite;
import flash.events.MouseEvent;
import flash.net.LocalConnection;
import flash.text.TextField;
import flash.text.TextFieldType;
import flash.events.StatusEvent;
import flash.text.TextFieldAutoSize;
public class LocalConnectionSenderExample extends Sprite {
private var conn:LocalConnection;
// UI elements
private var messageLabel:TextField;
private var message:TextField;
private var sendBtn:Sprite;
public function LocalConnectionSenderExample() {
buildUI();
sendBtn.addEventListener(MouseEvent.CLICK, sendMessage);
conn = new LocalConnection();
conn.addEventListener(StatusEvent.STATUS, onStatus);
}
private function sendMessage(event:MouseEvent):void {
conn.send("myConnection", "lcHandler", message.text);
}
private function onStatus(event:StatusEvent):void {
switch (event.level) {
case "status":
trace("LocalConnection.send() succeeded");
break;
case "error":
trace("LocalConnection.send() failed");
break;
}
}
private function buildUI():void {
const hPadding:uint = 5;
// messageLabel
messageLabel = new TextField();
messageLabel.x = 10;
messageLabel.y = 10;
messageLabel.text = "Text to send:";
messageLabel.autoSize = TextFieldAutoSize.LEFT;
addChild(messageLabel);
// message
message = new TextField();
message.x = messageLabel.x + messageLabel.width + hPadding;
message.y = 10;
message.width = 120;
message.height = 20;
message.background = true;
message.border = true;
message.type = TextFieldType.INPUT;
addChild(message);
// sendBtn
sendBtn = new Sprite();
sendBtn.x = message.x + message.width + hPadding;
sendBtn.y = 10;
var sendLbl:TextField = new TextField();
sendLbl.x = 1 + hPadding;
sendLbl.y = 1;
sendLbl.selectable = false;
sendLbl.autoSize = TextFieldAutoSize.LEFT;
sendLbl.text = "Send";
sendBtn.addChild(sendLbl);
sendBtn.graphics.lineStyle(1);
sendBtn.graphics.beginFill(0xcccccc);
sendBtn.graphics.drawRoundRect(0, 0,
(sendLbl.width + 2 + hPadding + hPadding), (sendLbl.height + 2), 5, 5);
sendBtn.graphics.endFill();
addChild(sendBtn);
}
}
}
// Code in LocalConnectionReceiverExample.as
package {
import flash.display.Sprite;
import flash.net.LocalConnection;
import flash.text.TextField;
public class LocalConnectionReceiverExample extends Sprite {
private var conn:LocalConnection;
private var output:TextField;
public function LocalConnectionReceiverExample() {
buildUI();
conn = new LocalConnection();
conn.client = this;
try {
conn.connect("myConnection");
} catch (error:ArgumentError) {
trace("Can't connect...the connection name is already
being used by another SWF");
}
}
public function lcHandler(msg:String):void {
output.appendText(msg + "\n");
}
private function buildUI():void {
output = new TextField();
output.background = true;
output.border = true;
output.wordWrap = true;
addChild(output);
}
}
}
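One AIR-specific detail worth noting with the example above: because each AIR application runs in its own application sandbox, the receiving side generally has to whitelist the sender before conn.send() calls are delivered. A minimal sketch for the receiver; the application ID is a placeholder:
// In LocalConnectionReceiverExample, before conn.connect("myConnection"):
conn.allowDomain("*"); // or, more restrictively, "app#SenderAppId" to allow only that AIR app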