I am new to UCMA and learning as I go through examples. I am trying to build two Lync clients, A and B, with the following scenario:
A calls B
B answers
A plays audio
B records it using Recorder.
I am stuck trying to record the call at B. For B it is an incoming call. I need to attach the AudioVideoFlow to the Recorder, but I am not sure how to do it. I would appreciate any help.
Apologies for the unformatted code; I tried but am not sure how to format it properly.
Thanks.
Kris
Client B Code:
Accepts an incoming call
Records the media received in the incoming call. (This is the part I have trouble with.)
using System;
using System.Threading;
using Microsoft.Rtc.Collaboration;
using Microsoft.Rtc.Collaboration.AudioVideo;
using Microsoft.Rtc.Signaling;
using Microsoft.Rtc.Collaboration.Lync;
namespace Microsoft.Rtc.Collaboration.LyncUAS
{
public class LyncUAS
{
#region Locals
private LyncUASConfigurationHelper _helper;
private UserEndpoint _userEndpoint;
private AudioVideoCall _audioVideoCall;
private AudioVideoFlow _audioVideoFlow;
private Conversation _incomingConversation;
//Wait handles are only present to keep things synchronous and easy to read.
private AutoResetEvent _autoResetEvent = new AutoResetEvent(false);
private EventHandler<AudioVideoFlowConfigurationRequestedEventArgs> _audioVideoFlowConfigurationRequestedEventHandler;
private EventHandler<MediaFlowStateChangedEventArgs> _audioVideoFlowStateChangedEventHandler;
private AutoResetEvent _waitForAudioVideoCallEstablishCompleted = new AutoResetEvent(false);
private AutoResetEvent _waitForAudioVideoFlowStateChangedToActiveCompleted = new AutoResetEvent(false);
private AutoResetEvent _waitForPrepareSourceCompleted = new AutoResetEvent(false);
#endregion
#region Methods
/// <summary>
/// Instantiate and run the LyncUAS sample.
/// </summary>
/// <param name="args">unused</param>
public static void Main(string[] args)
{
LyncUAS lyncUAS = new LyncUAS();
lyncUAS.Run();
}
private void Run()
{
string filename = "received.wma";
_helper = new LyncUASConfigurationHelper();
// Create a user endpoint, using the network credential object
// defined above.
_userEndpoint = _helper.CreateEstablishedUserEndpoint("Lync UAS" /*endpointFriendlyName*/);
_userEndpoint.RegisterForIncomingCall<AudioVideoCall>(On_AudioVideoCall_Received);
Console.WriteLine("Waiting for incoming call...");
_autoResetEvent.WaitOne();
Console.WriteLine("came after call is connected");
//start recording for audio.
Recorder recorder = new Recorder();
recorder.StateChanged += new EventHandler<RecorderStateChangedEventArgs>(recorder_StateChanged);
recorder.VoiceActivityChanged += new EventHandler<VoiceActivityChangedEventArgs>(recorder_VoiceActivityChanged);
//**********This is the issue, currently _audioVideoFlow is null, it is not attached to the flow
//So this will fail, how to attach _audioVideoFlow to an incoming call ?? HELP !!!
// recorder.AttachFlow(_audioVideoFlow); ------------> HELP!
WmaFileSink sink = new WmaFileSink(filename);
recorder.SetSink(sink);
recorder.Start();
Console.WriteLine("Started Recording ...");
_autoResetEvent.WaitOne();
recorder.Stop();
Console.WriteLine("Stopped Recording ...");
recorder.DetachFlow();
Console.WriteLine("Exiting");
Thread.Sleep(2000);
}
private void audioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
{
Console.WriteLine("Flow state changed from " + e.PreviousState + " to " + e.State);
//When flow is active, media operations can begin
if (e.State == MediaFlowState.Active)
{
// Flow-related media operations normally begin here.
_waitForAudioVideoFlowStateChangedToActiveCompleted.Set();
}
// call sample event handler
if (_audioVideoFlowStateChangedEventHandler != null)
{
_audioVideoFlowStateChangedEventHandler(sender, e);
}
}
void recorder_VoiceActivityChanged(object sender, VoiceActivityChangedEventArgs e)
{
Console.WriteLine("Recorder detected " + (e.IsVoice ? "voice" : "silence") + " at " + e.TimeStamp);
}
void recorder_StateChanged(object sender, RecorderStateChangedEventArgs e)
{
Console.WriteLine("Recorder state changed from " + e.PreviousState + " to " + e.State);
}
void On_AudioVideoCall_Received(object sender, CallReceivedEventArgs<AudioVideoCall> e)
{
//Type checking was done by the platform; no risk of this being any
// type other than the type expected.
_audioVideoCall = e.Call;
// Call: StateChanged: Only hooked up for logging, to show the call
// state transitions.
_audioVideoCall.StateChanged += new
EventHandler<CallStateChangedEventArgs>(_audioVideoCall_StateChanged);
_incomingConversation = new Conversation(_userEndpoint);
Console.WriteLine("Call Received! From: " + e.RemoteParticipant.Uri + " Toast is: " +e.ToastMessage.Message);
_audioVideoCall.BeginAccept(
ar =>
{
try {
_audioVideoCall.EndAccept(ar);
Console.WriteLine("Call must be connected at this point. "+_audioVideoCall.State);
_autoResetEvent.Set();
} catch (RealTimeException ex) { Console.WriteLine(ex); }
}, null);
}
//Just to record the state transitions in the console.
void _audioVideoCall_StateChanged(object sender, CallStateChangedEventArgs e)
{
Console.WriteLine("Call has changed state. The previous call state was: " + e.PreviousState +
" and the current state is: " + e.State);
if (e.State == CallState.Terminated)
{
Console.WriteLine("Shutting down");
_autoResetEvent.Set();
_helper.ShutdownPlatform();
}
}
#endregion
}
}
I think I have figured out what's not quite right here.
Your Code
// Create a user endpoint, using the network credential object
// defined above.
_userEndpoint = _helper.CreateEstablishedUserEndpoint("Lync UAS" /*endpointFriendlyName*/);
_userEndpoint.RegisterForIncomingCall<AudioVideoCall>(On_AudioVideoCall_Received);
Console.WriteLine("Waiting for incoming call...");
_autoResetEvent.WaitOne();
Console.WriteLine("came after call is connected");
//start recording for audio.
Recorder recorder = new Recorder();
recorder.StateChanged += new EventHandler<RecorderStateChangedEventArgs>(recorder_StateChanged);
recorder.VoiceActivityChanged += new EventHandler<VoiceActivityChangedEventArgs>(recorder_VoiceActivityChanged);
//**********This is the issue, currently _audioVideoFlow is null, it is not attached to the flow //So this will fail, how to attach _audioVideoFlow to an incoming call ?? HELP !!!
// recorder.AttachFlow(_audioVideoFlow); ------------> HELP!
Looking good so far. I'm assuming you're establishing the endpoint and so on in your CreateEstablishedUserEndpoint method, but I'm not seeing where you're getting the value for _audioVideoFlow.
I'm guessing you might be doing it elsewhere, but on the off chance that's actually where you're running into problems, here's that bit:
Simplest pattern to get AVFlow
public static void RegisterForIncomingCall(LocalEndpoint localEndpoint)
{
localEndpoint.RegisterForIncomingCall
<AudioVideoCall>(IncomingCallDelegate);
}
private static void IncomingCallDelegate(object sender, CallReceivedEventArgs<AudioVideoCall> e)
{
e.Call.AudioVideoFlowConfigurationRequested += IncomingCallOnAudioVideoFlowConfigurationRequested;
}
private static void IncomingCallOnAudioVideoFlowConfigurationRequested(object sender, AudioVideoFlowConfigurationRequestedEventArgs e)
{
AudioVideoFlow audioVideoFlow = e.Flow; // <--- There's your flow, gentleman.
}
Now, instead of registering for your incoming call, just call RegisterForIncomingCall(_userEndpoint);.
Your AVFlow will be hanging off e.Flow above; you could then pass that into your recorder (recorder.AttachFlow(e.Flow)), or simply assign the flow to a field in your class, signal your AutoResetEvent, and attach it where you're setting up the recorder now.
Obviously this is a pretty naive implementation. A lot can go wrong in those few lines of code (exception handling and the static event handler memory leak come immediately to mind); don't forget to wire up events related to status changes on the conversation/call and endpoints, as well as any of the recovery-related items.
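Tying that into the fields you already have might look roughly like this (a sketch, not production code):
private void IncomingCallDelegate(object sender, CallReceivedEventArgs<AudioVideoCall> e)
{
    // Grab the flow as soon as it's configured, then accept the call as you do now.
    e.Call.AudioVideoFlowConfigurationRequested += (s, args) =>
    {
        _audioVideoFlow = args.Flow;
        _audioVideoFlow.StateChanged += audioVideoFlow_StateChanged;
    };
    _audioVideoCall = e.Call;
    _audioVideoCall.BeginAccept(ar => _audioVideoCall.EndAccept(ar), null);
}
// Back in Run(), wait for the flow to go Active before attaching the recorder:
// _waitForAudioVideoFlowStateChangedToActiveCompleted.WaitOne();
// recorder.AttachFlow(_audioVideoFlow);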
Windows 10, 64-bit.
I coded a small VB application last fall to drive a PR-705 serial device from the RS-232 port.
It worked flawlessly. Now, whenever I fire up that code, the PR-705 does not respond so nicely.
I originally used an FTDI-based USB-to-RS232 adapter (Thunderlinx 1000), which worked perfectly. I figured the problem might have to do with the driver, so I added a PCI-Express serial port card from StarTech to my PC. The result: no improvement, the same erratic behavior. Out of curiosity, this morning I fired up my trusty Windows HyperTerminal and it worked flawlessly (out of the PCI-e port)! So I know the trouble does not come from the device itself (sigh!) but from somewhere in the chain of communications.
The problem I run into is as follows (hopefully, you can detect a "pattern"):
a) Open the serial port
b) Send a command to the port, in an ASCII string (e.g. "M1")
<- Device responds by sending data back
In order to obtain ALL data from device, however, I have to send additional commands:
c) "D2"
d) "D3"
e) "D4"
f) "D5"
When I run these commands from HyperTerminal, I get stellar behavior.
In my application, however, the additional commands ("D2", "D3", "D4" and "D5") are somehow not getting THROUGH to the device. When I send "D2", for example, I get "XYZ" data back, which is OK.
But when I follow up with "D3", I still get the same "XYZ" data back.
It is as though the new command ("D3") never reached the device.
My (ugly) workaround is to resend the same command again, and THEN I get the new data (most of the time).
Here is some of my code:
Dim myPort As New IO.Ports.SerialPort("COM2") With {
.BaudRate = 9600,
.DataBits = 8,
.StopBits = IO.Ports.StopBits.One,
.Parity = IO.Ports.Parity.None,
.Handshake = IO.Ports.Handshake.RequestToSend,
.RtsEnable = True,
.DtrEnable = True,
.ReadTimeout = 1000000,
.WriteTimeout = 10000
}
myPort.Open()
myPort.WriteLine("M1")
myPort.WriteLine("D2")
Incoming = myPort.ReadLine()
myPort.WriteLine("D2")
Incoming = myPort.ReadLine()
myPort.WriteLine("D3")
Incoming = myPort.ReadLine()
myPort.WriteLine("D4")
Incoming = myPort.ReadLine()
myPort.WriteLine("D5")
Incoming = myPort.ReadLine()
I know the timeout value is "extreme", but the device can take up to 15 minutes to reply, depending on the circumstances. I can't close the port before the device responds, otherwise I get an error (something to do with CTS) and the device automatically reboots -- not ideal. I'm not an RS-232 expert, but I do the best I can with the little knowledge I have.
I found a partial answer to my serial port problem, but this solution is not without its own problems. I found and adapted some C# code in which the serial port receive is handled in its own DataReceived event. This works very well; as you'll see in the code below, it does the job. But now I'm facing the question of how to go about "successively" polling the port to get all the data returned by the instrument.
Here's the C# code first:
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.IO.Ports;
using System.Threading;
namespace WindowsApplication14
{
public partial class Form1 : Form
{
string Operation, MesureM1, MesureD2, MesureD3, MesureD4;
SerialPort _serialPort;
// delegate is used to write to a UI control from a non-UI thread
private delegate void SetTextDeleg(string text);
public Form1()
{
InitializeComponent();
}
private void btnStart_Click(object sender, EventArgs e)
{
try
{
if (!_serialPort.IsOpen)
_serialPort.Open();
_serialPort.Write("PR705\r\n");
}
catch (Exception ex)
{
MessageBox.Show("Error opening serial port :: " + ex.Message, "Error!");
}
}
private void Form1_Load(object sender, EventArgs e)
{
_serialPort = new SerialPort("COM2", 9600, Parity.None, 8, StopBits.One);
_serialPort.Handshake = Handshake.RequestToSend;
_serialPort.RtsEnable = true;
_serialPort.DtrEnable = true;
_serialPort.DataReceived += new SerialDataReceivedEventHandler(sp_DataReceived);
_serialPort.ReadTimeout = 500;
_serialPort.WriteTimeout = 500;
_serialPort.Open();
_serialPort.Write("PR705\r\n");
}
private void btnM1_Click(object sender, EventArgs e)
{
Operation = "M1";
_serialPort.Write("M1\r\n"); // 0000,111,3.896e+002,0.3371,0.3563
Thread.Sleep(500);
}
private void btnD2_Click(object sender, EventArgs e)
{
Operation = "D2";
_serialPort.Write("D2\r\n");
Thread.Sleep(500);
}
private void btnD3_Click(object sender, EventArgs e)
{
Operation = "D3";
_serialPort.Write("D3\r\n");
Thread.Sleep(500);
}
private void btnD4_Click(object sender, EventArgs e)
{
Operation = "D4";
_serialPort.Write("D4\r\n");
Thread.Sleep(500);
}
void sp_DataReceived(object sender, SerialDataReceivedEventArgs e)
{
Thread.Sleep(500);
string data = _serialPort.ReadLine();
this.BeginInvoke(new SetTextDeleg(si_DataReceived), new object[] { data });
}
private void si_DataReceived(string data)
{
String Result = data.Trim();
textBox1.Text = Result;
switch (Operation)
{
case "M1":
MesureM1= Result; // MesureBase = "0000,111,3.884e+002,0.3377,0.3570"
break;
case "D2":
MesureD2 = Result; // MesureD2 = "0000,111,3.674e+002,3.884e+002,3.322e+002"
break;
case "D3":
MesureD3 = Result; // MesureD3 = "0000,111,3.884e+002,0.2044,0.4862"
break;
case "D4":
MesureD4 = Result; //
break;
default:
break;
}
}
}
}
In my main form, I have four buttons and a textbox.
Button M1 is used to take a measurement (send "M1" command to the instrument).
Buttons D2, D3 and D4 are used to retrieve data from the instrument.
In my old VB application, I used code like this, to successively retrieve data from the instrument:
myPort.WriteLine("M1")
myPort.WriteLine("D2")
Incoming = myPort.ReadLine()
myPort.WriteLine("D3")
Incoming = myPort.ReadLine()
myPort.WriteLine("D4")
Incoming = myPort.ReadLine()
That worked, but I don't know how to accomplish the same thing using the 'separate' approach in C#. I tried lumping all the commands into the same click handler as the M1 button, like this:
private void btnM1_Click(object sender, EventArgs e)
{
Operation = "M1";
_serialPort.Write("M1\r\n"); // 0000,111,3.896e+002,0.3371,0.3563
Thread.Sleep(500);
Operation = "D2";
_serialPort.Write("D2\r\n");
Thread.Sleep(500);
Operation = "D3";
_serialPort.Write("D3\r\n");
Thread.Sleep(500);
Operation = "D4";
_serialPort.Write("D4\r\n");
Thread.Sleep(500);
}
But it does not work: I only get the "last" serial port read, the D4 one. The code never goes through D2 and D3.
How would you suggest I go about this? I have not shown you D5 yet… The D5 operation is the most involved, as I have to do "two reads": one for one piece of data, and then 201 successive reads to retrieve the wavelength data, like this:
myPort.WriteLine("D5")
Incoming = myPort.ReadLine()
Temp = Split(Incoming, ",")
PeakWL = Convert.ToDouble(Temp(2))
For i = 1 To 201
Incoming = myPort.ReadLine()
Temp = Split(Incoming, ",")
nmValue(i) = CDbl(Temp(0))
SpectralValue(i) = CDbl(Temp(1))
Next
I'm going to try to solve my problem myself but I would appreciate suggestions.
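What I have in mind, roughly, is a small helper on top of the form above (an untested sketch; it would replace what sp_DataReceived currently does): write one command, block until the DataReceived handler signals that a full reply line has arrived, and only then send the next command.
private readonly AutoResetEvent _replyReceived = new AutoResetEvent(false);
private string _lastReply;
// Write one command and block until sp_DataReceived signals a complete reply line.
private string SendCommand(string command, int timeoutMs)
{
    _serialPort.Write(command + "\r\n");
    if (!_replyReceived.WaitOne(timeoutMs))
        throw new TimeoutException("No reply to " + command);
    return _lastReply;
}
void sp_DataReceived(object sender, SerialDataReceivedEventArgs e)
{
    _lastReply = _serialPort.ReadLine().Trim();
    _replyReceived.Set();
}
// Usage, e.g. behind the M1 button:
// MesureM1 = SendCommand("M1", 30000);
// MesureD2 = SendCommand("D2", 30000);
// MesureD3 = SendCommand("D3", 30000);
// MesureD4 = SendCommand("D4", 30000);
This blocks the UI thread while waiting, so it is only a starting point; I would still have to move it off the UI thread (or make it async) for the long D5 reads.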
I'm a little bit confused about how to use IoT Edge offline mode. I thought it worked out of the box!
My IoT Hub is located in West US. When I disconnect my Edge device from the network, nothing happens. The data is not saved or resent after bringing the device back online.
I have only one module that sends data to the IoT Hub. I can see the data flowing with the Device Explorer Twin app, and I save the data in a database.
After disconnecting, waiting 5 minutes, and reconnecting, I don't see the data that I tried to send while offline in the database.
All messages sent while offline are missing (I'm sequencing the messages with a datetime stamp).
Did I miss a configuration?
Any idea why offline mode doesn't work for me?
I'm using the IoT Edge runtime v1.0.6 and Windows containers.
Here is the source code of my test module:
using Microsoft.Azure.Devices.Client;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Runtime.Loader;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
class Program
{
static int monitoringInterval { get; set; } = 60;// 60 seconds
static System.Timers.Timer testTimer;
static ModuleClient ioTHubModuleClient;
static void Main(string[] args)
{
Init().Wait();
StartTestTimer();
// Wait until the app unloads or is cancelled
var cts = new CancellationTokenSource();
AssemblyLoadContext.Default.Unloading += (ctx) => cts.Cancel();
Console.CancelKeyPress += (sender, cpe) => cts.Cancel();
WhenCancelled(cts.Token).Wait();
}
/// <summary>
/// Handles cleanup operations when app is cancelled or unloads
/// </summary>
public static Task WhenCancelled(CancellationToken cancellationToken)
{
var tcs = new TaskCompletionSource<bool>();
cancellationToken.Register(s => ((TaskCompletionSource<bool>)s).SetResult(true), tcs);
return tcs.Task;
}
/// <summary>
/// Initializes the ModuleClient and sets up the callback to receive
/// messages containing temperature information
/// </summary>
static async Task Init()
{
AmqpTransportSettings amqpSetting = new AmqpTransportSettings(TransportType.Amqp_Tcp_Only);
ITransportSettings[] settings = { amqpSetting };
// Open a connection to the Edge runtime
ioTHubModuleClient = await ModuleClient.CreateFromEnvironmentAsync(settings);
await ioTHubModuleClient.OpenAsync();
Console.WriteLine("IoT Hub module client initialized.");
}
static void StartTestTimer()
{
Console.WriteLine("Start Monitoring Timer: " + monitoringInterval + " seconds");
// Set up a timer that triggers every minute.
testTimer = new System.Timers.Timer();
testTimer.Interval = monitoringInterval * 1000; // 60 seconds
testTimer.Elapsed += new System.Timers.ElapsedEventHandler(SendEvent);
testTimer.Start();
SendEvent(null, null);
}
async static void SendEvent(object sender, System.Timers.ElapsedEventArgs args)
{
DateTime today = DateTime.Now;
Console.WriteLine("[" + today + "] Send Data has started...");
try
{
//IoT device connection string
string connectionString = "HostName=xxxxxx.azure-devices.net;DeviceId=IOT-Device1;SharedAccessKey=ett8xxxxxxxxx";
// Connect to the IoT hub using the MQTT protocol
DeviceClient _DeviceClient = DeviceClient.CreateFromConnectionString(connectionString, TransportType.Mqtt);
_DeviceClient.OperationTimeoutInMilliseconds = 10000;
Dictionary<string, Object> telemetryDataPoint = new Dictionary<string, Object>();
string dateTime = DateTime.Now.ToLongDateString() + " " + DateTime.Now.ToLongTimeString();
telemetryDataPoint.Add("DateTime", dateTime);
string messageString = JsonConvert.SerializeObject(telemetryDataPoint);
Message message = new Message(Encoding.ASCII.GetBytes(messageString));
// Send the telemetry message
Console.WriteLine("\n*> Sending message: {0}", messageString);
await _DeviceClient.SendEventAsync(message).ConfigureAwait(false);
Console.WriteLine("Message sent!");
}
catch (Exception e)
{
Console.WriteLine("Message not sent. Connection error to Iot Hub:" + e.Message);
}
}
}
Why is the code creating a ModuleClient in Init(), but then attempting to send a message directly to IoT Hub using a DeviceClient in SendEvent()? This bypasses the edge runtime (specifically edgeHub) completely, and edgeHub is what provides the offline store-and-forward.
Here is an example of the right way to do this: https://github.com/Azure/iotedge/blob/ad41fec507bb91a2e57a07cd32e287ada0ca08d8/edge-modules/SimulatedTemperatureSensor/src/Program.cs#L95
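For reference, the sending side would look roughly like this (a sketch only; "output1" is an assumed output name and has to match a route in your deployment manifest, e.g. FROM /messages/modules/<moduleName>/outputs/output1 INTO $upstream):
// Send through the edge runtime (edgeHub) instead of straight to IoT Hub.
async static void SendEvent(object sender, System.Timers.ElapsedEventArgs args)
{
    var telemetryDataPoint = new Dictionary<string, object>
    {
        { "DateTime", DateTime.Now.ToLongDateString() + " " + DateTime.Now.ToLongTimeString() }
    };
    string messageString = JsonConvert.SerializeObject(telemetryDataPoint);
    var message = new Message(Encoding.ASCII.GetBytes(messageString));

    // ioTHubModuleClient is the ModuleClient created in Init(); when the device is
    // offline, edgeHub queues the message locally and forwards it after reconnecting.
    await ioTHubModuleClient.SendEventAsync("output1", message);
    Console.WriteLine("Message routed through edgeHub: {0}", messageString);
}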
I am trying SqlDependency for the first time. I am not getting any notifications on database updates.
I am placing a breakpoint inside
OnChange(object sender, SqlNotificationEventArgs e),
but it never gets hit.
Here is my code:
protected void Page_Load(object sender, EventArgs e)
{
Label1.Text = "Cache Refresh: " + DateTime.Now.ToLongTimeString();
DateTime.Now.ToLongTimeString();
// Create a dependency connection to the database.
SqlDependency.Start(GetConnectionString());
using (SqlConnection connection = new SqlConnection(GetConnectionString()))
{
using (SqlCommand command = new SqlCommand(GetSQL(), connection))
{
SqlDependency dependency =
new SqlDependency(command);
// Refresh the cache after the number of minutes
// listed below if a change does not occur.
// This value could be stored in a configuration file.
connection.Open();
dgHomeRequests.DataSource = command.ExecuteReader();
dgHomeRequests.DataBind();
}
}
}
private string GetConnectionString()
{
// To avoid storing the connection string in your code,
// you can retrieve it from a configuration file.
//return "Data Source=(local);Integrated Security=true;" +"Initial Catalog=AdventureWorks;";
return ConfigurationManager.ConnectionStrings["TestData"].ConnectionString;
}
private string GetSQL()
{
return "Select [Address] From [UserAccount1]";
}
void OnChange(object sender, SqlNotificationEventArgs e)
{
// have breakpoint here:
SqlDependency dependency = sender as SqlDependency;
// Notices are only a one shot deal
// so remove the existing one so a new
// one can be added
dependency.OnChange -= OnChange;
// Fire the event
/* if (OnNewMessage != null)
{
OnNewMessage();
}*/
}
I have also placed some code in the Global.asax file:
public class Global : HttpApplication
{
void Application_Start(object sender, EventArgs e)
{
// Code that runs on application startup
RouteConfig.RegisterRoutes(RouteTable.Routes);
BundleConfig.RegisterBundles(BundleTable.Bundles);
SqlDependency.Start(ConfigurationManager.ConnectionStrings["TestData"].ConnectionString);
}
protected void Application_End()
{
// Shut down SignalR Dependencies
SqlDependency.Stop(ConfigurationManager.ConnectionStrings["TestData"].ConnectionString);
}
}
The SQL Server is on my local machine. I am running the code through Visual Studio (IIS Express).
To enable Service Broker on the database:
ALTER DATABASE [<dbName>] SET ENABLE_BROKER
GO
To subscribe to query notifications, we need to grant permission to the IIS service account:
GRANT SUBSCRIBE QUERY NOTIFICATIONS TO "<serviceAccount>"
I guessed the second point isn't needed since it's local, but I tried granting some permissions anyway. I don't know if they are right, as I don't think it is using the app pool, and the permission shouldn't be needed in a local environment when I am the user myself and created the schema myself.
One of the questions I saw suggested granting:
alter authorization on database::<dbName> to [sa];
I gave that permission too.
I was missing:
dependency.OnChange += new OnChangeEventHandler(OnChange);
The new code looks like:
SqlDependency dependency = new SqlDependency(command);
dependency.OnChange += new OnChangeEventHandler(OnChange);
Now OnChange(object sender, SqlNotificationEventArgs e) fires.
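Putting it together, the relevant part of Page_Load now looks like this:
using (SqlConnection connection = new SqlConnection(GetConnectionString()))
{
    using (SqlCommand command = new SqlCommand(GetSQL(), connection))
    {
        SqlDependency dependency = new SqlDependency(command);
        // This was the missing piece: subscribe before executing the command.
        dependency.OnChange += new OnChangeEventHandler(OnChange);
        connection.Open();
        dgHomeRequests.DataSource = command.ExecuteReader();
        dgHomeRequests.DataBind();
    }
}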
How do I get the buffering status while MediaPlayer is trying to connect to an audio streaming link (i.e. 2%, 4%... 100%, after which the online radio starts to play) in Android?
This is my code, but I have no idea how to solve my problem. Thanks in advance for any kind of help.
player.setOnBufferingUpdateListener(new OnBufferingUpdateListener() {
public void onBufferingUpdate(MediaPlayer mp, int percent) {
playSeekBar.setSecondaryProgress(percent);
Log.i("Buffering", "" + percent);
}
});
I solved this problem. Here is the link: http://coderfriend.blogspot.com/
As requested, here I share the blog content.
When the user clicks the play button to play the radio, I want to show a connecting status (buffering 1%, 2%... 99%). When the status reaches 100%, the radio starts to play. I had trouble solving this, so here I share this code for everyone.
// First, create this class
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import android.content.Context;
import android.media.MediaPlayer;
import android.os.Handler;
import android.util.Log;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.TextView;
public class StreamingMediaPlayer {
private static final int INTIAL_KB_BUFFER = 96*10/8;//assume 96kbps*10secs/8bits per byte
private TextView textStreamed;
private ImageButton playButton;
private ProgressBar progressBar;
// Track for display by progressBar
private long mediaLengthInKb, mediaLengthInSeconds;
private int totalKbRead = 0;
// Create Handler to call View updates on the main UI thread.
private final Handler handler = new Handler();
private MediaPlayer mediaPlayer;
private File downloadingMediaFile;
private boolean isInterrupted;
private Context context;
private int counter = 0;
public StreamingMediaPlayer(Context context,TextView textStreamed, ImageButton playButton, Button streamButton,ProgressBar progressBar)
{
this.context = context;
this.textStreamed = textStreamed;
this.playButton = playButton;
this.progressBar = progressBar;
}
/**
* Progressively download the media to a temporary location and update the MediaPlayer as new content becomes available.
*/
public void startStreaming(final String mediaUrl, long mediaLengthInKb, long mediaLengthInSeconds) throws IOException {
this.mediaLengthInKb = mediaLengthInKb;
this.mediaLengthInSeconds = mediaLengthInSeconds;
Runnable r = new Runnable() {
public void run() {
try {
downloadAudioIncrement(mediaUrl);
} catch (IOException e) {
Log.e(getClass().getName(), "Unable to initialize the MediaPlayer for fileUrl=" + mediaUrl, e);
return;
}
}
};
new Thread(r).start();
}
/**
* Download the url stream to a temporary location and then call the setDataSource
* for that local file
*/
public void downloadAudioIncrement(String mediaUrl) throws IOException {
URLConnection cn = new URL(mediaUrl).openConnection();
cn.connect();
InputStream stream = cn.getInputStream();
if (stream == null) {
Log.e(getClass().getName(), "Unable to create InputStream for mediaUrl:" + mediaUrl);
}
downloadingMediaFile = new File(context.getCacheDir(),"downloadingMedia.dat");
if (downloadingMediaFile.exists()) {
downloadingMediaFile.delete();
}
FileOutputStream out = new FileOutputStream(downloadingMediaFile);
byte buf[] = new byte[16384];
int totalBytesRead = 0, incrementalBytesRead = 0;
do {
int numread = stream.read(buf);
if (numread <= 0)
break;
out.write(buf, 0, numread);
totalBytesRead += numread;
incrementalBytesRead += numread;
totalKbRead = totalBytesRead/1000;
testMediaBuffer();
fireDataLoadUpdate();
} while (validateNotInterrupted());
stream.close();
if (validateNotInterrupted()) {
fireDataFullyLoaded();
}
}
private boolean validateNotInterrupted() {
if (isInterrupted) {
if (mediaPlayer != null) {
mediaPlayer.pause();
//mediaPlayer.release();
}
return false;
} else {
return true;
}
}
/**
* Test whether we need to transfer buffered data to the MediaPlayer.
* Interacting with the MediaPlayer on a non-main UI thread can cause crashes, so perform this using a Handler.
*/
private void testMediaBuffer() {
Runnable updater = new Runnable() {
public void run() {
if (mediaPlayer == null) {
// Only create the MediaPlayer once we have the minimum buffered data
if ( totalKbRead >= INTIAL_KB_BUFFER) {
try {
startMediaPlayer();
} catch (Exception e) {
Log.e(getClass().getName(), "Error copying buffered conent.", e);
}
}
} else if ( mediaPlayer.getDuration() - mediaPlayer.getCurrentPosition() <= 1000 ){
// NOTE: The media player has stopped at the end so transfer any existing buffered data
// We test for < 1second of data because the media player can stop when there is still
// a few milliseconds of data left to play
transferBufferToMediaPlayer();
}
}
};
handler.post(updater);
}
private void startMediaPlayer() {
try {
File bufferedFile = new File(context.getCacheDir(),"playingMedia" + (counter++) + ".dat");
// We double buffer the data to avoid potential read/write errors that could happen if the
// download thread attempted to write at the same time the MediaPlayer was trying to read.
// For example, we can't guarantee that the MediaPlayer won't open a file for playing and leave it locked while
// the media is playing. This would permanently deadlock the file download. To avoid such a deadlock,
// we move the currently loaded data to a temporary buffer file that we start playing while the remaining
// data downloads.
moveFile(downloadingMediaFile,bufferedFile);
Log.e(getClass().getName(),"Buffered File path: " + bufferedFile.getAbsolutePath());
Log.e(getClass().getName(),"Buffered File length: " + bufferedFile.length()+"");
mediaPlayer = createMediaPlayer(bufferedFile);
// We have pre-loaded enough content and started the MediaPlayer so update the buttons & progress meters.
mediaPlayer.start();
startPlayProgressUpdater();
playButton.setEnabled(true);
} catch (IOException e) {
Log.e(getClass().getName(), "Error initializing the MediaPlayer.", e);
return;
}
}
private MediaPlayer createMediaPlayer(File mediaFile)
throws IOException {
MediaPlayer mPlayer = new MediaPlayer();
mPlayer.setOnErrorListener(
new MediaPlayer.OnErrorListener() {
public boolean onError(MediaPlayer mp, int what, int extra) {
Log.e(getClass().getName(), "Error in MediaPlayer: (" + what +") with extra (" +extra +")" );
return false;
}
});
// It appears that for security/permission reasons, it is better to pass a FileDescriptor rather than a direct path to the File.
// Also I have seen errors such as "PVMFErrNotSupported" and "Prepare failed.: status=0x1" if a file path String is passed to
// setDataSource(). So unless otherwise noted, we use a FileDescriptor here.
FileInputStream fis = new FileInputStream(mediaFile);
mPlayer.setDataSource(fis.getFD());
mPlayer.prepare();
return mPlayer;
}
/**
* Transfer buffered data to the MediaPlayer.
* NOTE: Interacting with a MediaPlayer on a non-main UI thread can cause thread-lock and crashes so
* this method should always be called using a Handler.
*/
private void transferBufferToMediaPlayer() {
try {
// First determine if we need to restart the player after transferring data...e.g. perhaps the user pressed pause
boolean wasPlaying = mediaPlayer.isPlaying();
int curPosition = mediaPlayer.getCurrentPosition();
// Copy the currently downloaded content to a new buffered File. Store the old File for deleting later.
File oldBufferedFile = new File(context.getCacheDir(),"playingMedia" + counter + ".dat");
File bufferedFile = new File(context.getCacheDir(),"playingMedia" + (counter++) + ".dat");
// This may be the last buffered File so ask that it be delete on exit. If it's already deleted, then this won't mean anything. If you want to
// keep and track fully downloaded files for later use, write caching code and please send me a copy.
bufferedFile.delete();
moveFile(downloadingMediaFile,bufferedFile);
// Pause the current player now as we are about to create and start a new one. So far (Android v1.5),
// this always happens so quickly that the user never realized we've stopped the player and started a new one
mediaPlayer.pause();
// Create a new MediaPlayer rather than try to re-prepare the prior one.
mediaPlayer = createMediaPlayer(bufferedFile);
mediaPlayer.seekTo(curPosition);
// Restart if at end of prior buffered content or mediaPlayer was previously playing.
// NOTE: We test for < 1second of data because the media player can stop when there is still
// a few milliseconds of data left to play
boolean atEndOfFile = mediaPlayer.getDuration() - mediaPlayer.getCurrentPosition() <= 1000;
if (wasPlaying || atEndOfFile){
mediaPlayer.start();
}
// Lastly delete the previously playing buffered File as it's no longer needed.
oldBufferedFile.delete();
}catch (Exception e) {
Log.e(getClass().getName(), "Error updating to newly loaded content.", e);
}
}
private void fireDataLoadUpdate() {
Runnable updater = new Runnable() {
public void run() {
if((totalKbRead>19)&&(totalKbRead<120))
textStreamed.setText((totalKbRead-19 + "% Buffering")); // show buffering status in the UI, i.e. 1%, 2%, ...
else if(totalKbRead<19)
textStreamed.setText(("Connecting..."));
else
textStreamed.setText((""));
float loadProgress = ((float)totalKbRead/(float)mediaLengthInKb);
progressBar.setSecondaryProgress((int)(loadProgress*100));
}
};
handler.post(updater);
}
private void fireDataFullyLoaded() {
Runnable updater = new Runnable() {
public void run() {
transferBufferToMediaPlayer();
// Delete the downloaded File as it's now been transferred to the currently playing buffer file.
downloadingMediaFile.delete();
textStreamed.setText(("Audio full loaded: " + totalKbRead + " Kb read"));
}
};
handler.post(updater);
}
public MediaPlayer getMediaPlayer() {
return mediaPlayer;
}
public void startPlayProgressUpdater() {
float progress = (((float)mediaPlayer.getCurrentPosition()/1000)/mediaLengthInSeconds);
progressBar.setProgress((int)(progress*100));
if (mediaPlayer.isPlaying()) {
Runnable notification = new Runnable() {
public void run() {
startPlayProgressUpdater();
}
};
handler.postDelayed(notification,1000);
}
}
public void interrupt() {
playButton.setEnabled(false);
isInterrupted = true;
validateNotInterrupted();
}
/**
* Move the file in oldLocation to newLocation.
*/
public void moveFile(File oldLocation, File newLocation)
throws IOException {
if ( oldLocation.exists( )) {
BufferedInputStream reader = new BufferedInputStream( new FileInputStream(oldLocation) );
BufferedOutputStream writer = new BufferedOutputStream( new FileOutputStream(newLocation, false));
try {
byte[] buff = new byte[8192];
int numChars;
while ( (numChars = reader.read( buff, 0, buff.length ) ) != -1) {
writer.write( buff, 0, numChars );
}
} catch( IOException ex ) {
throw new IOException("IOException when transferring " + oldLocation.getPath() + " to " + newLocation.getPath());
} finally {
try {
if ( reader != null ){
writer.close();
reader.close();
}
} catch( IOException ex ){
Log.e(getClass().getName(),"Error closing files when transferring " + oldLocation.getPath() + " to " + newLocation.getPath() );
}
}
} else {
throw new IOException("Old location does not exist when transferring " + oldLocation.getPath() + " to " + newLocation.getPath() );
}
}
}
// Now copy the code below into your activity
StreamingMediaPlayer audioStreamer =
new StreamingMediaPlayer(this,textStreamed,playButton,
streamButton,progressBar);
audioStreamer.startStreaming("your streaming station URL", 5208, 216);
I think this helps you :)
My project is about Remoting and I want to add a webcam component to it. Here it goes: I have 3 projects in my solution: Client, Server, and Remote.dll. Remote.dll contains a common class whose methods run on the server machine; when I call these methods from the Client, they execute on the server side. Now my question: I put the webcam code in Remote.dll, and it has an event called "video_NewFrame" that fires every time the webcam captures an image. But I can't reach the images from my client side, because once execution drops into this event it keeps firing indefinitely,
and my timer on the client side doesn't work either. I tried assigning the image to my global variable, but whenever execution goes back to the client and returns to Remote.dll, my variable is null...
How can I reach the captured images from my client as they are captured? Here is my code:
(I use the AForge framework for the webcam.)
private bool DeviceExist = true;
private FilterInfoCollection videoDevices;
private VideoCaptureDevice videoSource = null;
public bool WebCamStart(int DeviceIndex)
{
if (DeviceExist)
{
videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
//string myDevice = videoDevices[0].Name;
videoSource = new VideoCaptureDevice(videoDevices[0].MonikerString);
videoSource.NewFrame += new NewFrameEventHandler(video_NewFrame);
CloseVideoSource();
videoSource.DesiredFrameSize = new Size(640, 480);
//videoSource.DesiredFrameRate = 10;
videoSource.Start();
return true;
}
else return false;
}
public Bitmap lastImg;
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
Bitmap img = (Bitmap)eventArgs.Frame.Clone();
// This keeps firing indefinitely once execution gets here, and I can't reach the frames from the client side...
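// What I tried: assign the frame to the field so the client's timer can read it,
// but remObj.lastImg still comes back null on the client side.
lastImg = img;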
}
public string getFPS()
{
return videoSource.FramesReceived.ToString();
}
public void CloseVideoSource()
{
if (!(videoSource == null))
if (videoSource.IsRunning)
{
videoSource.SignalToStop();
videoSource.Stop();
videoSource = null;
}
}
public string getCamList()
{
string result = "No Device Found";
try
{
videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
//comboBox1.Items.Clear();
if (videoDevices.Count == 0)
throw new ApplicationException();
DeviceExist = true;
foreach (FilterInfo device in videoDevices)
{
//comboBox1.Items.Add(device.Name);
result = device.Name;
return result;
}
//comboBox1.SelectedIndex = 0; //make dafault to first cam
}
catch (ApplicationException)
{
DeviceExist = false;
//comboBox1.Items.Add("No capture device on your system");
return "No capture device on your system";
}
return result;
}
// and my client side...
private void timerWebCam_Tick(object sender, EventArgs e)
{
//lblFPS.Text ="Device Running... " + remObj.getFPS() + " FPS";
pictureBox1.Image = remObj.lastImg;
}