mbedTLS application on TCP - SSL

I am trying to use mbedTLS in my lwIP Modbus application on an STM32F4, on a non-RTOS system.
I am using the lwIP altcp layer, and I need to convert my function to the altcp functions.
This is the old function:
BOOL
xMBTCPPortInit( USHORT usTCPPort )
{
    struct tcp_pcb *pxPCBListenNew, *pxPCBListenOld;
    BOOL bOkay = (BOOL)FALSE;
    USHORT usPort;

    if( usTCPPort == 0 )
    {
        usPort = MB_TCP_DEFAULT_PORT;
    }
    else
    {
        usPort = ( USHORT ) usTCPPort;
    }
    if( ( pxPCBListenNew = pxPCBListenOld = tcp_new( ) ) == NULL )
    {
        /* Can't create TCP socket. */
        bOkay = (BOOL)FALSE;
    }
    else if( tcp_bind( pxPCBListenNew, IP_ADDR_ANY, ( u16_t ) usPort ) != ERR_OK )
    {
        /* Bind failed - Maybe illegal port value or in use. */
        ( void )tcp_close( pxPCBListenOld );
        bOkay = (BOOL)FALSE;
    }
    else if( ( pxPCBListenNew = tcp_listen( pxPCBListenNew ) ) == NULL )
    {
        ( void )tcp_close( pxPCBListenOld );
        bOkay = (BOOL)FALSE;
    }
    else
    {
        /* Register callback function for new clients. */
        tcp_accept( pxPCBListenNew, prvxMBTCPPortAccept );
        /* Everything okay. Set global variable. */
        pxPCBListen = pxPCBListenNew;
#ifdef MB_TCP_DEBUG
        vMBPortLog( MB_LOG_DEBUG, "MBTCP-ACCEPT", "Protocol stack ready.\r\n" );
#endif
    }
    bOkay = (BOOL)TRUE;
    return bOkay;
}
I am trying to change it like this:
BOOL
xMBTCPPortInit( USHORT usTCPPort )
{
    struct altcp_pcb *pxPCBListenNew, *pxPCBListenOld;
    BOOL bOkay = (BOOL)FALSE;
    USHORT usPort;

    if( usTCPPort == 0 )
    {
        usPort = MB_TCP_DEFAULT_PORT;
    }
    else
    {
        usPort = ( USHORT ) usTCPPort;
    }
    if( ( pxPCBListenNew = pxPCBListenOld = altcp_new( NULL ) ) == NULL )
    {
        /* Can't create TCP socket. */
        bOkay = (BOOL)FALSE;
    }
    else if( altcp_bind( pxPCBListenNew, IP_ADDR_ANY, ( u16_t ) usPort ) != ERR_OK )
    {
        /* Bind failed - Maybe illegal port value or in use. */
        ( void )altcp_close( pxPCBListenOld );
        bOkay = (BOOL)FALSE;
    }
    else if( ( pxPCBListenNew = altcp_listen( pxPCBListenNew ) ) == NULL )
    {
        ( void )altcp_close( pxPCBListenOld );
        bOkay = (BOOL)FALSE;
    }
    else
    {
        /* altcp_tls_new(pxPCBListenNew, IP_GET_TYPE(ip_addr))*/;
        /* Register callback function for new clients. */
        altcp_accept( pxPCBListenNew, prvxMBTCPPortAccept );
        /* Everything okay. Set global variable. */
        pxPCBListen = pxPCBListenNew;
#ifdef MB_TCP_DEBUG
        vMBPortLog( MB_LOG_DEBUG, "MBTCP-ACCEPT", "Protocol stack ready.\r\n" );
#endif
        SerialPrint("MBTCTP-ACCEPT");
    }
    altcp_tls_create_config_client(certfile, certlen);
    bOkay = (BOOL)TRUE;
    return bOkay;
}
I think I've made a mistake with the altcp_new( NULL ) call because, when I look at another example that uses mbedTLS with altcp, I see this line: https://github.com/straight-coding/LPC407x-NoOS-LWIP-MBEDTLS-HTTPD-KEIL/blob/f3943f7487a296a16ddff51885c9c8d0ca07562a/LambdaIOT/httpd/http_core.c#L419
They use the altcp_tls_new function, but it needs an altcp_tls_config parameter, and I only have TCP PCB parameters. How can I change my Modbus TCP connection functions?
struct altcp_pcb *
altcp_tls_new(struct altcp_tls_config *config, u8_t ip_type)
{
    struct altcp_pcb *inner_conn, *ret;
    LWIP_UNUSED_ARG(ip_type);
    inner_conn = altcp_tcp_new_ip_type(ip_type);
    if (inner_conn == NULL) {
        printf("altcp_tls_new inner_conn = nULL\r\n");
        return NULL;
    }
    ret = altcp_tls_wrap(config, inner_conn);
    if (ret == NULL) {
        printf("altcp_tls_new altcp_tls_wrap return null\r\n");
        altcp_close(inner_conn);
    }
    return ret;
}

To create the altcp_tls_config parameter, you need to create a new instance of the altcp_tls_config structure and pass it to altcp_tls_new().
This link explains it in further detail:
https://www.nongnu.org/lwip/2_1_x/group__altcp__api.html
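As a minimal sketch of how the pieces could fit together in your port, using the altcp_tls_new() you quoted above. Since you are the listening side of a Modbus connection, the config would come from altcp_tls_create_config_server_privkey_cert() rather than the client variant; server_key, server_key_len, server_cert and server_cert_len are placeholder names for key/certificate buffers you have to provide, and error handling is trimmed:

static struct altcp_tls_config *tls_config;

BOOL
xMBTCPPortInit( USHORT usTCPPort )
{
    struct altcp_pcb *pxPCBListenNew;
    USHORT usPort = ( usTCPPort == 0 ) ? MB_TCP_DEFAULT_PORT : usTCPPort;

    /* Created once at init; server_key/server_cert and their lengths are
     * placeholder names for buffers you must provide. */
    tls_config = altcp_tls_create_config_server_privkey_cert(
        server_key, server_key_len,     /* private key */
        NULL, 0,                        /* no key passphrase */
        server_cert, server_cert_len ); /* certificate */
    if( tls_config == NULL )
    {
        return (BOOL)FALSE;
    }
    /* altcp_tls_new() allocates the inner TCP PCB and wraps it with TLS,
     * so it replaces the altcp_new( NULL ) call entirely. */
    if( ( pxPCBListenNew = altcp_tls_new( tls_config, IPADDR_TYPE_ANY ) ) == NULL )
    {
        return (BOOL)FALSE;
    }
    if( altcp_bind( pxPCBListenNew, IP_ADDR_ANY, ( u16_t ) usPort ) != ERR_OK )
    {
        ( void )altcp_close( pxPCBListenNew );
        return (BOOL)FALSE;
    }
    if( ( pxPCBListenNew = altcp_listen( pxPCBListenNew ) ) == NULL )
    {
        return (BOOL)FALSE;
    }
    altcp_accept( pxPCBListenNew, prvxMBTCPPortAccept );
    pxPCBListen = pxPCBListenNew;
    return (BOOL)TRUE;
}

Note that altcp_tls_create_config_client() is only for outgoing client connections, so it has no place in a listening Modbus server, and whichever config function you use should run once at startup, not per connection.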

OpenLDAP - Enabling CRL check for LDAP TLS connections

I have a client that connects to an LDAP server using TLS. For this connection, I want to enable CRL checking and reject the connection only if any server/client certificate has been revoked.
In special cases (like a missing or expired CRL) I want to ignore the error and establish the connection.
So I thought to override the default SSL verify callback to ignore those specific errors.
But the callback is not called at all; only the default callback is ever called.
Here is my callback:
static int verify_callback(int ok, X509_STORE_CTX *ctx)
{
    X509* cert = X509_STORE_CTX_get_current_cert(ctx);
    if (ok)
        return ok;
    int sslRet = X509_STORE_CTX_get_error(ctx);
    const char* err = NULL;
    switch (sslRet)
    {
    case X509_V_ERR_UNABLE_TO_GET_CRL:
    case X509_V_ERR_CRL_HAS_EXPIRED:
    case X509_V_ERR_CRL_NOT_YET_VALID:
        printf( "CRL: Verification failed... but ignored : %d\n", sslRet);
        return 1;
    default:
        err = X509_verify_cert_error_string(sslRet);
        if (err)
            printf( "CRL: Failed to verify : %s\n", err);
        return 0;
    }
    return sslRet;
}
The default verify callback is overridden using the LDAP connect-callback option:
void ldap_tls_cb(LDAP * ld, SSL * ssl, SSL_CTX * ctx, void * arg)
{
    SSL_CTX_set_verify(ctx, SSL_VERIFY_PEER, verify_callback);
    printf("verify call back is set...\n");
    return;
}
Main Program:
int main( int argc, char **argv )
{
    LDAP *ldap;
    int auth_method = LDAP_AUTH_SIMPLE; //LDAP_AUTH_SASL
    int ldap_version = LDAP_VERSION3;
    char *ldap_host = "10.104.40.35";
    int ldap_port = 389;

    if ( (ldap = ldap_init(ldap_host, ldap_port)) == NULL ) {
        perror( "ldap_init failed" );
        return( EXIT_FAILURE );
    }
    int result = ldap_set_option(ldap, LDAP_OPT_PROTOCOL_VERSION, &ldap_version);
    if (result != LDAP_OPT_SUCCESS ) {
        ldap_perror(ldap, "ldap_set_option failed!");
        return(EXIT_FAILURE);
    }
    int requireCert = LDAP_OPT_X_TLS_DEMAND;
    result = ldap_set_option(NULL, LDAP_OPT_X_TLS_REQUIRE_CERT, &requireCert);
    if (result != LDAP_OPT_SUCCESS ) {
        ldap_perror(ldap, "ldap_set_option - req cert -failed!");
        return(EXIT_FAILURE);
    }
    result = ldap_set_option(NULL, LDAP_OPT_X_TLS_CACERTFILE, "/etc/certs/Cert.pem");
    if (result != LDAP_OPT_SUCCESS ) {
        ldap_perror(ldap, "ldap_set_option - cert file - failed!");
        return(EXIT_FAILURE);
    }
    int crlvalue = LDAP_OPT_X_TLS_CRL_ALL;
    result = ldap_set_option(NULL, LDAP_OPT_X_TLS_CRLCHECK, &crlvalue);
    if (result != LDAP_OPT_SUCCESS ) {
        ldap_perror(ldap, "ldap_set_option failed!");
        return(EXIT_FAILURE);
    }
    int debug = 7;
    ldap_set_option(NULL, LDAP_OPT_DEBUG_LEVEL, &debug);
    result = ldap_set_option(ldap, LDAP_OPT_X_TLS_CONNECT_CB, (void *)ldap_tls_cb);
    if (result != LDAP_SUCCESS) {
        fprintf(stderr, "ldap_set_option(LDAP_OPT_X_TLS_CONNECT_CB): %s\n", ldap_err2string(result));
        return(1);
    }
    int msgidp = 0;
    result = ldap_start_tls(ldap,NULL,NULL,&msgidp);
    if (result != LDAP_OPT_SUCCESS ) {
        ldap_perror(ldap, "start tls failed!");
        return result;
    } else {
        printf("Start tls success.\n");
    }
    LDAPMessage *resultm;
    struct timeval timeout;
    result = ldap_result(ldap, msgidp, 0, &timeout, &resultm );
    if ( result == -1 || result == 0 ) {
        printf("ldap_result failed;retC=%d \n", result);
        return result;
    }
    result = ldap_parse_extended_result(ldap, resultm, NULL, NULL, 0 );
    if ( result == LDAP_SUCCESS ) {
        result = ldap_install_tls (ldap);
        printf("installing tls... %s\n", ldap_err2string(result));
    }
    int request_id = 0;
    result = ldap_sasl_bind(ldap, "", LDAP_SASL_SIMPLE, NULL, 0, 0, &request_id);
    if ( result != LDAP_SUCCESS ) {
        fprintf(stderr, "ldap_x_bind_s: %s\n", ldap_err2string(result));
        printf("LDAP bind error .. %d\n", result);
        return(EXIT_FAILURE);
    } else {
        printf("LDAP connection successful.\n");
    }
    ldap_unbind(ldap);
    return(EXIT_SUCCESS);
}
Can someone help me check why my verify callback is not called?
I think you need to set the callback on the SSL object directly instead of the context, so
void ldap_tls_cb(LDAP * ld, SSL * ssl, SSL_CTX * ctx, void * arg)
{
    SSL_set_verify(ssl, SSL_VERIFY_PEER, verify_callback);
    printf("verify call back is set...\n");
    return;
}
The reason for this is that the SSL handle has already been initialised by the time your connect callback is called (see the OpenLDAP code), and
it's too late to set this callback through the context at that point:
If no special callback was set before, the default callback for the underlying ctx is used, that was valid at the time ssl was created with SSL_new(3).
OpenLDAP can be built with GnuTLS, so you may need to check that it's using OpenSSL before setting the callback. The LDAP_OPT_X_TLS_PACKAGE option could be used for this (note that I haven't tested this code):
char* package = NULL;
int result = ldap_get_option(NULL, LDAP_OPT_X_TLS_PACKAGE, (void *)&package);
if (result != LDAP_OPT_SUCCESS) {
    ldap_perror(ldap, "ldap_get_option failed!");
    return(EXIT_FAILURE);
} else {
    if (strcmp(package, "OpenSSL") == 0) {
        // Set your callback
    }
    ldap_memfree(package);
}

Send different metadata to different target streams - PDI

I have two target streams (Matches and Mismatches) defined as below:
@Override
public StepIOMetaInterface getStepIOMeta() {
    StepMeta stepMeta = new StepMeta();
    if (ioMeta == null) {
        ioMeta = new StepIOMeta(true, false, false, false, false, true);
        StreamInterface matchStream = new Stream(StreamType.TARGET, null, "Matches", StreamIcon.TARGET, null);
        StreamInterface mismatchStream = new Stream(StreamType.TARGET, null, "Mismatches", StreamIcon.TARGET, null);
        ioMeta.addStream(matchStream);
        ioMeta.addStream(mismatchStream);
    }
    return ioMeta;
}
I want to send different metadata to these two targets. The metadata is received from the previous steps: for Matches it needs to be the concatenation of both input streams, and for Mismatches just the first input stream.
I am stuck on how to define the metadata separately for the two target streams.
Appreciate your help.
List<StreamInterface> targets = getStepIOMeta().getTargetStreams();
if ( info != null && targets != null )
{
    // "Matches" target: merge the metadata of all incoming info streams.
    if ( nextStep.getName().equals( targets.get( 0 ).getStepname() ) )
    {
        for ( int i = 0; i < info.length; i++ )
        {
            if ( info[i] != null )
            {
                r.mergeRowMeta( info[i] );
            }
        }
    }
    // "Mismatches" target: merge only the metadata of the first info stream.
    if ( nextStep.getName().equals( targets.get( 1 ).getStepname() ) )
    {
        if ( info.length > 0 && info[0] != null )
        {
            r.mergeRowMeta( info[0] );
        }
    }
}

Arduino Automatic Light Switch 2

I'm currently working on an automatic light switch. Here's my code:
#include <Servo.h>

boolean time = false;
const int timeLim = 10000;
const int delLen = 5000;
int pirVal = 0;
const int pirPin = 2;
const int sensePin = 5;
boolean timeRet = false;
int lightVal;
Servo myServo;
unsigned long limit;

void setup() {
    Serial.begin(9600);
    pinMode(pirPin, INPUT);
    myServo.attach(11);
    myServo.write(40);
}

void loop() {
    unsigned long Timer = millis();
    pirVal = digitalRead(pirPin);
    int lightVal = analogRead(sensePin);
    Serial.print(pirVal);
    Serial.print(' ');
    Serial.print(lightVal);
    Serial.print(' ');
    Serial.print(Timer);
    Serial.print(' ');
    Serial.print(time);
    Serial.print(' ');
    Serial.print(limit);
    Serial.print(' ');
    Serial.println(timeRet);
    if ( lightVal < 400 ) {
        time = false;
        limit = 0;
        timeRet = false;
    }
    if ( lightVal < 400 && pirVal == 1 ) {
        unsigned long time = false;
        pirVal = 0;
        myServo.write(160);
    }
    if ( lightVal > 400 && pirVal == 0 && timeRet == false ) {
        limit = getTimeLim( timeLim, Timer );
        pirVal = 0;
        timeRet = true;
    }
    if ( lightVal > 400 && pirVal == 0 && timeRet == true ) {
        time = timeStat( limit, Timer );
    }
    if ( lightVal > 400 && time == true ) {
        myServo.write(40);
    }
}

int getTimeLim( const int timeLim, unsigned long Timer ) {
    unsigned long limit = Timer + timeLim;
    return limit;
}

boolean timeStat( unsigned long limit, unsigned long Timer ) {
    if ( Timer < limit ) {
        time = false;
    } else if ( Timer > limit ) {
        time = true;
    }
    return time;
}
The problem is that, when you watch the serial output, getTimeLim works the first time, but the second time it always returns some outrageous number (e.g. 4294937965). I don't know why it would give me this huge number. Help would be much appreciated.
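One likely cause, for what it's worth: on AVR-based Arduinos an int is only 16 bits, and getTimeLim is declared to return int, so the unsigned long sum Timer + timeLim is truncated on return and then converted back into the unsigned long limit, which yields huge values like 4294937965. A minimal sketch of that fix (only the return type changes):

// Return unsigned long so the millis()-based sum is not truncated
// through a 16-bit int on the way back to the caller.
unsigned long getTimeLim( const int timeLim, unsigned long Timer ) {
    unsigned long limit = Timer + timeLim;
    return limit;
}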
Since your code works now and you want to optimize it, I would suggest this:
// ( pseudo code since I'm not familiar with Arduino )
void loop( ) {
    if ( ( analogRead( sensePin ) < 400 )
         && ( digitalRead( pirPin ) ) ) {
        myServo.write( 160 );                         // turn on light
        unsigned long time_end = millis( ) + 60000UL; // initiate timer value (60 s)
        while ( millis( ) < time_end );               // poll time until 60 s have passed
        myServo.write( 40 );                          // turn off light
    }
}

Silverlight-DLL communication issue

Problems with a Silverlight 4 application.
In this application every client session creates a separate process, which calls a DLL.
Communication with the DLL is built as in the following call stack (for two functions: one working, the other not).
There are 2 functions in the DLL (both work fine):
extern "C" BOOL __stdcall DocRunExternPageDividions(const char *docId, int num_form, int PageNum, int *Vcols, int **Vvalues, int *Hcols, int **Hvalues)
{
LOG_START_FUNCTION
BOOL res = 1;
__try {
res = DocRunExternPageDividions1(docId, num_form, PageNum, Vcols, Vvalues, Hcols, Hvalues);
}
__except(ExFilter(GetExceptionInformation()))
{
AfxThrowUserException();
}
LOG_STOP_FUNCTION
return res;
}
extern "C" BOOL __stdcall DocRunExternPageBreakRects(const char *docId, int num_form, int PageNum)
{
LOG_START_FUNCTION
BOOL res = 1;
__try {
res = DocRunExternPageBreakRects1(docId, num_form, PageNum);
}
__except(ExFilter(GetExceptionInformation()))
{
AfxThrowUserException();
}
LOG_STOP_FUNCTION
return res;
}
To call these functions, the server has two delegates:
private delegate void DocRunExternPageBreakRectsDelegate(string docId, int DocNum, int PageNum);
private delegate void DocRunExternPageDividionsDelegate(
    string docId, int DocNum, int PageNum, out int Vcols, out IntPtr VoutArray, out int Hcols,
    out IntPtr HoutArray);
... two delegate instances and corresponding functions:
private DocRunExternPageBreakRectsDelegate DocRunExternPageBreakRectsD;
DocRunExternPageBreakRectsD =
    Marshal.GetDelegateForFunctionPointer(ptrDocRunExternPageBreakRects,
        typeof (DocRunExternPageBreakRectsDelegate)) as
    DocRunExternPageBreakRectsDelegate;

private DocRunExternPageDividionsDelegate DocRunExternPageDividionsD;
DocRunExternPageDividionsD =
    Marshal.GetDelegateForFunctionPointer(ptrDocRunExternPageDividionsD,
        typeof (DocRunExternPageDividionsDelegate)) as
    DocRunExternPageDividionsDelegate;

public void DocRunExternPageDividions(string docId, int DocNum, int PageNum, out int[] vert, out int[] horz) {
    IntPtr VoutArray, HoutArray;
    int vcols, hcols;
    DocRunExternPageDividionsD(docId, DocNum, PageNum, out vcols, out VoutArray, out hcols, out HoutArray);
    marshal(VoutArray, out vert, vcols);
    marshal(HoutArray, out horz, hcols);
}

public void DocRunExternPageBreakRects(string docId, int DocNum, int PageNum) {
    DocRunExternPageBreakRectsD(docId, DocNum, PageNum);
}
Each of these functions is called here (server code):
public bool PageBreakRects(string docId, int DocNum, int PageNum, out int[] vert, out int[] horz) {
    bool result = false;
    vert = null;
    horz = null;
    Program.WriteUserMessage("Called PageBreakRects(" + docId + ", " + DocNum + ", " + PageNum + ")");
    try {
        DocRunExternPageBreakRects(docId, DocNum, PageNum);
        DocRunExternPageDividions(docId, 0, PageNum, out vert, out horz);
        result = true;
    } catch (Exception ex) {}
    return result;
}

public bool GetPageDividions(string docID, int Id, int pageNumber, out int[] vert, out int[] horz) {
    bool result = false;
    vert = null;
    horz = null;
    try {
        DocRunExternPageDividions(docID, Id, pageNumber, out vert, out horz);
        result = true;
    } catch (Exception) {}
    return result;
}
Each of them is called here:
public DocDividionsResult PageBreakRects(string docID, int DocNum, int pageNumber) {
    var result = new DocDividionsResult();
    int[] vert;
    int[] horz;
    result.Data = new List<object> { Program.DllWrapper.PageBreakRects(docID, DocNum, pageNumber, out vert, out horz) };
    result.Vert = vert;
    result.Horz = horz;
    return result;
}

public DocDividionsResult GetPageDividions(string docID, int formId, int pageNumber) {
    var result = new DocDividionsResult();
    int[] vert;
    int[] horz;
    result.Data = new List<object>
        { Program.DllWrapper.GetPageDividions(docID, formId, pageNumber, out vert, out horz) };
    result.Vert = vert;
    result.Horz = horz;
    return result;
}
Then, within lambda expressions:
public bool GetPageDividions(string docID, int formId, int pageNumber, out int[] vert, out int[] horz) {
    bool result = false;
    int[] localVert = null;
    int[] localHorz = null;
    if (_wp != null) {
        if (Service<IWPCommunication>.Use(TestService =>
            {
                TestService.Test(UserId);
            },
            WPService =>
            {
                DocDividionsResult br = WPService.GetPageDividions(docID, formId, pageNumber);
                if (br != null && br.Data != null && br.Data.Length == 1)
                {
                    result = (bool)br.Data[0];
                    localVert = br.Vert;
                    localHorz = br.Horz;
                }
            }, Id, FS) == 0)
        {
            ...
            result = false;
        }
    }
    vert = localVert;
    horz = localHorz;
    return result;
}

public bool PageBreakRects(string docId, int DocNum, int PageNum) {
    bool result = false;
    if (_wp != null)
    {
        if (Service<IWPCommunication>.Use(TestService =>
            {
                TestService.Test(UserId);
            },
            WPService =>
            {
                DocDividionsResult br = WPService.PageBreakRects(docId, DocNum, PageNum);
                if (br != null && br.Data != null && br.Data.Length == 1) {
                    result = (bool)br.Data[0];
                }
            }, Id, FS) == 0)
        {
            ...
            result = false;
        }
    }
    return result;
}
The "Use" function (used above):
public static int Use(UseServiceDelegate<T> codeTest, UseServiceDelegate<T> codeBlock, string SessionId, FileStream fs, bool throwException) {
    IClientChannel texy = (IClientChannel)_testFactory.CreateChannel(new EndpointAddress("net.pipe://localhost/X2WPServiceUID" + SessionId));
    IClientChannel proxy = (IClientChannel)_channelFactory.CreateChannel(new EndpointAddress("net.pipe://localhost/X2WPServiceUID" + SessionId));
    int returnCode = 0;
    try {
        if (codeTest != null) {
            codeTest((T)texy);
            texy.Close();
        }
        returnCode = 1;
        if (codeBlock != null) {
            codeBlock((T)proxy);
            proxy.Close();
        }
        returnCode = 2;
    } catch(Exception e) {
        if (returnCode == 1 && throwException)
            throw e;
    } finally {
        if (returnCode == 0 && codeTest != null)
            texy.Abort();
        else if (returnCode == 1 && codeBlock != null)
            proxy.Abort();
    }
    return returnCode;
}
Client communication is omitted, as the exception is raised on the server side.
The GetPageDividions function works fine, but PageBreakRects does not: the line
DocDividionsResult br = WPService.PageBreakRects(docId, DocNum, PageNum);
throws the following exception:
"The message with Action 'http://tempuri.org/IWPCommunication/PageBreakRects'
cannot be processed at the receiver, due to a ContractFilter mismatch at the EndpointDispatcher.
This may be because of either a contract mismatch (mismatched Actions between sender and receiver)
or a binding/security mismatch between the sender and the receiver.
Check that sender and receiver have the same contract and the same binding
(including security requirements, e.g. Message, Transport, None)."
It is worth mentioning that if, in the PageBreakRects function, you replace:
DocDividionsResult br = WPService.PageBreakRects(docId, DocNum, PageNum);
with
DocDividionsResult br = WPService.GetPageDividions(docID, formId, pageNumber);
then no exception is thrown.
Not sure if you have started at the very beginning or not, but I expect that none of the code you posted is causing the error. That error means you are having trouble calling your web service from the client (Silverlight) because there is an error in the system.serviceModel section of your web config. You can often fix it by just refreshing your service reference.
Try running the app locally in Visual Studio and point the service reference to the service you installed on the server (right-click on the service reference, select "Configure Service Reference", then change the URL to correspond to the service location on your server). If you are already developing/testing in this configuration, try right-clicking on the service reference and selecting "Update Service Reference".

Playing sound on Metro XAudio2 and windows metro throws exception

I am trying to play a PCM UDP audio stream on Windows 8. Is there an easier way than using XAudio2?
I am very new to XAudio2, and creating an XAudio2 player is throwing an exception for me:
public ref class Player sealed
{
public:
    void feedData(Platform::Array<unsigned char> ^byteArray)
    {
        buffer.AudioBytes = byteArray->Length;
        buffer.pAudioData = new byte[byteArray->Length];
        memcpy(buffer.pAudioData, &byteArray[0], byteArray->Length);
        if( FAILED(hr = SourceVoice->SubmitSourceBuffer( &buffer ) ) )
            throw Platform::Exception::CreateException(hr);
    }

    Player()
    {
        HRESULT hr;
        if ( FAILED(hr = XAudio2Create( &XAudio2, 0, XAUDIO2_DEFAULT_PROCESSOR ) ) )
            throw Platform::Exception::CreateException(hr);
        if ( FAILED(hr = XAudio2->CreateMasteringVoice( &MasterVoice ) ) )
            throw Platform::Exception::CreateException(hr);
        ZeroMemory(&wfx, sizeof(WAVEFORMATEXTENSIBLE));
        wfx.Format.wFormatTag = WAVE_FORMAT_PCM;
        wfx.Format.nChannels = 1;
        wfx.Format.nSamplesPerSec = 16000;
        wfx.Format.nAvgBytesPerSec = 32000;
        wfx.Format.nBlockAlign = 2;
        wfx.Format.wBitsPerSample = 16;
        if( FAILED(hr = XAudio2->CreateSourceVoice( &SourceVoice, (WAVEFORMATEX*)&wfx ) ))
            throw Platform::Exception::CreateException(hr);
        if ( FAILED(hr = SourceVoice->Start( 0 ) ) )
            throw Platform::Exception::CreateException(hr);
    }

    ~Player()
    {
        MasterVoice->DestroyVoice();
        SourceVoice->DestroyVoice();
    }

private:
    Microsoft::WRL::ComPtr<IXAudio2> XAudio2;
    IXAudio2MasteringVoice* MasterVoice;
    IXAudio2SourceVoice* SourceVoice;
    WAVEFORMATEXTENSIBLE wfx;
    XAUDIO2_BUFFER buffer;
};
I am running it as a WinRT component DLL, and the exception occurs on this line:
if( FAILED(hr = XAudio2->CreateSourceVoice( &SourceVoice, (WAVEFORMATEX*)&wfx ) ))
    throw Platform::Exception::CreateException(hr);
I stepped through the debugger and the wfx and SourceVoice structures look initialized okay. Can someone help me figure out what is going wrong?