SecKeychainFindInternetPassword is not working - objective-c

I have a problem with storing and reading from my keychain. First I tried to store use a char like this "account_name" in the Method SecKeychainFindInternetPassword for accountname and this runs. But now i would like to store a variable inside. But if i run this code the Programm cannot find the Keychain Item.
Please Help me.
(Sorry for my bad Englisch, i am a german student)
-(void)StorePasswordKeychain:(void*)password :(UInt32)passwordLength
{
char *userString;
userString = (char *)[_username UTF8String];
SecKeychainAddInternetPassword(
NULL,
StrLength("myserver.com"),
"myserver.com",
0,
NULL,
StrLength(userString),
userString,
0,
nil,
0,
kSecProtocolTypeHTTPS,
kSecAuthenticationTypeHTMLForm,
passwordLength,
password,
NULL
);
}
-(OSStatus)GetPasswordKeychain:(void *)passwordData :(UInt32 *)passwordLength
{
OSStatus status;
char *userString;
userString = (char *)[_username UTF8String];
status = SecKeychainFindInternetPassword(
NULL,
StrLength("myserver.com"),
"myserver.com",
0,
NULL,
StrLength(userString),
userString,
0,
nil,
0,
kSecProtocolTypeHTTPS,
kSecAuthenticationTypeHTMLForm,
passwordLength,
passwordData,
NULL
);
return status;
}

Two suggestions.. don't pass null into itemRef (the last arg). Then you'll have a pointer to the keychain you wish to modify.
Also, you should really check the error code to see if your add function worked.
OSStatus result = SecKeychainAddInternetPassword(
NULL,
StrLength("myserver.com"),
"myserver.com",
0,
NULL,
StrLength(userString),
userString,
0,
nil,
0,
kSecProtocolTypeHTTPS,
kSecAuthenticationTypeHTMLForm,
passwordLength,
password,
NULL
);
if(result != noErr){
NSLog(#"Error AddPassword result=:%d", result );
}
This is my sample program with the same code that you provided and it works fine.
int main(int argc, const char * argv[])
{
#autoreleasepool {
char *inputpassword = "topsecret";
UInt32 inputpassLength = strlen(inputpassword);
OSStatus status;
NSString *_username = #"account_name";
char *userString;
userString = (char *)[_username UTF8String];
status = SecKeychainAddInternetPassword(
NULL,
StrLength("myserver.com"),
"myserver.com",
0,
NULL,
StrLength(userString),
userString,
0,
nil,
0,
kSecProtocolTypeHTTPS,
kSecAuthenticationTypeHTMLForm,
inputpassLength,
inputpassword,
NULL
);
NSLog(#"Adding Status:%d", status);
UInt32 returnpasswordLength = 0;
char *passwordData;
status = SecKeychainFindInternetPassword(
NULL,
StrLength("myserver.com"),
"myserver.com",
0,
NULL,
StrLength(userString),
userString,
0,
nil,
0,
kSecProtocolTypeHTTPS,
kSecAuthenticationTypeHTMLForm,
&returnpasswordLength,
(void *)&passwordData,
NULL
);
NSLog(#"Retrieving status:%d", status);
NSLog(#"Password:%#", [[NSString alloc] initWithBytes:passwordData
length:returnpasswordLength
encoding:NSUTF8StringEncoding]);
}
return 0;
}

Related

Why does update not update any records?

I am using the OCI functions to update a database table in Oracle but despite the commands all returning success nothing in the database table is changing. It changes if I hard code the Where clause values so I think I might be doing something wrong in the binding code?
If I create the database table as follows:
create table updatebuddTI(i char(10), j int);
insert into updatebuddTI values ('test1',1);
insert into updatebuddTI values ('test2',2);
insert into updatebuddTI values ('test3',3);
and then use the code:
#include "stdafx.h"
#include <string>
#include <oci.h>
#include <stdlib.h>
#define OCI_NOT_NULL 0
#define OCI_VALUE_NULL -1
#define OCI_VALUE_TRUNCATED -2
#define ORACLE_MAX_SESSIONS 30
int _tmain(int argc, _TCHAR* argv[]) {
// OCI handles
OCIEnv *envhp;
OCIError *errhp;
OCIServer *srvhp;
OCISvcCtx *svchp;
OCISession *authp;
OCIStmt *stmtp;
OCIDefine *defnpp;
// Connection information
text* user = (text*)"test";
text* pwd = (text*)"password";
text* sid = (text*)"oracle-server";
char *query = "UPDATE updatebuddTI SET I = 'test3' WHERE J = :1";// :2";
int dataReceivedI[10];
// Fetched data indicators, lengths and codes
ub2 dataReceived_len[10];
ub2 dataReceived_code[10];
sb2 dataReceived_indI[3];
ub2 dataReceived_lenI[3];
ub2 dataReceived_codeI[3];
oratext message[512];
sb4 errcode;
// Allocate environment
int rc = OCIEnvCreate(&envhp, OCI_DEFAULT, NULL, NULL, NULL, NULL, 0, NULL);
ub2 code = OCINlsCharSetNameToId(envhp, (const oratext *)"WE8MSWIN1252");
OCIEnv *envHandle(0);
if(code) {
rc = OCIEnvNlsCreate(&envHandle,
OCI_OBJECT | OCI_THREADED,
NULL,
NULL,
NULL,
NULL,
0,
NULL,
code,
code);
} else {
printf("problem with OCIEnvNlsCreate!\n");
}
// Allocate error handle
rc = OCIHandleAlloc(envhp, (void**)&errhp, OCI_HTYPE_ERROR, 0, NULL);
// Allocate server and service context handles
rc = OCIHandleAlloc(envhp, (void**)&srvhp, OCI_HTYPE_SERVER, 0, NULL);
rc = OCIHandleAlloc(envhp, (void**)&svchp, OCI_HTYPE_SVCCTX, 0, NULL);
// Attach to the server
//rc = OCIServerAttach(srvhp, errhp, sid, strlen((char*)sid), 0);
// Set server in the service context
rc = OCIAttrSet(svchp, OCI_HTYPE_SVCCTX, (dvoid*)srvhp, 0, OCI_ATTR_SERVER, errhp);
// Allocate session handle
rc = OCIHandleAlloc(envhp, (void**)&authp, OCI_HTYPE_SESSION, 0, NULL);
// Set user name and password
rc = OCIAttrSet(authp, OCI_HTYPE_SESSION, (void*)user, strlen((char*)user),
OCI_ATTR_USERNAME, errhp);
rc = OCIAttrSet(authp, OCI_HTYPE_SESSION, (void*)pwd, strlen((char *)pwd),
OCI_ATTR_PASSWORD, errhp);
std::string path("oracle-server");
rc = OCIServerAttach(srvhp, errhp, (text *)path.c_str(), (sb4)path.length(), 0);
// Connect
rc = OCISessionBegin(svchp, errhp, authp, OCI_CRED_RDBMS, OCI_DEFAULT);
// Set session in the service context
rc = OCIAttrSet(svchp, OCI_HTYPE_SVCCTX, authp, 0, OCI_ATTR_SESSION, errhp);
// Allocate statement handle
rc = OCIHandleAlloc(envhp, (void**)&stmtp, OCI_HTYPE_STMT, 0, NULL);
// Prepare the query
rc = OCIStmtPrepare(stmtp, errhp, (text*)query, strlen(query), OCI_NTV_SYNTAX, OCI_DEFAULT);
char text[10];
int option=0;
// Define the select list items
rc = OCIDefineByPos(stmtp, &defnpp, errhp, 1, (void*)text, 5, SQLT_CHR, (void*)dataReceivedI,
dataReceived_len, dataReceived_code, OCI_DEFAULT);
if (rc != 0) {
OCIErrorGet(errhp, (ub4)1, NULL, &errcode, message, sizeof(message), (ub4)OCI_HTYPE_ERROR);
printf("%s", message);
}
rc = OCIDefineByPos(stmtp, &defnpp, errhp, 2, (void*)option, sizeof(int), SQLT_NUM, (void*)dataReceived_indI,
dataReceived_lenI, dataReceived_codeI, OCI_DEFAULT);
if (rc != 0) {
OCIErrorGet(errhp, (ub4)1, NULL, &errcode, message, sizeof(message), (ub4)OCI_HTYPE_ERROR);
printf("%s", message);
}
OCIBind* bindHandle2;
rc = OCIBindByPos(stmtp, &bindHandle2, errhp, 1,
(dvoid *)&option, (sword) sizeof(int), SQLT_NUM,
(dvoid *)0, (ub2 *)0, (ub2 *)0, (ub4)0, (ub4 *)0, OCI_DEFAULT);
if (rc != 0) {
OCIErrorGet(errhp, (ub4)1, NULL, &errcode, message, sizeof(message), (ub4)OCI_HTYPE_ERROR);
printf("%s", message);
}
strcpy_s(text, "test3");
option = 2;
rc = OCIStmtExecute(svchp, stmtp, errhp, 1, 0, 0,0, OCI_DEFAULT);
if (rc != 0) {
OCIErrorGet(errhp, (ub4)1, NULL, &errcode, message, sizeof(message), (ub4)OCI_HTYPE_ERROR);
printf("%s", message);
}
rc = OCIHandleFree(stmtp, OCI_HTYPE_STMT);
// Disconnect
rc = OCISessionEnd(svchp, errhp, authp, OCI_DEFAULT);
rc = OCIServerDetach(srvhp, errhp, OCI_DEFAULT);
rc = OCIHandleFree(envhp, OCI_HTYPE_ENV);
}
Can anyone see what I have done wrong?

Encoding H.264 Compression Session with CGDisplayStream

I'm trying to create an H.264 Compression Session with the data from my screen. I've created a CGDisplayStreamRef instance like so:
displayStream = CGDisplayStreamCreateWithDispatchQueue(0, 100, 100, k32BGRAPixelFormat, nil, self.screenCaptureQueue, ^(CGDisplayStreamFrameStatus status, uint64_t displayTime, IOSurfaceRef frameSurface, CGDisplayStreamUpdateRef updateRef) {
//Call encoding session here
});
Below is how I currently have the encoding function setup:
- (void) encode:(CMSampleBufferRef )sampleBuffer {
CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
CMTime presentationTimeStamp = CMTimeMake(frameID++, 1000);
VTEncodeInfoFlags flags;
OSStatus statusCode = VTCompressionSessionEncodeFrame(EncodingSession,
imageBuffer,
presentationTimeStamp,
kCMTimeInvalid,
NULL, NULL, &flags);
if (statusCode != noErr) {
NSLog(#"H264: VTCompressionSessionEncodeFrame failed with %d", (int)statusCode);
VTCompressionSessionInvalidate(EncodingSession);
CFRelease(EncodingSession);
EncodingSession = NULL;
return;
}
NSLog(#"H264: VTCompressionSessionEncodeFrame Success");
}
I'm trying to understand how I can convert the data from my screen into a CMSampleBufferRef so I can properly call my encode function. So far, I've not been able to determine if this is possible, or the right approach for what I'm trying to do. Does anyone have any suggestions?
EDIT: I've gotten my IOSurfaceconverted to a CMBlockBuffer, but haven't yet figured out how to convert that to a CMSampleBufferRef:
void *mem = IOSurfaceGetBaseAddress(frameSurface);
size_t bytesPerRow = IOSurfaceGetBytesPerRow(frameSurface);
size_t height = IOSurfaceGetHeight(frameSurface);
size_t totalBytes = bytesPerRow * height;
CMBlockBufferRef blockBuffer;
CMBlockBufferCreateWithMemoryBlock(kCFAllocatorNull, mem, totalBytes, kCFAllocatorNull, NULL, 0, totalBytes, 0, &blockBuffer);
EDIT 2
Some more progress:
CMSampleBufferRef *sampleBuffer;
OSStatus sampleStatus = CMSampleBufferCreate(
NULL, blockBuffer, TRUE, NULL, NULL,
NULL, 1, 1, NULL,
0, NULL, sampleBuffer);
[self encode:*sampleBuffer];
Possibly, I'm a bit late but nevertheless, it could be helpful for others:
CGDisplayStreamCreateWithDispatchQueue(CGMainDisplayID(), 100, 100, k32BGRAPixelFormat, nil, self.screenCaptureQueue, ^(CGDisplayStreamFrameStatus status, uint64_t displayTime, IOSurfaceRef frameSurface, CGDisplayStreamUpdateRef updateRef) {
// The created pixel buffer retains the surface object.
CVPixelBufferRef pixelBuffer;
CVPixelBufferCreateWithIOSurface(NULL, frameSurface, NULL, &pixelBuffer);
// Create the video-type-specific description for the pixel buffer.
CMVideoFormatDescriptionRef videoFormatDescription;
CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoFormatDescription);
// All the necessary parts for creating a `CMSampleBuffer` are ready.
CMSampleBufferRef sampleBuffer;
CMSampleTimingInfo timingInfo;
CMSampleBufferCreateReadyWithImageBuffer(NULL, pixelBuffer, videoFormatDescription, &timingInfo, &sampleBuffer);
// Do the stuff
// Release the resources to let the frame surface be reused in the queue
// `kCGDisplayStreamQueueDepth` is responsible for the size of the queue
CFRelease(sampleBuffer);
CFRelease(pixelBuffer);
});

how to set Authentication setting data? VPN Mac OS Programatically

i am using this https://github.com/halo/macosvpn. Code run successfully and create new network but it's not showing Authentication Setting data which i Password e.g Shared Secret.
- (CFDictionaryRef) L2TPPPPConfig {
CFStringRef keys[4] = { NULL, NULL, NULL, NULL };
CFStringRef vals[4] = { NULL, NULL, NULL, NULL };
CFIndex count = 0;
keys[count] = kSCPropNetPPPCommRemoteAddress;
vals[count++] = (__bridge CFStringRef)self.endpoint;
keys[count] = kSCPropNetPPPAuthName;
vals[count++] = (__bridge CFStringRef)self.username;
keys[count] = kSCPropNetPPPAuthPassword;
vals[count++] = (__bridge CFStringRef)self.serviceID;
keys[count] = kSCPropNetPPPAuthPasswordEncryption;
vals[count++] = kSCValNetPPPAuthPasswordEncryptionKeychain;
return CFDictionaryCreate(NULL, (const void **)&keys, (const void **)&vals, count, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
Furthermore Server Address and Account Name set Successfully. i
These two lines look very suspicious:
keys[count] = kSCPropNetPPPAuthPassword;
vals[count++] = (__bridge CFStringRef)self.serviceID;
Shouldn't that second line be something like: "self.password"?
serviceID is something totally different than a password.

OpenCL - Kernel crashes on the second run

I am trying to run a code which works the first time but crashes the second time that it runs. The function which causes the crash is part of the class Octree_GPU and is this:
int Octree_GPU::runCreateNodeKernel(int length)
{
cl_uint nodeLength;
if(nodeNumsArray[length-1] == 0)
nodeLength = nodeAddArray[length-1];
else
nodeLength = nodeAddArray[length-1]+8;
nodeArray = (cl_uint*)malloc(sizeof(cl_uint)*nodeLength);
nodePointsArray = (cl_int*)malloc(sizeof(cl_uint)*nodeLength);
startIndexArray = (cl_int*)malloc(sizeof(cl_int)*nodeLength);
d_nodeAdd = clCreateBuffer(context, CL_MEM_READ_WRITE, sizeof(cl_uint)*length, NULL, &err);
d_nodeArray = clCreateBuffer(context,CL_MEM_READ_WRITE, sizeof(cl_uint)*temp_length, NULL, &err);
d_numPoints = clCreateBuffer(context, CL_MEM_READ_WRITE, sizeof(cl_uint)*length, NULL, &err);
d_pointIndex = clCreateBuffer(context, CL_MEM_READ_WRITE,sizeof(cl_uint)*length,NULL, &err);
d_nodePointsArray = clCreateBuffer(context, CL_MEM_READ_WRITE, sizeof(cl_int)*temp_length, NULL, &err);
d_nodeIndexArray = clCreateBuffer(context,CL_MEM_READ_WRITE, sizeof(cl_int)*temp_length, NULL, &err);
err |= clEnqueueWriteBuffer(commands, d_nodeAdd, CL_TRUE, 0, sizeof(cl_uint)*length, nodeAddArray, 0, NULL,NULL);
err |= clEnqueueWriteBuffer(commands, d_numPoints,CL_TRUE, 0, sizeof(cl_uint)*length,numPointsArray,0,NULL,NULL);
err |= clEnqueueWriteBuffer(commands, d_pointIndex, CL_TRUE, 0, sizeof(cl_uint)*length,pointStartIndexArray,0, NULL, NULL);
clFinish(commands);
err = clSetKernelArg(createNodeKernel, 0, sizeof(cl_mem), &d_odata);
err |= clSetKernelArg(createNodeKernel, 1, sizeof(cl_mem), &d_nodeNums);
err |= clSetKernelArg(createNodeKernel, 2, sizeof(cl_mem), &d_nodeAdd);
err |= clSetKernelArg(createNodeKernel, 3, sizeof(cl_mem), &d_numPoints);
err |= clSetKernelArg(createNodeKernel, 4, sizeof(cl_mem), &d_pointIndex);
err |= clSetKernelArg(createNodeKernel, 5, sizeof(cl_mem), &d_nodeArray);
err |= clSetKernelArg(createNodeKernel, 6, sizeof(cl_mem), &d_nodePointsArray);
err |= clSetKernelArg(createNodeKernel, 7, sizeof(cl_mem), &d_nodeIndexArray);
clFinish(commands);
if(err != CL_SUCCESS) {
printf("Cannot set Kernel Arg \n");
exit(1);
}
size_t global_size[1] = {limit-1};
err = clEnqueueNDRangeKernel(commands, createNodeKernel, 1, NULL, global_size, NULL, 0, NULL, NULL);
if(err != CL_SUCCESS) {
printf(" Kernel does not work \n");
exit(1);
}
clFinish(commands);
err = clEnqueueReadBuffer(commands, d_nodeArray, CL_TRUE, 0, sizeof(cl_uint)*temp_length, nodeArray, 0, NULL, NULL);
err|= clEnqueueReadBuffer(commands, d_nodePointsArray, CL_TRUE, 0, sizeof(cl_int)*nodeLength, nodePointsArray, 0, NULL, NULL);
err|= clEnqueueReadBuffer(commands, d_nodeIndexArray, CL_TRUE, 0, sizeof(cl_int)*nodeLength, startIndexArray, 0, NULL, NULL);
clFinish(commands);
clReleaseMemObject(d_nodeAdd);
clReleaseMemObject(d_numPoints);
clReleaseMemObject(d_nodeArray);
clReleaseMemObject(d_nodePointsArray);
clFinish(commands);
return 0;
}
Please note that d_odata and d_nodeNums have been declared in the previous functions. The kernel code is given below for the same:
__kernel void createNode(__global int* uniqueCode, __global int* nodeNums,__global int* nodeAdd, __global int* numPoints, __global int* pointIndex,__global int* nodeArray, __global int* nodePoints,__global int* nodeIndex)
{
int ig = get_global_id(0);
int add;
int num = uniqueCode[ig];
int pt = numPoints[ig];
int ind = pointIndex[ig];
int temp,j;
if(nodeNums[ig] == 8)
{
for(int i=0;i<8;i++)
{
temp = ((int)num/10)*10+i;
add = nodeAdd[ig] + i;
nodeArray[add] = temp;
nodePoints[add] = select(0, pt, temp==num);
nodeIndex[add] = select(-1, ind, temp==num);
barrier(CLK_LOCAL_MEM_FENCE);
}
}
else
{
j = num % 10;
nodeAdd[ig] = nodeAdd[ig-1];
add = nodeAdd[ig]+j;
nodePoints[add] = pt;
nodeIndex[add] = ind;
barrier(CLK_LOCAL_MEM_FENCE);
}
}
I have tried to find out why but have not succeeded. I might be overlooking something really simple. Thank you for your help.
I'm not 100% sure this is causing the crash, but where you've written
if(nodeNums[ig] == 8)
{
for(int i=0;i<8;i++)
{
barrier(CLK_LOCAL_MEM_FENCE);
}
}
else
{
barrier(CLK_LOCAL_MEM_FENCE);
}
This means that different threads in a work group will be executing different numbers of barriers, which may cause a hang/crash. A barrier (with CLK_LOCAL_MEM_FENCE) is for synchronising accesses to local memory, so all work items in a group must execute this before continuing
On a non crash note, it looks like you're using CLK_LOCAL_MEM_FENCE (ensure that local memory accesses are visible across threads) when you mean CLK_GLOBAL_MEM_FENCE (ensure that global memory accesses are visible across threads)
Also
nodeAdd[ig] = nodeAdd[ig-1];
Is not correct for ig == 0. This may not be causing the actual crash (because I've found that OpenCL can be unfortunately quite forgiving), but its worth fixing

SecKeychain load item

I want to store SMTP-Data from my Mac OSX application using the keychain. I read the Keychain Services Programming Guide of Apple and wrote this method to store the data:
- (BOOL)saveSMPTData
{
OSStatus err;
SecKeychainItemRef item = nil;
SecProtocolType protocol = kSecProtocolTypeSMTP;
const char *accessLabelUTF8 = [KEYCHAIN_NAME UTF8String];
const char *serverNameUTF8 = [self.serverName UTF8String];
const char *usernameUTF8 = [self.username UTF8String];
const char *passwordUTF8 = [self.password UTF8String];
SecAccessRef access = createAccess(KEYCHAIN_NAME);
SecKeychainAttribute attrs[] = {
{ kSecLabelItemAttr, (int)strlen(accessLabelUTF8), (char *)accessLabelUTF8 },
{ kSecAccountItemAttr, (int)strlen(usernameUTF8), (char *)usernameUTF8 },
{ kSecServerItemAttr, (int)strlen(serverNameUTF8), (char *)serverNameUTF8 },
{ kSecProtocolItemAttr, sizeof(SecProtocolType), (SecProtocolType *)&protocol }
};
SecKeychainAttributeList attributes = { sizeof(attrs) / sizeof(attrs[0]), attrs };
err = SecKeychainItemCreateFromContent(kSecInternetPasswordItemClass,
&attributes,
(int)strlen(passwordUTF8),
passwordUTF8,
NULL,
access,
&item);
if (access) CFRelease(access);
if (item) CFRelease(item);
return (err == noErr);
}
SecAccessRef createAccess(NSString *accessLabel)
{
OSStatus err;
SecAccessRef access = nil;
NSArray *trustedApplications = nil;
SecTrustedApplicationRef myself;
err = SecTrustedApplicationCreateFromPath(NULL, &myself);
trustedApplications = [NSArray arrayWithObjects:(__bridge id)myself, nil];
err = SecAccessCreate((__bridge CFStringRef)accessLabel,
(__bridge CFArrayRef)trustedApplications, &access);
if (err) return nil;
return access;
}
Of course I also want to load them. My first try looks like this:
- (BOOL)loadDataFromKeychain
{
uint32_t serverNameLength = 0;
const char *serverName = NULL;
uint32_t usernameLength = 0;
const char *username = NULL;
uint32_t passwordLength = 0;
void **password = NULL;
OSStatus err = SecKeychainFindInternetPassword(NULL,
serverNameLength, serverName,
0, NULL,
usernameLength, username,
0, NULL,
0, 0,
0,
&passwordLength, password,
NULL); // How do I get the ItemRef?
return (err == noErr);
}
But this does not work, and I think I know why not. I don’t know how to get the SecKeychainItemRef for the SecKeychainFindInternetPassword method.
Maybe anyone can help me?
Instead of declaring password a void **, declare it a void * and pass &password for the second-to-last parameter.
You probably don't need the SecKeychainItemRef for what you're trying to accomplish.
By the way, have you tried using Keychain Access to verify the items are getting into the keychain?