I'm implementing RSA-OAEP with the Windows CNG libraries. So far I have a full encryption/decryption flow working with CNG and can verify the results with OpenSSL. However, this only works if the hash function is the same as the one used for MGF1. If the two differ, my CNG implementation fails. For example, if the OpenSSL command is changed from:
pkeyutl -encrypt -in test.txt -pubin -inkey keypair.pem -out out.bin -pkeyopt rsa_padding_mode:oaep -pkeyopt rsa_oaep_md:sha256 -pkeyopt rsa_mgf1_md:sha256
To:
pkeyutl -encrypt -in test.txt -pubin -inkey keypair.pem -out out.bin -pkeyopt rsa_padding_mode:oaep -pkeyopt rsa_oaep_md:sha256 -pkeyopt rsa_mgf1_md:sha1
CNG fails to decrypt (note the change from sha256 to sha1 in the rsa_mgf1_md parameter). My guess is that I need to tell the CNG APIs to use SHA-1 as the mask generation function, but I can't figure out how to do so. So far my research has pointed to the CRYPT_RSAES_OAEP_PARAMETERS struct, which allows the mask generation function to be specified, but I haven't found a sample showing how to use these parameters with CNG.
Any help is deeply appreciated.
Here is my CNG code:
BCRYPT_OAEP_PADDING_INFO paddingParams = { BCRYPT_SHA256_ALGORITHM, NULL, 0 };

// Encryption: first call with a NULL output buffer to get the required size
status = BCryptEncrypt(hKey, pbInput, cbInput, &paddingParams, NULL /*pbIV*/, 0 /*cbIV*/, NULL /*pbOutput*/, 0 /*cbOutput*/, &cbBuffer, BCRYPT_PAD_OAEP);
if (!NT_SUCCESS(status))
{
    printf("Failed to get required size of buffer..status : %08x\n", status);
}
pbBuffer = (PUCHAR) LocalAlloc(0, cbBuffer);
status = BCryptEncrypt(hKey, pbInput, cbInput, &paddingParams, NULL /*pbIV*/, 0 /*cbIV*/, pbBuffer, cbBuffer, &cbBuffer, BCRYPT_PAD_OAEP);
if (!NT_SUCCESS(status))
{
    printf("Failed to encrypt data..status : %08x\n", status);
}

// Decryption: same size-query-then-allocate pattern
status = BCryptDecrypt(hKey, pbBuffer, cbBuffer, &paddingParams, NULL /*pbIV*/, 0 /*cbIV*/, NULL, 0, &cbBufferRaw, BCRYPT_PAD_OAEP);
if (!NT_SUCCESS(status))
{
    printf("Failed to get required size of buffer..status : %08x\n", status);
}
pBufferRaw = (PUCHAR) LocalAlloc(0, cbBufferRaw);
status = BCryptDecrypt(hKey, pbBuffer, cbBuffer, &paddingParams, NULL /*pbIV*/, 0 /*cbIV*/, pBufferRaw, cbBufferRaw, &cbBufferRaw, BCRYPT_PAD_OAEP);
if (!NT_SUCCESS(status))
{
    printf("Failed to decrypt data..status : %08x\n", status);
}
I am implementing Apple's App Attestation service.
As part of the process, I receive an EC key and a signature.
Sample key:
-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEd34IR9wYL76jLyZ148O/hjXo9iaF
z/q/xEMXCwYPy6yxbxYzWDZPegG4FH+snXaXQPYD6QIzZNY/kcMjIGtUTg==
-----END PUBLIC KEY-----
Sample signature:
MEUCIQDXR/22YAi90PUdKrtTHwigrDxWFoiCqPLB/Of1bZPCKQIgNLxFAeUU2x+FSWfhRGX0SOKUIDxPRoigsCHpJxgGXXU=
Sample sha256 hash:
S3i6LAEzew5SDjQbq59/FraEAvGDg9y7fRIfbnhHPf4=
If I put these into a couple of files like so:
System.IO.File.WriteAllBytes("/wherever/sig", Convert.FromBase64String(sampleSignature));
System.IO.File.WriteAllBytes("/wherever/hash", Convert.FromBase64String(sampleSha256Hash));
Then I can validate the signature with OpenSSL like so:
openssl dgst -sha256 -verify sampleKey.pem -signature /wherever/sig /wherever/hash
(the above outputs)
Verified OK
I can verify the signature using Bouncy Castle like so:
var bouncyCert = DotNetUtilities.FromX509Certificate(certificate);
var bouncyPk = (ECPublicKeyParameters)bouncyCert.GetPublicKey();
var verifier = SignerUtilities.GetSigner("SHA-256withECDSA");
verifier.Init(false, bouncyPk);
verifier.BlockUpdate(sha256HashByteArray, 0, sha256HashByteArray.Length);
var valid = verifier.VerifySignature(signature); // Happy days, this is true
Since I don't want to share my whole certificate here, the same result can be reproduced as follows:
// these are the values from the sample key shared at the start of the post
// as returned by BC. Note that .Net's Y byte array is completely different.
Org.BouncyCastle.Math.BigInteger x = new Org.BouncyCastle.Math.BigInteger(Convert.FromBase64String("d34IR9wYL76jLyZ148O/hjXo9iaFz/q/xEMXCwYPy6w="));
Org.BouncyCastle.Math.BigInteger y = new Org.BouncyCastle.Math.BigInteger(Convert.FromBase64String("ALFvFjNYNk96AbgUf6yddpdA9gPpAjNk1j+RwyMga1RO"));
X9ECParameters nistParams = NistNamedCurves.GetByName("P-256");
ECDomainParameters domainParameters = new ECDomainParameters(nistParams.Curve, nistParams.G, nistParams.N, nistParams.H, nistParams.GetSeed());
var G = nistParams.G;
Org.BouncyCastle.Math.EC.ECCurve curve = nistParams.Curve;
Org.BouncyCastle.Math.EC.ECPoint q = curve.CreatePoint(x, y);
ECPublicKeyParameters pubkeyParam = new ECPublicKeyParameters(q, domainParameters);
var verifier = SignerUtilities.GetSigner("SHA-256withECDSA");
verifier.Init(false, pubkeyParam);
verifier.BlockUpdate(sha256HashByteArray, 0, sha256HashByteArray.Length);
var valid = verifier.VerifySignature(signature); // again, happy days.
However, I really want to avoid using Bouncy Castle.
So I am trying to use the ECDsa class available in .NET Core:
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
var certificate = new X509Certificate2(cert);
var publicKey = certificate.GetECDsaPublicKey();
var valid = publicKey.VerifyHash(sha256HashByteArray, signature); // FALSE :(
If you want to run the above, here is a sample that creates the key without the whole certificate:
using System.Security.Cryptography;
var ecParams = new ECParameters();
ecParams.Curve = ECCurve.CreateFromValue("1.2.840.10045.3.1.7");
ecParams.Q.X = Convert.FromBase64String("d34IR9wYL76jLyZ148O/hjXo9iaFz/q/xEMXCwYPy6w=");
// I know this differs from the BC sample - I got the respective values from
// the certificates in the respective libraries, and it seems the way they
// format the coordinates is different.
ecParams.Q.Y = Convert.FromBase64String("sW8WM1g2T3oBuBR/rJ12l0D2A+kCM2TWP5HDIyBrVE4=");
var ecDsa = ECDsa.Create(ecParams);
var isValid = ecDsa.VerifyHash(sha256HashByteArray, signature); // FALSE :(
I tried using VerifyData() instead, feeding it the raw data and HashAlgorithmName.SHA256, with no luck.
I found a response here (https://stackoverflow.com/a/49449863/2057955) that suggests .NET expects the signature as an r,s concatenation, so I pulled r and s out of the DER sequence that I get back from my device (see the sample signature above), but that had no luck at all; I just can't get that 'true' back.
Question: how can I verify this EC signature using .NET Core on Linux/macOS (so I'm unable to use the ECDsaCng class)?
The signer returned by SignerUtilities.GetSigner() hashes implicitly, i.e. sha256HashByteArray is hashed again. Therefore, instead of ECDsa#VerifyHash() (which does not hash implicitly), the method ECDsa#VerifyData() (which hashes implicitly) must be used.
Also, the SignerUtilities.GetSigner() signer uses signatures in ASN.1 format, while ECDsa#VerifyData() expects a signature in r|s format (as you already figured out).
If both are taken into account, the verification is successful:
byte[] publicKey = Convert.FromBase64String("MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEd34IR9wYL76jLyZ148O/hjXo9iaFz/q/xEMXCwYPy6yxbxYzWDZPegG4FH+snXaXQPYD6QIzZNY/kcMjIGtUTg==");
byte[] sha256HashByteArray = Convert.FromBase64String("S3i6LAEzew5SDjQbq59/FraEAvGDg9y7fRIfbnhHPf4=");
byte[] signatureRS = Convert.FromBase64String("10f9tmAIvdD1HSq7Ux8IoKw8VhaIgqjywfzn9W2Twik0vEUB5RTbH4VJZ+FEZfRI4pQgPE9GiKCwIeknGAZddQ==");
var ecDsa = ECDsa.Create();
ecDsa.ImportSubjectPublicKeyInfo(publicKey, out _);
var isValid = ecDsa.VerifyData(sha256HashByteArray, signatureRS, HashAlgorithmName.SHA256);
Console.WriteLine(isValid); // True
Regarding the signature formats:
The posted signature in ASN.1 format
MEUCIQDXR/22YAi90PUdKrtTHwigrDxWFoiCqPLB/Of1bZPCKQIgNLxFAeUU2x+FSWfhRGX0SOKUIDxPRoigsCHpJxgGXXU=
is, hex encoded:
3045022100d747fdb66008bdd0f51d2abb531f08a0ac3c56168882a8f2c1fce7f56d93c229022034bc4501e514db1f854967e14465f448e294203c4f4688a0b021e92718065d75
From this, the signature in r|s format can be derived (see here) as:
d747fdb66008bdd0f51d2abb531f08a0ac3c56168882a8f2c1fce7f56d93c22934bc4501e514db1f854967e14465f448e294203c4f4688a0b021e92718065d75
or Base64 encoded:
10f9tmAIvdD1HSq7Ux8IoKw8VhaIgqjywfzn9W2Twik0vEUB5RTbH4VJZ+FEZfRI4pQgPE9GiKCwIeknGAZddQ==
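If you prefer to do that conversion in code rather than by hand, here is a minimal sketch (the class and method names are my own, not from any library, and the fixed 32-byte field size assumes P-256, the curve used here). On .NET 5 or later you could alternatively call the ECDsa.VerifyData overload that takes DSASignatureFormat.Rfc3279DerSequence and skip the conversion entirely.

using System;
using System.Formats.Asn1; // .NET 5+, or the System.Formats.Asn1 NuGet package

// Illustrative helper, not part of the original post.
static class EcdsaSignatureConverter
{
    // Converts an ASN.1/DER ECDSA signature (SEQUENCE of two INTEGERs) into the
    // fixed-size r|s concatenation that ECDsa.VerifyData/VerifyHash expect.
    public static byte[] DerToRs(byte[] derSignature, int fieldSizeBytes = 32) // 32 bytes per element for P-256
    {
        AsnReader sequence = new AsnReader(derSignature, AsnEncodingRules.DER).ReadSequence();
        byte[] r = sequence.ReadIntegerBytes().ToArray();
        byte[] s = sequence.ReadIntegerBytes().ToArray();

        byte[] rs = new byte[2 * fieldSizeBytes];
        CopyPadded(r, rs, 0, fieldSizeBytes);
        CopyPadded(s, rs, fieldSizeBytes, fieldSizeBytes);
        return rs;
    }

    // Drops a possible leading 0x00 sign byte and left-pads the value to the field size.
    private static void CopyPadded(byte[] value, byte[] dest, int offset, int size)
    {
        int start = value.Length > size ? value.Length - size : 0;
        int length = value.Length - start;
        Buffer.BlockCopy(value, start, dest, offset + (size - length), length);
    }
}

With that helper, the signatureRS value above could be computed directly from the posted Base64 DER signature instead of being pasted in pre-converted.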
Is there a Microsoft API equivalent of the following OpenSSL command (which extracts the private key from a .pfx file and saves it as a new file)?
openssl pkcs12 -in mycert.pfx -nocerts -out mycert.key -passin pass:Password -passout pass:Password
Try this:
// Uses Bouncy Castle (Org.BouncyCastle.OpenSsl.PemWriter / DotNetUtilities)
// to write the certificate's RSA private key as a PEM string.
RSACryptoServiceProvider rsa = (RSACryptoServiceProvider)YOUR_CERTIFICATE.PrivateKey;
MemoryStream memoryStream = new MemoryStream();
TextWriter streamWriter = new StreamWriter(memoryStream);
PemWriter pemWriter = new PemWriter(streamWriter);
AsymmetricCipherKeyPair keyPair = DotNetUtilities.GetRsaKeyPair(rsa);
pemWriter.WriteObject(keyPair.Private);
streamWriter.Flush();
string output = Encoding.ASCII.GetString(memoryStream.GetBuffer()).Trim();
// GetBuffer() may contain trailing zero bytes, so cut the string off right after the PEM footer.
int index_of_footer = output.IndexOf("-----END RSA PRIVATE KEY-----");
memoryStream.Close();
streamWriter.Close();
string PrivKey = output.Substring(0, index_of_footer + 29); // 29 = length of the footer line
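If you are on .NET 5 or later and would rather avoid Bouncy Castle entirely, a rough in-box equivalent of the OpenSSL command might look like the sketch below, assuming the PFX is loaded with an exportable key. Note that, unlike the OpenSSL command, this writes the key unencrypted; RSA.ExportEncryptedPkcs8PrivateKey exists if you need the -passout behaviour.

using System.IO;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;

// Load the PFX; the key must be imported as exportable for ExportPkcs8PrivateKey() to work.
var pfx = new X509Certificate2("mycert.pfx", "Password", X509KeyStorageFlags.Exportable);

using RSA rsa = pfx.GetRSAPrivateKey();
byte[] pkcs8 = rsa.ExportPkcs8PrivateKey();           // unencrypted PKCS#8 DER
char[] pem = PemEncoding.Write("PRIVATE KEY", pkcs8); // wraps it as a "-----BEGIN PRIVATE KEY-----" block
File.WriteAllText("mycert.key", new string(pem));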
I am using this library, node-jwks-rsa, to fetch the JWT keys from my Auth0 jwks.json file in order to verify that the id_token my application retrieves after authentication actually comes from my auth provider.
Under the hood it uses this method to build a public key PEM:
export function certToPEM(cert) {
  cert = cert.match(/.{1,64}/g).join('\n');
  cert = `-----BEGIN CERTIFICATE-----\n${cert}\n-----END CERTIFICATE-----\n`;
  return cert;
}
(using the x5c value from the jwks.json file as the argument),
which I then use in combination with jsonwebtoken to verify that the JWT (id_token) is valid.
How is this method of verification different from generating a private key (RSA) from the modulus and exponent in the jwks.json file and using that for verification instead? (As an example, see this library.)
Additionally, here is a function, as a demonstration, that generates a PEM from a modulus and exponent (taken from http://stackoverflow.com/questions/18835132/xml-to-pem-in-node-js):
export function rsaPublicKeyToPEM(modulusB64, exponentB64) {
  // prepadSigned() and encodeLengthHex() are helpers defined in the linked answer.
  const modulus = Buffer.from(modulusB64, 'base64');
  const exponent = Buffer.from(exponentB64, 'base64');
  const modulusHex = prepadSigned(modulus.toString('hex'));
  const exponentHex = prepadSigned(exponent.toString('hex'));
  const modlen = modulusHex.length / 2;
  const explen = exponentHex.length / 2;
  const encodedModlen = encodeLengthHex(modlen);
  const encodedExplen = encodeLengthHex(explen);
  const encodedPubkey = '30' +
    encodeLengthHex(modlen + explen + encodedModlen.length / 2 + encodedExplen.length / 2 + 2) +
    '02' + encodedModlen + modulusHex +
    '02' + encodedExplen + exponentHex;
  const der = Buffer.from(encodedPubkey, 'hex')
    .toString('base64');
  let pem = `-----BEGIN RSA PUBLIC KEY-----\n`;
  pem += `${der.match(/.{1,64}/g).join('\n')}`;
  pem += `\n-----END RSA PUBLIC KEY-----\n`;
  return pem;
}
The aforementioned jsonwebtoken library can verify a JWT using either -- but why? If both of these verification methods can validate a JWT signature, why do they both exist? What are the trade-offs between them? Is one more secure than the other? Which should I use to verify most thoroughly?
With an RSA asymmetric key pair, the JWT is signed with the private key and verified with the public key. You cannot verify a digital signature with the private key.
The modulus and exponent are the components of the public key, and you can use them to build the public key in PEM format, which is a Base64 representation of the public key (modulus and exponent) encoded in DER binary format. You can use PEM, DER, or the modulus and exponent directly, because they all contain the same information.
But nobody can build the private key from the modulus and exponent alone. That would require the private RSA elements, which must be kept secret so that no one can sign on your behalf.
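To make the equivalence concrete, here is a minimal sketch of the modulus-and-exponent route expressed with .NET's RSAParameters, purely as an illustration of the concept (the JwksRsaVerifier helper and its parameter names are hypothetical; the inputs are the Base64URL-decoded n and e values from the JWKS entry, the JWT's signing input, and its signature):

using System;
using System.Security.Cryptography;

// Illustrative sketch, not from any library.
static class JwksRsaVerifier
{
    // modulus/exponent: Base64URL-decoded "n" and "e" from the JWKS entry.
    // signingInput:     ASCII bytes of "<base64url header>.<base64url payload>".
    // signature:        Base64URL-decoded JWT signature.
    public static bool VerifyRs256(byte[] modulus, byte[] exponent,
                                   byte[] signingInput, byte[] signature)
    {
        using var rsa = RSA.Create(new RSAParameters { Modulus = modulus, Exponent = exponent });

        // The same key could also be rendered as a SubjectPublicKeyInfo PEM, interchangeable
        // with the PEM built from the certificate, because both boil down to modulus + exponent:
        // string pem = new string(PemEncoding.Write("PUBLIC KEY", rsa.ExportSubjectPublicKeyInfo()));

        // RS256 is RSASSA-PKCS1-v1_5 with SHA-256.
        return rsa.VerifyData(signingInput, signature,
                              HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
    }
}

Whether you start from the certificate or from n and e, you end up with the same public key, which is why both verification paths work.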
I'm currently working on a Java TLS server, and I'm trying to get the following cipher suite to work: TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA.
When I test it using openssl s_client, I get the following error after the ServerKeyExchange message:
140735242416208:error:1414D172:SSL routines:tls12_check_peer_sigalg:wrong signature type:t1_lib.c:1130:
Here is the TLS message as seen in Wireshark.
The handshake fails with a decode_error fatal alert.
So I guess the client doesn't like the chosen signature algorithm.
But I am only using the default SignatureAndHashAlgorithm for now, as per RFC 5246 Section 7.4.1.4.1:
If the negotiated key exchange algorithm is one of (RSA, DHE_RSA,
DH_RSA, RSA_PSK, ECDH_RSA, ECDHE_RSA), behave as if client had sent
the value {sha1,rsa}.
(I'm still checking whether the client does offer these default values, though.)
Since I'm doing ECDHE_RSA, I believe I should hash and sign the ServerECDHParams as per RFC 4492 Section 5.4 (first post here, so only 2 links, sorry :) )
ServerKeyExchange.signed_params.sha_hash
    SHA(ClientHello.random + ServerHello.random +
        ServerKeyExchange.params);

struct {
    select (KeyExchangeAlgorithm) {
        case ec_diffie_hellman:
            ServerECDHParams    params;
            Signature           signed_params;
    };
} ServerKeyExchange;
And I should do this as per RFC 2246, Section 7.4.3:
select (SignatureAlgorithm) {
    case rsa:
        digitally-signed struct {
            opaque md5_hash[16];
            opaque sha_hash[20];
        };
} Signature;

md5_hash
    MD5(ClientHello.random + ServerHello.random + ServerParams);
sha_hash
    SHA(ClientHello.random + ServerHello.random + ServerParams);
My Java code for signing the server params:
private byte[] getSignedParams(ChannelBuffer params)
        throws NoSuchAlgorithmException, DigestException,
        SignatureException, InvalidKeyException {
    byte[] signedParams = null;
    ChannelBuffer signAlg = ChannelBuffers.buffer(2);
    MessageDigest md5 = MessageDigest.getInstance("MD5");
    MessageDigest sha = MessageDigest.getInstance("SHA-1");
    switch (session.cipherSuite.sign) {
    case rsa:
        signAlg.writeByte(2); // 2 for SHA-1
        sha.update(clientRandom);
        sha.update(serverRandom);
        sha.update(params.toByteBuffer());
        md5.update(clientRandom);
        md5.update(serverRandom);
        md5.update(params.toByteBuffer());
        signedParams = concat(md5.digest(), sha.digest());
        break;
    }
    signAlg.writeByte(session.cipherSuite.sign.value); // for RSA the byte is 1
    ChannelBuffer signLength = ChannelBuffers.buffer(2);
    signLength.writeShort(signedParams.length);
    return concat(signAlg.array(), concat(signLength.array(), signedParams));
}
So my question is basically: am I wrong about all this? And if so, what am I doing wrong?
Thank you for your time! :)
It's me again. I seem to have fixed my particular problem; two things I noted:
Regarding my Java code, the MessageDigest class only does hashing, not signing, so I now use the Signature class instead.
It seems that in TLS 1.2 I only need to sign using SHA-1 (the {sha1, rsa} default); the MD5+SHA-1 concatenation from earlier TLS versions is not used at all.
The second item is something I should have found in the RFC but didn't (maybe it is written somewhere, I don't know). I think this could be useful for people even if they're not doing Java ;)
Here is how my code looks now:
private byte[] getSignedParams(ChannelBuffer params)
        throws NoSuchAlgorithmException, DigestException,
        SignatureException, InvalidKeyException {
    byte[] signedParams = null;
    Signature signature = Signature.getInstance(selectedSignAndHash.toString());
    ChannelBuffer signAlg = ChannelBuffers.buffer(2);
    signAlg.writeByte(selectedSignAndHash.hash.value);
    signature.initSign(privateKey);
    signature.update(clientRandom);
    signature.update(serverRandom);
    signature.update(params.toByteBuffer());
    signedParams = signature.sign();
    signAlg.writeByte(session.cipherSuite.sign.value);
    ChannelBuffer signLength = ChannelBuffers.buffer(2);
    signLength.writeShort(signedParams.length);
    return concat(signAlg.array(), concat(signLength.array(), signedParams));
}
The code is different because, in between, I added a function to choose the SignatureAndHashAlgorithm from the list the client provides. But you could modify this to always respond with SHA1withRSA, as this seems to be the default SignatureAndHashAlgorithm.
I'm using OpenSSL to create a PKCS#12 file in a Mac project.
The method below returns null in the customer's environment but not in mine, and I'm unable to reproduce the problem locally.
Here is the code. What do you think? Should I install the OpenSSL library in the customer's environment? I'm new to this library.
Thanks.
#include "PKCS12Util.h"
BUF_MEM* createPKCS12File(char* pkcs7_pem, BIO* pkey_bio, char* password, char* name) {
    X509 *cert;
    EVP_PKEY* pkey;
    STACK_OF(X509) *cacert = sk_X509_new_null();
    PKCS12 *pk12;
    if (BIO_eof(pkey_bio)) {
        BIO_reset(pkey_bio);
    }
    pkey = PEM_read_bio_PrivateKey(pkey_bio, NULL, NULL, NULL);
    if (!pkey) {
        fprintf(stderr, "Error constructing pkey from pkey_bio\n");
        ERR_print_errors_fp(stderr);
    }
    SSLeay_add_all_algorithms();
    ERR_load_crypto_strings();
    pkcs7_pem = make_PEM(pkcs7_pem);
    BIO *pkcs7_pem_bio = BIO_new_mem_buf((void *)pkcs7_pem, (int)strlen(pkcs7_pem));
    PKCS7 *pkcs7 = PEM_read_bio_PKCS7(pkcs7_pem_bio, NULL, NULL, NULL);
    if (!pkcs7) {
        fprintf(stderr, "Error:\n");
        ERR_print_errors_fp(stderr);
    }
    STACK_OF(X509) *pk7_certs = pkcs7->d.sign->cert;
    // the first cert is the CA root cert, the last one is the client cert
    cert = sk_X509_value(pk7_certs, sk_X509_num(pk7_certs) - 1);
    sk_X509_push(cacert, sk_X509_value(pk7_certs, 0));
    pk12 = PKCS12_create(password, name, pkey, cert, cacert, 0, 0, 0, 0, 0);
    if (!pk12) {
        fprintf(stderr, "Error creating PKCS#12 structure\n");
        ERR_print_errors_fp(stderr);
        return NULL;
    }
    BIO* pk12_bio = BIO_new(BIO_s_mem());
    i2d_PKCS12_bio(pk12_bio, pk12);
    // get the BUF_MEM from the BIO to return it
    BUF_MEM *bptr;
    BIO_get_mem_ptr(pk12_bio, &bptr);
    BIO_set_close(pk12_bio, BIO_NOCLOSE); // so BIO_free() leaves the BUF_MEM alone
    PKCS12_free(pk12);
    BIO_free(pkcs7_pem_bio);
    BIO_free(pk12_bio);
    return bptr;
}
I found the problem: I was trying to package a private key together with a mismatched intermediate certificate in the certificate chain.