For industrial purposes, I want to decrypt an AES-encrypted message whose key is itself RSA-encrypted, in C. At first I thought of doing it step by step with the OpenSSL libcrypto library: first RSA-decrypt the key, then AES-decrypt the data.
I have found out that the EVP functions are commonly considered a better way to do this, since they do what the low-level functions do, but correctly.
Here is how I see the flow of the program:
Initialize OpenSSL;
Read and store the RSA private key;
Initialize the decryption by specifying the decryption algorithm (AES) and the private key;
Update the decryption by giving it the data, the key, and their lengths;
Finally decrypt the data and return it.
I have been quite confused by the fact that, so far, we do not intend to use any IV or AAD (although an IV might come up later in the project). I have followed this guide, but it is not very clear and does not fit the way I use EVP.
So here is my actual code:
#include <openssl/evp.h>
#include <openssl/conf.h>
#include <openssl/pem.h>
#include <openssl/rsa.h>
#include <openssl/aes.h>
#include <openssl/err.h>
#include "openssl\applink.c"
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>
const char PRIVATE_KEY_PATH[] = "C:/Users/Local_user/privateKey.pem";
EVP_PKEY* initializePrivateKey(void)
{
FILE* privateKeyfile;
if ((privateKeyfile = fopen(PRIVATE_KEY_PATH, "r")) == NULL) // Check PEM file opening
{
perror("Error while trying to access to private key.\n");
return NULL;
}
RSA *rsaPrivateKey = RSA_new();
EVP_PKEY *privateKey = EVP_PKEY_new();
if ((rsaPrivateKey = PEM_read_RSAPrivateKey(privateKeyfile, &rsaPrivateKey, NULL, NULL)) == NULL) // Check PEM file reading
{
fprintf(stderr, "Error loading RSA Private Key File.\n");
ERR_print_errors_fp(stderr);
return NULL;
}
if (!EVP_PKEY_assign_RSA(privateKey, rsaPrivateKey))
{
fprintf(stderr, "Error when initializing EVP private key.\n");
ERR_print_errors_fp(stderr);
return NULL;
}
return privateKey;
}
const uint8_t* decodeWrappingKey(uint8_t const* data, const size_t data_len, uint8_t const* wrappingKey, const size_t wrappingKey_len)
{
// Start Decryption
EVP_CIPHER_CTX *ctx;
if (!(ctx = EVP_CIPHER_CTX_new())) // Initialize context
{
fprintf(stderr, "Error when initializing context.\n");
ERR_print_errors_fp(stderr);
return NULL;
}
EVP_PKEY *privateKey = initializePrivateKey();
if (1 != EVP_DecryptInit_ex(ctx, EVP_aes_256_gcm(), NULL, privateKey, NULL)) // Initialize decryption
{
fprintf(stderr, "Error when initializing decryption.\n");
ERR_print_errors_fp(stderr);
return NULL;
}
uint8_t* res;
if ((res = calloc(data_len, sizeof(uint8_t))) == NULL) // Check memory allocating
{
perror("Memory allocating error ");
return NULL;
}
puts("Initialization done. Decoding..\n");
size_t res_len = 0;
if (1 != EVP_DecryptUpdate(ctx, res, &res_len, data, data_len))
{
fprintf(stderr, "Error when preparing decryption.\n");
ERR_print_errors_fp(stderr);
}
if (1 != EVP_DecryptFinal_ex(ctx, res, &res_len))
{
fprintf(stderr, "Error when decrypting.\n");
ERR_print_errors_fp(stderr);
}
return res;
}
void hexToBytes(uint8_t *des, char const *source, const size_t size) {
for (int i = 0; i < size - 1; i += 2)
sscanf(source + i, "%02x", des + (i / 2));
}
int main(void) {
char const *strWrap = "5f82c48f85054ef6a3b2621819dd0e969030c79cc00deb89........";
char const *strData = "ca1518d44716e3a4588af741982f29ad0a3e7a8d67.....";
uint8_t *wrap = calloc(strlen(strWrap), sizeof(uint8_t));
hexToBytes(wrap, strWrap, strlen(strWrap)); // Converts string to raw data
uint8_t *data = calloc(strlen(strData), sizeof(uint8_t));
hexToBytes(data, strData, strlen(strData));
/* Load the human readable error strings for libcrypto */
ERR_load_crypto_strings();
/* Load all digest and cipher algorithms */
OpenSSL_add_all_algorithms();
/* Load config file, and other important initialisation */
OPENSSL_config(NULL);
const uint8_t *res = decodeWrappingKey(data, strlen(strData) / 2, wrap, strlen(strWrap) / 2);
if (res == NULL)
return 1;
return 0;
}
My output is the following:
Initialization done. Decoding..
Error when preparing decryption.
Error when decrypting.
Obviously it fails when updating and finalising the decryption, but I can't figure out why, and ERR_print_errors_fp(stderr), which has always worked for me so far, seems to print nothing.
Here is a complete working example of how you can encrypt a key using RSA, and encrypt a message using that key using AES, followed by the subsequent decryption of those things. It assumes AES-256-CBC is being used. If you want to use AES-256-GCM instead then you will need to make some changes to get and set the tag (ask me if you need some pointers on how to do this). It also assumes that the RSA encryption is done with PKCS#1 padding (which is all that the EVP_Seal* APIs support). If you need some other kind of padding then you will need to use a different method. Finally it assumes you are using OpenSSL 1.1.0. If you are using 1.0.2 then some changes will probably be necessary (at least you will need to explicitly init and de-init the library - that isn't required in 1.1.0).
The code reads the RSA private and public keys from files called privkey.pem and pubkey.pem which are in the current working directory. I generated these files like this:
openssl genrsa -out privkey.pem 2048
openssl rsa -in privkey.pem -pubout -out pubkey.pem
I've tested this on Linux only. The code is as follows:
#include <openssl/evp.h>
#include <openssl/pem.h>
#include <openssl/err.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
static int envelope_seal(EVP_PKEY *pub_key, unsigned char *plaintext,
int plaintext_len, unsigned char **encrypted_key,
int *encrypted_key_len, unsigned char **iv,
int *iv_len, unsigned char **ciphertext,
int *ciphertext_len)
{
EVP_CIPHER_CTX *ctx;
int len, ret = 0;
const EVP_CIPHER *type = EVP_aes_256_cbc();
unsigned char *tmpiv = NULL, *tmpenc_key = NULL, *tmpctxt = NULL;
if((ctx = EVP_CIPHER_CTX_new()) == NULL)
return 0;
*iv_len = EVP_CIPHER_iv_length(type);
if ((tmpiv = malloc(*iv_len)) == NULL)
goto err;
if ((tmpenc_key = malloc(EVP_PKEY_size(pub_key))) == NULL)
goto err;
if ((tmpctxt = malloc(plaintext_len + EVP_CIPHER_block_size(type)))
== NULL)
goto err;
if(EVP_SealInit(ctx, type, &tmpenc_key, encrypted_key_len, tmpiv, &pub_key,
1) != 1)
goto err;
if(EVP_SealUpdate(ctx, tmpctxt, &len, plaintext, plaintext_len) != 1)
goto err;
*ciphertext_len = len;
if(EVP_SealFinal(ctx, tmpctxt + len, &len) != 1)
goto err;
*ciphertext_len += len;
*iv = tmpiv;
*encrypted_key = tmpenc_key;
*ciphertext = tmpctxt;
tmpiv = NULL;
tmpenc_key = NULL;
tmpctxt = NULL;
ret = 1;
err:
EVP_CIPHER_CTX_free(ctx);
free(tmpiv);
free(tmpenc_key);
free(tmpctxt);
return ret;
}
int envelope_open(EVP_PKEY *priv_key, unsigned char *ciphertext,
int ciphertext_len, unsigned char *encrypted_key,
int encrypted_key_len, unsigned char *iv,
unsigned char **plaintext, int *plaintext_len)
{
EVP_CIPHER_CTX *ctx;
int len, ret = 0;
unsigned char *tmpptxt = NULL;
if((ctx = EVP_CIPHER_CTX_new()) == NULL)
return 0;
if ((tmpptxt = malloc(ciphertext_len)) == NULL)
goto err;
if(EVP_OpenInit(ctx, EVP_aes_256_cbc(), encrypted_key, encrypted_key_len,
iv, priv_key) != 1)
goto err;
if(EVP_OpenUpdate(ctx, tmpptxt, &len, ciphertext, ciphertext_len) != 1)
goto err;
*plaintext_len = len;
if(EVP_OpenFinal(ctx, tmpptxt + len, &len) != 1)
goto err;
*plaintext_len += len;
*plaintext = tmpptxt;
tmpptxt = NULL;
ret = 1;
err:
EVP_CIPHER_CTX_free(ctx);
free(tmpptxt);
return ret;
}
int main(void)
{
EVP_PKEY *pubkey = NULL, *privkey = NULL;
FILE *pubkeyfile, *privkeyfile;
int ret = 1;
unsigned char *iv = NULL, *message = "Hello World!\n";
unsigned char *enc_key = NULL, *ciphertext = NULL, *plaintext = NULL;
int iv_len = 0, enc_key_len = 0, ciphertext_len = 0, plaintext_len = 0, i;
if ((pubkeyfile = fopen("pubkey.pem", "r")) == NULL) {
printf("Failed to open public key for reading\n");
goto err;
}
if ((pubkey = PEM_read_PUBKEY(pubkeyfile, &pubkey, NULL, NULL)) == NULL) {
fclose(pubkeyfile);
goto err;
}
fclose(pubkeyfile);
if ((privkeyfile = fopen("privkey.pem", "r")) == NULL) {
printf("Failed to open private key for reading\n");
goto err;
}
if ((privkey = PEM_read_PrivateKey(privkeyfile, &privkey, NULL, NULL))
== NULL) {
fclose(privkeyfile);
goto err;
}
fclose(privkeyfile);
if (!envelope_seal(pubkey, message, strlen(message), &enc_key, &enc_key_len,
&iv, &iv_len, &ciphertext, &ciphertext_len))
goto err;
printf("Ciphertext:\n");
for (i = 0; i < ciphertext_len; i++)
printf("%02x", ciphertext[i]);
printf("\n");
printf("Encrypted Key:\n");
for (i = 0; i < enc_key_len; i++)
printf("%02x", enc_key[i]);
printf("\n");
printf("IV:\n");
for (i = 0; i < iv_len; i++)
printf("%02x", iv[i]);
printf("\n");
if (!envelope_open(privkey, ciphertext, ciphertext_len, enc_key,
enc_key_len, iv, &plaintext, &plaintext_len))
goto err;
plaintext[plaintext_len] = '\0';
printf("Plaintext: %s\n", plaintext);
ret = 0;
err:
if (ret != 0) {
printf("Error\n");
ERR_print_errors_fp(stdout);
}
EVP_PKEY_free(pubkey);
EVP_PKEY_free(privkey);
free(iv);
free(enc_key);
free(ciphertext);
free(plaintext);
return ret;
}
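As noted above, switching this example to AES-256-GCM mainly means getting the authentication tag after sealing and setting it before the final open call. Here is a minimal sketch of just those extra steps as I understand them, assuming the rest of the seal/open flow stays as in the example; the helper names are mine, and the 16-byte tag would need to be transported alongside the ciphertext:

#include <openssl/evp.h>

/* Call after EVP_SealFinal() has succeeded to extract the GCM tag. */
static int get_gcm_tag(EVP_CIPHER_CTX *ctx, unsigned char tag[16])
{
    return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_GET_TAG, 16, tag) == 1;
}

/* Call before EVP_OpenFinal(); the final call then fails if the tag
   does not verify. */
static int set_gcm_tag(EVP_CIPHER_CTX *ctx, unsigned char tag[16])
{
    return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_SET_TAG, 16, tag) == 1;
}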
Your key is encrypted with RSA, so you first need to decrypt it with the RSA APIs, such as RSA_private_decrypt, not with the EVP* APIs.
Once you have decrypted the key, you use it with the EVP* APIs to decrypt the data with AES.
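To make that concrete, here is a minimal sketch of the two-step flow under some assumptions of mine: the wrapped key is a 256-bit AES key encrypted with PKCS#1 v1.5 padding, and the data is AES-256-CBC with an IV you already have. The helper name is hypothetical; adjust the padding, cipher and IV handling to whatever the sender actually used.

#include <openssl/rsa.h>
#include <openssl/evp.h>

/* plaintext must be at least ciphertext_len bytes. Returns 1 on success. */
static int unwrap_and_decrypt(RSA *rsa_priv,
                              const unsigned char *wrapped_key, int wrapped_len,
                              const unsigned char *iv,
                              const unsigned char *ciphertext, int ciphertext_len,
                              unsigned char *plaintext, int *plaintext_len)
{
    unsigned char aes_key[256];   /* large enough for any RSA modulus used here */
    int key_len, len, total = 0, ok = 0;
    EVP_CIPHER_CTX *ctx = NULL;

    /* Step 1: RSA-decrypt the wrapped AES key. */
    key_len = RSA_private_decrypt(wrapped_len, wrapped_key, aes_key,
                                  rsa_priv, RSA_PKCS1_PADDING);
    if (key_len != 32)            /* expecting a 256-bit AES key */
        goto end;

    /* Step 2: AES-decrypt the payload with the recovered key. */
    if ((ctx = EVP_CIPHER_CTX_new()) == NULL)
        goto end;
    if (EVP_DecryptInit_ex(ctx, EVP_aes_256_cbc(), NULL, aes_key, iv) != 1)
        goto end;
    if (EVP_DecryptUpdate(ctx, plaintext, &len, ciphertext, ciphertext_len) != 1)
        goto end;
    total = len;
    if (EVP_DecryptFinal_ex(ctx, plaintext + len, &len) != 1)
        goto end;
    total += len;
    *plaintext_len = total;
    ok = 1;
end:
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}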
I am developing an XMPP client using libmesode (a fork of libstrophe).
I want to secure the authentication with certificates.
The server side has a certificate, server.pem.
The client side has its certificate, client.pem.
In libmesode, the functions used to check the certificate are:
tls_t *tls_new(xmpp_ctx_t *ctx, sock_t sock, xmpp_certfail_handler certfail_handler, char *tls_cert_path)
{
_xmppctx = ctx;
_certfail_handler = certfail_handler;
_cert_handled = 0;
_last_cb_res = 0;
tls_t *tls = xmpp_alloc(ctx, sizeof(*tls));
if (tls) {
int ret;
memset(tls, 0, sizeof(*tls));
tls->ctx = ctx;
tls->sock = sock;
tls->ssl_ctx = SSL_CTX_new(SSLv23_client_method());
if (tls->ssl_ctx == NULL)
goto err;
SSL_CTX_set_client_cert_cb(tls->ssl_ctx, NULL);
SSL_CTX_set_mode(tls->ssl_ctx, SSL_MODE_ENABLE_PARTIAL_WRITE);
SSL_CTX_set_verify(tls->ssl_ctx, SSL_VERIFY_PEER, verify_callback);
if (tls_cert_path) {
SSL_CTX_load_verify_locations(tls->ssl_ctx, NULL, tls_cert_path);
//SSL_CTX_use_certificate_file(tls->ssl_ctx, certif, SSL_FILETYPE_PEM);
}
tls->ssl = SSL_new(tls->ssl_ctx);
if (tls->ssl == NULL)
goto err_free_ctx;
ret = SSL_set_fd(tls->ssl, sock);
if (ret <= 0)
goto err_free_ssl;
}
return tls;
err_free_ssl:
SSL_free(tls->ssl);
err_free_ctx:
SSL_CTX_free(tls->ssl_ctx);
err:
xmpp_free(ctx, tls);
_tls_log_error(ctx);
return NULL;
}
static int
verify_callback(int preverify_ok, X509_STORE_CTX *x509_ctx)
{
const STACK_OF(X509) *sk = X509_STORE_CTX_get1_chain(x509_ctx);
int slen = sk_X509_num(sk);
unsigned i;
X509 *certsk;
xmpp_debug(_xmppctx, "TLS", "STACK");
for(i=0; i<slen; i++) {
certsk = sk_X509_value(sk, i);
_print_certificate(certsk);
}
xmpp_debug(_xmppctx, "TLS", "ENDSTACK");
if (preverify_ok) {
sk_X509_pop_free(sk, X509_free);
printf("=========> 1\n");
return 1;
} else if (_cert_handled) {
if (_last_cb_res == 0) {
X509_STORE_CTX_set_error(x509_ctx, X509_V_ERR_APPLICATION_VERIFICATION);
}
sk_X509_pop_free(sk, X509_free);
return _last_cb_res;
} else {
int err = X509_STORE_CTX_get_error(x509_ctx);
const char *errstr = X509_verify_cert_error_string(err);
xmpp_debug(_xmppctx, "TLS", "ERROR: %s", errstr);
X509 *user_cert = sk_X509_value(sk, 0);
struct _tlscert_t *tlscert = _x509_to_tlscert(_xmppctx, user_cert);
int cb_res = 0;
if (_certfail_handler) {
cb_res = _certfail_handler(tlscert, errstr);
}
xmpp_conn_free_tlscert(_xmppctx, tlscert);
_cert_handled = 1;
_last_cb_res = cb_res;
if (cb_res == 0) {
X509_STORE_CTX_set_error(x509_ctx, X509_V_ERR_APPLICATION_VERIFICATION);
}
sk_X509_pop_free(sk, X509_free);
return cb_res;
}
}
When the XMPP client connects to the ejabberd XMPP server over TLS, OpenSSL detects that the server certificate is self-signed but continues to connect anyway.
The second thing is: OpenSSL does not check the server certificate against the client certificate to determine whether the server certificate is trusted or not.
Does OpenSSL perform these checks?
1) Disconnect if the certificate is self-signed.
2) Check whether the certificate is trusted.
The following capture shows that the server sends its certificate and then the client sends the key.
Should this key be taken from the client certificate?
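For comparison, this is a minimal sketch (not libmesode code; "ca.pem" and the already-connected socket fd are placeholders) of how I understand a plain OpenSSL client can be made to fail the handshake on an untrusted or self-signed server certificate:

#include <openssl/ssl.h>

/* Sketch only: refuse untrusted or self-signed server certificates. */
static SSL *connect_strict(int fd)
{
    SSL_CTX *ctx = SSL_CTX_new(SSLv23_client_method());
    SSL *ssl = NULL;

    if (ctx == NULL)
        return NULL;

    /* Trust anchors: the server certificate must chain to one of these. */
    if (SSL_CTX_load_verify_locations(ctx, "ca.pem", NULL) != 1)
        goto err;

    /* With SSL_VERIFY_PEER and no overriding callback, the handshake
       itself fails when verification fails (e.g. a self-signed cert). */
    SSL_CTX_set_verify(ctx, SSL_VERIFY_PEER, NULL);

    if ((ssl = SSL_new(ctx)) == NULL)
        goto err;
    if (SSL_set_fd(ssl, fd) != 1)
        goto err;
    if (SSL_connect(ssl) != 1 || SSL_get_verify_result(ssl) != X509_V_OK)
        goto err;

    SSL_CTX_free(ctx);   /* the SSL object holds its own reference */
    return ssl;
err:
    if (ssl != NULL)
        SSL_free(ssl);
    SSL_CTX_free(ctx);
    return NULL;
}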
I'm trying to debug code that uses the libevent library. In that library there is a function event_new that is supposed to create an event_cb. Somehow, after I dispatch the event base, the event_cb is never called or accessed. This problem only happens on HP-UX Itanium. The code works on HP-UX PA-RISC, Red Hat, AIX, and Solaris. Is there anything specific that needs to be set?
This is part of the code:
int ttypread (int fd, Header *h, char **buf)
{
int c,k;
struct user_data user_data;
struct bufferevent *in_buffer;
struct event_config *evconfig;
log_debug("inside ttypread");
in_buffer = NULL;
user_data.fd = fd;
user_data.h = h;
user_data.buf = buf;
log_debug("from user_data, fd = %d",user_data.fd); //the log_debug is a debugging function for me to check the value sent by the system. I use it to compare between each platform
log_debug("from user_data, buf = %s",user_data.buf);
log_debug("from user_data, h.len = %d",user_data.h->len);
log_debug("from user_data, h.type = %d",user_data.h->type);
evconfig = event_config_new();
if (evconfig == NULL) {
log_error("event_config_new failed");
return -1;
}
if (event_config_require_features(evconfig, EV_FEATURE_FDS)!=0) {
log_error("event_config_require_features failed");
return -1;
}
base = event_base_new_with_config(evconfig);
if (!base) {
log_error("ttypread:event_base_new failed");
return -1;
}
const char* method; //these 3 lines are the new line edited
method = event_base_get_method(base);
log_debug("ttyread is using method = %s",method);
ev = event_new(base, fd, EV_READ|EV_PERSIST, ttypread_event_cb, &user_data);
c = event_add(ev, NULL);
log_debug("ttypread passed event_add with c value is %d",c);
in_buffer = bufferevent_socket_new(base, STDIN_FILENO, BEV_OPT_CLOSE_ON_FREE);
log_debug("ttypread passed bufferevent_socket_new");
if(in_buffer == NULL){
log_debug("problem with bufferevent_socket_new");
}
bufferevent_setcb(in_buffer, in_read_cb, NULL, in_event_cb, NULL);
bufferevent_disable(in_buffer, EV_WRITE);
bufferevent_enable(in_buffer, EV_READ);
k =event_base_dispatch(base);
log_debug("event_base have been dispatched"); //when looking at the debugging file, the other plaform will go to ttypread_event_cb function. But for hpux itanium, it stays here.
if (k == 0){
log_debug("event_base_dispatch returned 0");
} else if (k == -1){
log_debug("event_base_dispatch returned -1");
} else {
log_debug("event_base_dispatch returned 1");
}
event_base_free(base);
event_free(ev);
log_debug("finish ttypread");
log_debug("ttypread_ret will return [%d]",ttypread_ret);
return ttypread_ret;
}
void ttypread_event_cb(evutil_socket_t fd, short events, void *arg)
{
int nread;
struct timeval t;
struct user_data *user_data;
user_data = (struct user_data*)arg;
nread = 0;
log_debug("inside ttypread_event_cb");
if (events & EV_READ) {
log_debug("got events & EV_READ");
nread = ttyread(fd, user_data->h, user_data->buf);
if (nread == -1) {
ttypread_ret = -1;
event_del(ev);
event_base_loopexit(base, NULL);
} else if (nread == 0) {
if (access(input_filename, F_OK)!=0) {
log_debug("cannot access [%s]",input_filename);
tcsetattr(0, TCSANOW, &old); /* Return terminal state */
exit(EXIT_SUCCESS);
}
t.tv_sec = 0;
t.tv_usec = 250000;
select(0, 0, 0, 0, &t);
} else {
ttypread_ret = 1;
event_del(ev);
event_base_loopexit(base, NULL);
}
}
else if (events & EV_WRITE) {
log_debug("got events & EV_WRITE");
}
}
Not sure if this helps, but here is some info on the HP-UX Itanium machine:
uname -a = HP-UX hpux-ita B.11.23 U ia64
If you need any additional info or other function declarations, just leave a comment and I will edit the question.
EDIT: I've added a function inside ttypread. Somehow HP-UX Itanium returns devpoll while the other platforms return poll. I'm not sure whether this is the problem, but if it is, is there any way for me to change it?
After checking the result of event_base_get_method, I found out that only my HP-UX Itanium machine was using the devpoll method. This is how I solved it:
char string[8] = "devpoll";
struct user_data user_data;
struct bufferevent *in_buffer;
struct event_config *evconfig;
const char *method;
const char *devpoll;
devpoll = string;
in_buffer = NULL;
user_data.fd = fd;
user_data.h = h;
user_data.buf = buf;
evconfig = event_config_new();
if (evconfig == NULL) {
log_error("event_config_new failed");
return -1;
}
if (event_config_require_features(evconfig, EV_FEATURE_FDS)!=0) {
log_error("event_config_require_features failed");
return -1;
}
if (event_config_avoid_method(evconfig,devpoll) != 0)
{
log_error("Failed to ignore devpoll method");
}
This forces libevent to avoid the devpoll method and use poll instead.
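For reference, here is a minimal, self-contained sketch of the whole pattern (separate from the ttypread code above), with the chosen backend printed so you can confirm devpoll is no longer picked:

#include <event2/event.h>
#include <stdio.h>

/* Sketch only: build an event_base while refusing the devpoll backend. */
static struct event_base *base_without_devpoll(void)
{
    struct event_config *cfg = event_config_new();
    struct event_base *base = NULL;

    if (cfg == NULL)
        return NULL;
    if (event_config_require_features(cfg, EV_FEATURE_FDS) != 0)
        goto out;
    if (event_config_avoid_method(cfg, "devpoll") != 0)
        goto out;

    base = event_base_new_with_config(cfg);
    if (base != NULL)
        printf("using backend: %s\n", event_base_get_method(base));
out:
    event_config_free(cfg);   /* safe to free once the base is created */
    return base;
}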
I currently use ldap_bind_s to bind to the server in my C application with a SEC_WINNT_AUTH_IDENTITY struct, but the function is marked as deprecated. For this reason I would like to switch to the ldap_sasl_bind_s function.
int main(void) {
LDAP *ld;
int rc = 0;
char *binddn = "cn=admin,dc=local";
const int version = LDAP_VERSION3;
SEC_WINNT_AUTH_IDENTITY wincreds;
struct berval saslcred;
wincreds.User = "admin";
wincreds.UserLength = 5;
wincreds.Password = "secret";
wincreds.PasswordLength = 6;
wincreds.Domain = NULL;
wincreds.DomainLength = 0;
wincreds.Flags = SEC_WINNT_AUTH_IDENTITY_ANSI;
ld = ldap_initA("localhost", LDAP_PORT);
ldap_set_optionA(ld, LDAP_OPT_PROTOCOL_VERSION, &version);
rc = ldap_bind_sA(ld, binddn, (PCHAR)&wincreds, LDAP_AUTH_DIGEST);
printf("0x%x\n", rc); // It's OK (0x0)
ldap_unbind(ld);
saslcred.bv_val = "secret";
saslcred.bv_len = 6;
rc = ldap_sasl_bind_sA(ld, binddn, "DIGEST-MD5", &saslcred, NULL, NULL, NULL);
printf("0x%x\n", rc); // Returns with 0x59
ldap_unbind(ld);
return 0;
}
The ldap_sasl_bind_s call returns the LDAP_PARAM_ERROR code. Clearly, the function parameters above are wrong, but I can't find any working sample code for WinLDAP with SASL binding.
I would be grateful for some guidance on how to make this code work.
The last parameter of ldap_sasl_bind_sA cannot be NULL. It has to point to a location where the function can store the server's response (a struct berval*).
...
struct berval* serverResponse = NULL;
rc = ldap_sasl_bind_sA(ld, binddn, "DIGEST-MD5", &saslcred, NULL, NULL, &serverResponse);
...
So finally, after some research and debugging over the past two weeks, I've managed to write a working example that uses DIGEST-MD5 authentication with WinLDAP's ldap_sasl_bind_s function. The corresponding RFC, this answer, and the official SSPI documentation helped me a lot.
Some gotchas that I ran into:
Regardless of what the documentation says about the ldap_connect function: if you would like to use ldap_sasl_bind_s, calling ldap_connect first is not just "good programming practice", it is necessary. Without it, ldap_sasl_bind_s returns the LDAP_SERVER_DOWN (0x51) error code.
A valid pszTargetName (digest-uri) parameter is crucial for the InitializeSecurityContext function to avoid an invalid token error.
I hope this will help others spend less time figuring out how to use SASL bind mechanisms with WinLDAP.
#include <stdio.h>
#include <windows.h>
#include <winldap.h>
#define SECURITY_WIN32 1
#include <security.h>
#include <sspi.h>
int _tmain(int argc, _TCHAR* argv[]) {
LDAP *ld;
int rc = 0;
const int version = LDAP_VERSION3;
SEC_WINNT_AUTH_IDENTITY wincreds;
struct berval *servresp = NULL;
SECURITY_STATUS res;
CredHandle credhandle;
CtxtHandle newhandle;
SecBufferDesc OutBuffDesc;
SecBuffer OutSecBuff;
SecBufferDesc InBuffDesc;
SecBuffer InSecBuff;
unsigned long contextattr;
ZeroMemory(&wincreds, sizeof(wincreds));
// Set credential information
wincreds.User = (unsigned short *)L"root";
wincreds.UserLength = 4;
wincreds.Password = (unsigned short *)L"p#ssword";
wincreds.PasswordLength = 8;
wincreds.Domain = NULL;
wincreds.DomainLength = 0;
wincreds.Flags = SEC_WINNT_AUTH_IDENTITY_UNICODE;
res = AcquireCredentialsHandle(NULL, L"WDigest", SECPKG_CRED_OUTBOUND,
NULL, &wincreds, NULL, NULL, &credhandle, NULL);
// Buffer for the output token.
OutBuffDesc.ulVersion = 0;
OutBuffDesc.cBuffers = 1;
OutBuffDesc.pBuffers = &OutSecBuff;
OutSecBuff.BufferType = SECBUFFER_TOKEN;
OutSecBuff.pvBuffer = NULL;
ld = ldap_init(L"localhost", LDAP_PORT);
rc = ldap_set_option(ld, LDAP_OPT_PROTOCOL_VERSION, (void*)&version);
rc = ldap_connect(ld, NULL); // Need to connect before SASL bind!
do {
if (servresp != NULL) {
InBuffDesc.ulVersion = 0;
InBuffDesc.cBuffers = 1;
InBuffDesc.pBuffers = &InSecBuff;
/* The digest-challenge will be passed as an input buffer to
InitializeSecurityContext function */
InSecBuff.cbBuffer = servresp->bv_len;
InSecBuff.BufferType = SECBUFFER_TOKEN;
InSecBuff.pvBuffer = servresp->bv_val;
/* The OutBuffDesc will contain the digest-response. */
res = InitializeSecurityContext(&credhandle, &newhandle, L"ldap/localhost", ISC_REQ_MUTUAL_AUTH | ISC_REQ_ALLOCATE_MEMORY,
0, 0, &InBuffDesc, 0, &newhandle, &OutBuffDesc, &contextattr, NULL);
}
else {
res = InitializeSecurityContext(&credhandle, NULL, L"ldap/localhost", ISC_REQ_MUTUAL_AUTH, 0, 0, NULL, 0, &newhandle, &OutBuffDesc, &contextattr, NULL);
}
switch (res) {
case SEC_I_COMPLETE_NEEDED:
case SEC_I_COMPLETE_AND_CONTINUE:
case SEC_E_OK:
case SEC_I_CONTINUE_NEEDED:
break;
case SEC_E_INVALID_HANDLE:
return -2;
case SEC_E_INVALID_TOKEN:
return -1;
default:
break;
}
struct berval cred;
cred.bv_len = OutSecBuff.cbBuffer;
/* The digest-response will be passed to the server
as credential after the second (loop)run. */
cred.bv_val = (char *)OutSecBuff.pvBuffer;
// The servresp will contain the digest-challenge after the first call.
rc = ldap_sasl_bind_s(ld, L"", L"DIGEST-MD5", &cred, NULL, NULL, &servresp);
ldap_get_option(ld, LDAP_OPT_ERROR_NUMBER, &res);
} while (res == LDAP_SASL_BIND_IN_PROGRESS);
if (rc != LDAP_SUCCESS) {
printf("Bind failed with 0x%x\n", rc);
} else {
printf("Bind succeeded\n");
}
return 0;
}
Can anybody help me with an example of how to use the OpenSSL GOST engine? I have to sign data using the GOST R 34.10-2001 signature algorithm, but I can't find any working examples or documentation.
BTW, if I'm not going to use the OpenSSL command-line utility, is there any point in modifying the openssl.cnf file? If not, how do I load the engine in code? And what compile flags are needed to build OpenSSL with a static GOST engine?
Thanks in advance.
----Solution----
Finally the following verifies successfully for me:
ENGINE * LoadEngine()
{
ENGINE *e = NULL;
ENGINE_load_gost();
e = ENGINE_by_id("gost");
if(!e)
{
printf("Filed to get structural reference to engine\n");
}
if(!ENGINE_init(e))
{
ENGINE_free(e);
printf("Failed to get functional reference to engine\n");
}
ENGINE_set_default(e, ENGINE_METHOD_ALL);
OpenSSL_add_all_algorithms();
return e;
}
EVP_PKEY * GenerateKeys(ENGINE *e)
{
EVP_PKEY *pkey = EVP_PKEY_new();
EVP_PKEY_CTX *ctx = EVP_PKEY_CTX_new_id(NID_id_GostR3410_2001, e);
EVP_PKEY_paramgen_init(ctx);
EVP_PKEY_CTX_ctrl(ctx,
NID_id_GostR3410_2001,
EVP_PKEY_OP_PARAMGEN,
EVP_PKEY_CTRL_GOST_PARAMSET,
NID_id_GostR3410_2001_CryptoPro_A_ParamSet,
NULL);
EVP_PKEY_keygen_init(ctx);
EVP_PKEY_keygen(ctx, &pkey);
EVP_PKEY_CTX_free(ctx);
return pkey;
}
int main()
{
ENGINE *e = NULL;
EVP_PKEY_CTX *ctx = NULL;
EVP_PKEY *pkey = NULL;
unsigned char msg[] = "this is a test message";
Binary hash(32);
SHA256_Calc(msg, sizeof(msg), &hash[0]);
size_t siglen = 0;
int status = 0;
e = LoadEngine();
pkey = GenerateKeys(e);
ctx = EVP_PKEY_CTX_new(pkey, e);
if(ctx == NULL)
{
printf("Failed to create context\n");
return -1;
}
status = EVP_PKEY_sign_init(ctx);
if(status != 1)
{
printf("Failed to init signing context\n");
return -1;
}
status = EVP_PKEY_sign(ctx, NULL, &siglen, &hash[0], hash.size());
if(status != 1)
{
printf("Failed to get signature length\n");
return -1;
}
Binary signature(siglen);
status = EVP_PKEY_sign(ctx, &signature[0], &siglen, &hash[0], hash.size());
if(status != 1)
{
printf("Failed to sign a message\n");
return -1;
}
EVP_PKEY_verify_init(ctx);
bool result = EVP_PKEY_verify(ctx, &signature[0], siglen, &hash[0], hash.size());
printf("%s\n", result ? "SUCCESS" : "FAILURE");
ENGINE_cleanup();
}