OpenSSL EVP AES Encryption and Base64 Encoding producing unusable results - c

So I'm trying to reproduce an encryption and encoding operation in C that I've already managed to make work in C#, JScript, Python, and Java. Now, it's mostly just for obfuscating data - not actual encryption - so it's basically for aesthetic purposes only.
First things first, the data string being encrypted looks like this:
"[3671,3401,736,1081,0,32558], [3692,3401,748,1105,0,32558], [3704,3401,774,1162,0,32558], [3722,3401,774,1162,0,32558], [3733,3401,769,1172,0,32558]"
The first big issue for C is that this can vary in length. Each [x,y,z,a,b,c] represents a data point, and the actual string being encrypted can contain anywhere from one data point to 100, so I'm sure my memory management might be broken somewhere as well. The second issue is that I don't seem to be getting the expected result after encoding. After encrypting, the byte result of the C cipher is the same as the Python cipher's. But when I encode to base64 in C, I don't get the expected result at all.
#include <X11/Xlib.h>
#include <assert.h>
#include <unistd.h>
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <malloc.h>
#include <time.h>
#include <errno.h>
#include <linux/input.h>
#include <fcntl.h>
#include <string.h>
#include <openssl/sha.h>
#include <openssl/hmac.h>
#include <openssl/evp.h>
#include <openssl/kdf.h>
#include <openssl/params.h>
#include <openssl/bio.h>
#include <openssl/buffer.h>
void PBKDF2_HMAC_SHA_1(const char* pass, int passlen, const unsigned char* salt, int saltlen, int32_t iterations, uint32_t outputBytes, char* hexResult, uint8_t* binResult)
{
    unsigned int i;
    unsigned char digest[outputBytes];
    PKCS5_PBKDF2_HMAC(pass, passlen, salt, saltlen, iterations, EVP_sha1(), outputBytes, digest);
    for (i = 0; i < sizeof(digest); i++)
    {
        sprintf(hexResult + (i * 2), "%02x", 255 & digest[i]);
        binResult[i] = digest[i];
    }
}
int main(void){
    char intext[] = "[3671,3401,736,1081,0,32558], [3692,3401,748,1105,0,32558], [3704,3401,774,1162,0,32558], [3722,3401,774,1162,0,32558], [3733,3401,769,1172,0,32558]";
    int outlen, final_length;
    EVP_CIPHER_CTX *ctx;
    ctx = EVP_CIPHER_CTX_new();
    size_t i;
    char sid[] = "u9SXNMeTkvyBr3n81SJ7Lj216w04gJ99";
    char pk[] = "jeIHjod1cZeM1U04cy8z7488AeY1Sl25";
    uint32_t outputBytes = 48;
    uint32_t iterations = 128;
    unsigned char byteresult[2*outputBytes+1];
    char hexresult[2*outputBytes+1];
    memset(byteresult,0,sizeof(byteresult));
    uint8_t binResult[outputBytes+1];
    memset(binResult,0,sizeof(binResult));
    char *finResult = NULL;
    char key[65];
    memset(key,0,sizeof(key));
    char * keystart = hexresult +32;
    char iv[33];
    memset(iv,0,sizeof(iv));
    PBKDF2_HMAC_SHA_1(sid,strlen(sid),pk,strlen(pk),iterations,outputBytes,hexresult,binResult);
    memcpy(key, keystart,64);
    memcpy(iv, hexresult,32);
    EVP_CipherInit_ex(ctx, EVP_aes_256_cbc(), NULL,(unsigned char *)key, (unsigned char *)iv, 1);
    unsigned char *outbuf;
    int outbuflen = sizeof(intext) + EVP_MAX_BLOCK_LENGTH - (sizeof(intext) % 16);
    outbuf = (unsigned char *)malloc(outbuflen);
    EVP_CipherUpdate(ctx, outbuf, &outbuflen,(unsigned char *)intext, strlen(intext));
    EVP_CipherFinal_ex(ctx, outbuf + outbuflen, &final_length);
    outlen += final_length;
    EVP_CIPHER_CTX_free(ctx);
    char bytesout[strlen(outbuf) + outbuflen];
    int buflen = 0;
    for (i=0;i< outbuflen + final_length;i++)
    {
        buflen += 1;
        sprintf(bytesout + (i * 2),"%02x", outbuf[i]);
    }
    printf("bytesout: %s\n", bytesout);
    char outtext[sizeof(bytesout)];
    memset(outtext,0, sizeof(outtext));
    int outtext_len = sizeof(outtext);
    EVP_ENCODE_CTX *ectx = EVP_ENCODE_CTX_new();
    EVP_EncodeInit(ectx);
    EVP_EncodeBlock(outtext, bytesout, sizeof(bytesout));
    EVP_EncodeFinal(ectx, (unsigned char*)outtext, &outtext_len);
    EVP_ENCODE_CTX_free(ectx);
    printf("b64Encoded String %s \n", outtext);
}
Makefile:
gcc simplecipher.c -o simplecipher -lX11 -lncurses -lssl -lcrypto
Result:
bytesout: eafafcde5c00eb6e649d61a09f9b52d13dd8c783d73afcbc03dfb5cea0cd3ab627528ec1b2997105871d570c0b972349943800aacd063093d97f7f39554775aa4256bd26599dde66bb76b925d9f021f6b657d1a91eb08e1900b6ad91f7f65b97e1a7e17b8d959a65d6893af458e26761536b3ffdf470f89f1aac24ca02782fb8a691c25b368549387890dc73143bb213e0ce616264e5b30add3b480c24f5edc6
b64Encoded String ZWFmYWZjZGU1YzAwZWI2ZTY0OWQ2MWEwOWY5YjUyZDEzZGQ4Yzc4M2Q3M2FmY2JjMDNkZmI1Y2VhMGNkM2FiNjI3NTI4ZWMxYjI5OTcxMDU4NzFkNTcwYzBiOTcyMzQ5OTQzODAwYWFjZDA2MzA5M2Q5N2Y3ZjM5NTU0Nzc1YWE0MjU2YmQyNjU5OWRkZTY2YmI3NmI=
When I run a similar script in Python:
import base64
from Cryptodome.Cipher import AES
from Cryptodome.Random import get_random_bytes
from Cryptodome.Protocol.KDF import PBKDF2
from Cryptodome.Util.Padding import pad
import binascii
symmetric_key = "u9SXNMeTkvyBr3n81SJ7Lj216w04gJ99"
salt = "jeIHjod1cZeM1U04cy8z7488AeY1Sl25"
pbbytes = PBKDF2(symmetric_key.encode("utf-8"), salt.encode("utf-8"), 48, 128)
iv = pbbytes[0:16]
key = pbbytes[16:48]
half_iv=iv[0:8]
half_key=key[0:16]
cipher = AES.new(key, AES.MODE_CBC, iv)
cipher = AES.new(binascii.hexlify(bytes(half_key)), AES.MODE_CBC, binascii.hexlify(bytes(half_iv)))
print("test encoding:")
intext = b"[3671,3401,736,1081,0,32558], [3692,3401,748,1105,0,32558], [3704,3401,774,1162,0,32558], [3722,3401,774,1162,0,32558], [3733,3401,769,1172,0,32558]"
print("intext pre padding: ", intext)
paddedtext = pad(intext,16)
print("intext post padding: ", paddedtext)
en_bytes = cipher.encrypt(paddedtext)
print("encrypted bytes: ", binascii.hexlify(bytearray(en_bytes)))
en_data = base64.b64encode(en_bytes)
en_bytes_string = ''.join(map(chr, en_bytes))
print("encoded bytes: ", en_data)
Result:
encrypted bytes: b'eafafcde5c00eb6e649d61a09f9b52d13dd8c783d73afcbc03dfb5cea0cd3ab627528ec1b2997105871d570c0b972349943800aacd063093d97f7f39554775aa4256bd26599dde66bb76b925d9f021f6b657d1a91eb08e1900b6ad91f7f65b97e1a7e17b8d959a65d6893af458e26761536b3ffdf470f89f1aac24ca02782fb8a691c25b368549387890dc73143bb213e0ce616264e5b30add3b480c24f5edc6'
encoded bytes: b'6vr83lwA625knWGgn5tS0T3Yx4PXOvy8A9+1zqDNOrYnUo7BsplxBYcdVwwLlyNJlDgAqs0GMJPZf385VUd1qkJWvSZZnd5mu3a5JdnwIfa2V9GpHrCOGQC2rZH39luX4afhe42VmmXWiTr0WOJnYVNrP/30cPifGqwkygJ4L7imkcJbNoVJOHiQ3HMUO7IT4M5hYmTlswrdO0gMJPXtxg=='
So as you can see, the encoded portion comes out completely different in the C application. In JScript, C#, and Java it comes out exactly as in the Python script. The encrypted portion, however, is the same between the two; just the encoding seems to break it. Now this could be 100% because I've absolutely butchered something when passing the bytes/char arrays around. I just can't seem to find where in the chain I've broken down. Any suggestions?

The C code base64-encodes the wrong buffer, namely bytesout, which is already ASCII text:
for (i=0;i< outbuflen + final_length;i++)
{
    buflen += 1;
    sprintf(bytesout + (i * 2),"%02x", outbuf[i]);
}
You need to encode outbuf instead.
PS: the code cries for a serious cleanup.

Alright,
Just wanted to say thanks to everyone who commented and answered, but I did figure it out this morning. Basically, using
EVP_EncodeBlock(outtext, outbuf, buflen);
is what solved it. Before, I'd pass in either sizeof(outtext) or sizeof(outbuf), and that would only encode what looked like part of the first data point (sizeof(outbuf) is just the size of the pointer, not the length of the ciphertext). But this fixes it. I can now encrypt a string of data points regardless of their starting size, and decrypt it in Python. I had buflen in there just to debug the number of bytes being written to the bytesout char array, but it seemed to do the trick.
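For reference, here is a minimal sketch of what the corrected tail of main() looks like (cipher_len and b64out are illustrative names, not from my original code; EVP_EncodeBlock needs an output buffer of 4*ceil(n/3) bytes plus a terminating NUL, and returns the encoded length excluding that NUL):
int cipher_len = outbuflen + final_length;            /* raw cipher bytes written by Update + Final */
unsigned char b64out[4 * ((cipher_len + 2) / 3) + 1]; /* base64 grows data by 4/3, plus NUL */
EVP_EncodeBlock(b64out, outbuf, cipher_len);          /* encode the raw bytes, not the hex dump */
printf("b64Encoded String %s\n", b64out);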
Cheers, everyone!

I was trying to do the same thing, and just finished doing so. I believe your question is misleading. You are not actually encoding a digest in base64. Rather, you are encoding the hexadecimal representation of a digest in base64 (as user58697 already stated in his own response). Also, as specified in Ian Abbott's comment, you're using EVP_ENCODE_CTX wrong.
I believe most people would actually want to encode the digest itself in base64. If you're trying to implement stuff like xmlenc (and I assume most specifications that use these base64 encoded digests), it can be done in the following fashion, using libcrypto~3.0:
void base64_digest(const char* input, int input_length)
{
    // Generating a digest
    EVP_MD_CTX* context = EVP_MD_CTX_new();
    const EVP_MD* md = EVP_sha512();
    unsigned char md_value[EVP_MAX_MD_SIZE];
    unsigned int md_len;
    EVP_DigestInit_ex2(context, md, NULL);
    EVP_DigestUpdate(context, input, input_length);
    EVP_DigestFinal_ex(context, md_value, &md_len);
    // Encoding digest to base64. Base64 expands data by a factor of 4/3
    // (plus a terminating NUL), so EVP_MAX_MD_SIZE alone would be too small
    // for a SHA-512 digest; size the buffer accordingly.
    char output[4 * ((EVP_MAX_MD_SIZE + 2) / 3) + 1];
    EVP_EncodeBlock((unsigned char*)output, md_value, md_len);
    // cleanup
    EVP_MD_CTX_free(context);
    printf("Base64-encoded digest: %s\n", output);
}
Incidentally, the result will be much shorter (with padding, 88 characters is the expected length, while I believe you'll get 172 characters by encoding the hex digest instead).
You also don't need to use EVP_ENCODE_CTX, EVP_EncodeInit nor EVP_EncodeFinal, as EVP_EncodeBlock doesn't need any of these.
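For illustration, a hypothetical caller could look like this (the message is my own example, not from the original post):
int main(void)
{
    const char msg[] = "hello world";
    base64_digest(msg, (int)(sizeof msg - 1)); // length excludes the terminating NUL
    return 0;
}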
For C++ developers, I also have an implementation at https://github.com/crails-framework/libcrails-encrypt (check out the MessageDigest class).

Related

Convert 32 bit network order to host in C

I'm trying to convert a uint32_t from network byte order to host byte order. I'm reading 4 bytes from a TCP connection and storing them in a buffer like this:
ssize_t read = 0;
char *file_buf;
size_t fb_size = 4 * sizeof(char);
file_buf = malloc(fb_size);
read = recv(socket_file_descriptor,file_buf,fb_size,0);
So the bytes are stored in file_buf, but I want a number; how can I do this?
This looks straightforward:
ssize_t read = 0;
uint32_t myInteger; // Declare a 32-bit uint.
// Pass a pointer to the integer, and the size of the integer.
read = recv(socket_file_descriptor,&myInteger,sizeof(myInteger),0);
myInteger = ntohl(myInteger); // Change from Network order to Host order.
Here's how I would do it. Note the use of ntohl() to convert the data from network-endian to host-endian form:
#include <stdio.h>
#include <stdint.h>
#include <arpa/inet.h>
#include <sys/socket.h>
[...]
char file_buf[4];
if (recv(socket_file_descriptor, file_buf, sizeof(file_buf), 0) == sizeof(file_buf))
{
    uint32_t * p = (uint32_t *) file_buf;
    uint32_t num = ntohl(*p);
    printf("The number is %u\n", num);
}
else printf("Short read or network error?\n");
Some OSes (Linux with glibc, BSDs) have size-specific endianness conversion functions too, to supplement ntohl() and ntohs().
#include <endian.h> // Might need <sys/endian.h> instead on some BSDs
void your_function(uint32_t bigend_int) {
    uint32_t host_int = be32toh(bigend_int);
}
Edit:
But since you seem to have easy access to the individual bytes, there's always Rob Pike's preferred approach:
uint32_t host_int = ((uint32_t)(uint8_t)file_buf[3] << 0) | ((uint32_t)(uint8_t)file_buf[2] << 8) | ((uint32_t)(uint8_t)file_buf[1] << 16) | ((uint32_t)(uint8_t)file_buf[0] << 24); // cast each byte to uint8_t first so plain char can't sign-extend
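Alternatively, a memcpy-based variant (my own sketch, not from the original answers) reuses ntohl() and avoids both the pointer cast and the manual shifts:
#include <arpa/inet.h>
#include <stdint.h>
#include <string.h>

uint32_t read_be32(const char buf[4])
{
    uint32_t net;
    memcpy(&net, buf, sizeof net); /* well-defined for any alignment */
    return ntohl(net);             /* network (big-endian) to host order */
}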

SHA1 checksumming a length-prefixed message in a streaming fashion

Git-SHAs are computed by prefixing "blob $DecimalMessageLength\0" to a message and then SHA1-checksumming the prefixed message.
From the properties of the SHA1 algorithm, is it possible to do this in a streaming fashion, i.e., prepend the prefix after the message body has been hashed?
C example below (link with -lcrypto with libssl-dev installed; it's probably not very useful since this one doesn't even expose the SHA1 algorithm but I was playing...):
#include <openssl/sha.h>
#include <stdio.h>
#include <stdlib.h>
int pr_dgst(unsigned char const Dgst[static SHA_DIGEST_LENGTH])
{
    char const digits[]="0123456789abcdef";
    char digest_pr[(SHA_DIGEST_LENGTH)*2+1];
    for(size_t i=0;i<SHA_DIGEST_LENGTH;i++){
        digest_pr[i*2+0]=digits[Dgst[i]/16];
        digest_pr[i*2+1]=digits[Dgst[i]%16];
    }
    digest_pr[(SHA_DIGEST_LENGTH)*2]='\0';
    return puts(digest_pr);
}
int main()
{
    system("echo gitsha; printf '%s' 'abc' | git hash-object --stdin");
#define STR_STRLEN(A) A, (sizeof(A)/sizeof(*(A))-1) //paste string literal and its length
    unsigned char digest[SHA_DIGEST_LENGTH];
    SHA_CTX ctx;
    SHA1_Init(&ctx); SHA1_Update(&ctx,STR_STRLEN("blob 3\0abc")); SHA1_Final(digest,&ctx);
    pr_dgst(digest); //prints the same as the system command
    //do this in a streaming fashion??
    SHA1_Init(&ctx);
    size_t len = 0;
    SHA1_Update(&ctx,STR_STRLEN("a")); len++;
    SHA1_Update(&ctx,STR_STRLEN("b")); len++;
    SHA1_Update(&ctx,STR_STRLEN("c")); len++;
    //"prepend" "blob 3\0" now?
    SHA1_Final(digest,&ctx);
    /*pr_dgst(digest);*/
}
It is only possible to add bytes to the end of the message stream; otherwise the hash function would be cryptographically broken.
One upside of the length prefix is that you can store two files with a known bare SHA-1 collision in the same repository and they will still get different blob IDs!
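Since bytes can only ever be appended, the practical workaround is to hash the prefix first; that works because git knows the blob length before streaming begins. A minimal sketch under that assumption, using the same SHA1_* calls as the question (git_blob_sha1 is an illustrative name):
#include <openssl/sha.h>
#include <stdio.h>

void git_blob_sha1(const unsigned char *body, size_t len,
                   unsigned char out[SHA_DIGEST_LENGTH])
{
    char prefix[32];
    int plen = snprintf(prefix, sizeof prefix, "blob %zu", len);
    SHA_CTX ctx;
    SHA1_Init(&ctx);
    SHA1_Update(&ctx, prefix, plen + 1); /* +1 hashes the trailing NUL too */
    SHA1_Update(&ctx, body, len);        /* the body itself can be fed in chunks */
    SHA1_Final(out, &ctx);
}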

Computing websocket Sec-WebSocket-Accept value using libtomcrypt

RFC6455 specifies a method of computing the Sec-WebSocket-Accept response header from the value of the Sec-WebSocket-Key header. This method is based on SHA-1 hashing and Base64-encoding the result.
How can I implement this method in plain C using libtomcrypt for SHA-1 and Base64?
Note: This question intentionally does not show any effort because I immediately answered it myself. See below for my effort.
Here's a full compilable example that uses only libtomcrypt without any dynamic memory allocation and successfully computes the reference example from RFC6455:
//This file is licensed under CC0 1.0 Universal (public domain)
//Compile like this: gcc -o wsencodetest wsencodetest.c -ltomcrypt
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <ctype.h>
#include <tomcrypt.h>

#define SHA1_HASHSIZE 20

//Magic GUID as defined in RFC6455 section 1.3
static const char magicGUID[] = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";

/**
 * Compute the value of the Sec-WebSocket-Accept response header
 * from the value of the Sec-WebSocket-Key header.
 * @param key The whitespace- or NUL-terminated Sec-WebSocket-Key value
 * @param dst Where to store the base64-encoded output. Must provide 29 bytes of memory.
 *            The 29 bytes starting at dst contain the resulting value (plus a terminating NUL byte).
 */
void computeWebsocketSecAccept(const char* key, char* dst) {
    /**
     * Determine start & length of key minus leading/trailing whitespace
     * See RFC6455 section 1.3
     */
    //Skip leading whitespace
    while(isspace(*key)) {
        key++;
    }
    //Determine key size.
    size_t keySize = 0;
    while(!isspace(key[keySize]) && key[keySize] != 0) {
        keySize++;
    }
    //Compute SHA1 hash. See RFC6455 section 1.3
    unsigned char hashOut[SHA1_HASHSIZE];
    hash_state md;
    sha1_desc.init(&md);
    sha1_desc.process(&md, (const unsigned char*)key, keySize);
    sha1_desc.process(&md, (const unsigned char*)magicGUID, sizeof(magicGUID) - 1); //-1: the NUL terminator is not part of the hashed GUID
    sha1_desc.done(&md, hashOut);
    //Encode hash to output buffer
    unsigned long outlen = 29; //We know the output is 28 in size
    base64_encode(hashOut, SHA1_HASHSIZE, (unsigned char*)dst, &outlen);
}

/**
 * Usage example
 */
int main(int argc, char** argv) {
    //Whitespace needs to be removed according to RFC6455
    //Example from RFC6455
    const char* key = " dGhlIHNhbXBsZSBub25jZQ== ";
    char buf[29];
    //Perform computation
    computeWebsocketSecAccept(key, buf);
    //Should print s3pPLMBiTxaQ9kYGzzhZRbK+xOo=
    printf("%s\n", buf);
}

Why am I getting different results from different skein hash APIs?

I've tried a few. Python's pyskein, a JavaScript skein calculator I found online somewhere, and the skein calculator used for xkcd's April Fools' comic all give the same output for a given input.
But when I download version 1.3 of the reference C source here, I get different results. Worst of all, the results I get from the C API perfectly match the "known answer test" examples that come with the source code, so I assume I'm using it right.
My C code:
#include <stdio.h>
#include <stdlib.h>
#include "SHA3api_ref.h"

int main(int argc, const char * argv[])
{
    const int BITS = 256; // length of hash in bits
    const int LENGTH = 32; // length of data in bits
    BitSequence *hashval = calloc(BITS/8, 1);
    const BitSequence content[] = {0xC1, 0xEC, 0xFD, 0xFC};
    Hash(BITS, content, LENGTH, hashval);
    for (int i = 0; i < BITS/8; i++) {
        printf("%02X", hashval[i]);
    }
    return 0;
}
result hex: 2638B1711F1346D08BF02B5D1A575CD924140A608512AF5B8E4475632599A896
Python code for the same hash on the same data:
import skein
print( skein.skein256(bytes([0xC1, 0xEC, 0xFD, 0xFC])).hexdigest() )
result hex: 07e785ce898fa5cfa22e15294481717935923985ea90f67fc65cb5b3cb718190
Note that the C answer is the expected answer according to the KAT_MCT/ShortMsgKAT_256.txt file that comes with the code. But pyskein gives results that everyone else seems to agree are correct. What am I missing?

C Libmcrypt cannot encrypt/decrypt successfully

I am working with libmcrypt in C and attempting to implement a simple test of encryption and decryption using rijndael-256 as the algorithm of choice. I have modeled this test implementation closely on the man page examples, with rijndael in place of their chosen algorithm. When compiled with gcc -o encryption_test main.c -lmcrypt, the following source code produces output similar to:
The encrypted message buffer contains j��A��8 �qj��%`��jh���=ZЁ�j
The original string was ��m"�C��D�����Y�G�v6��s��zh�
Obviously, the decryption part is failing, but since it is just a single function call, this leads me to believe the encryption step is not behaving correctly either. I have several questions for the libmcrypt gurus out there, if you could point me in the right direction.
First, what is causing this code to produce this broken output?
Second, when dealing with mandatory fixed sizes such as the key size and block size: for example, with a 256-bit key, does the function expect 32 bytes of key plus a trailing null byte, 31 bytes of key plus a trailing null byte, or 32 bytes of key with the 33rd byte being irrelevant? The same question holds for the block size as well.
Lastly, one of the examples I noted used mhash to generate a hash of the key text to supply to the encryption call. This is of course preferable, but it was commented out, and linking in mhash seems to fail. What is the accepted way of handling this type of key conversion when working with libmcrypt? I have chosen to leave any such complexities out to avoid further complicating already broken code, but I would like to incorporate this into the final design. Below is the source code in question:
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <time.h>   /* for time(), used to seed rand() */
#include <mcrypt.h>

int main(int argc, char *argv[])
{
    MCRYPT mfd;
    char *key;
    char *plaintext;
    char *IV;
    unsigned char *message, *buffered_message, *ptr;
    int i, blocks, key_size = 32, block_size = 32;
    message = (unsigned char *) "Test Message";
    /** Buffer message for encryption */
    blocks = (int) (strlen((char *) message) / block_size) + 1;
    buffered_message = calloc(1, (blocks * block_size));
    key = calloc(1, key_size);
    strcpy(key, "&*GHLKPK7G1SD4CF%6HJ0(IV#X6f0(PK");
    mfd = mcrypt_module_open(MCRYPT_RIJNDAEL_256, NULL, "cbc", NULL);
    if(mfd == MCRYPT_FAILED)
    {
        printf("Mcrypt module open failed.\n");
        return 1;
    }
    /** Generate random IV */
    srand(time(0));
    IV = malloc(mcrypt_enc_get_iv_size(mfd));
    for(i = 0; i < mcrypt_enc_get_iv_size(mfd); i++)
    {
        IV[i] = rand();
    }
    /** Initialize cipher with key and IV */
    i = mcrypt_generic_init(mfd, key, key_size, IV);
    if(i < 0)
    {
        mcrypt_perror(i);
        return 1;
    }
    strncpy((char *) buffered_message, (char *) message, strlen((char *) message));
    mcrypt_generic(mfd, buffered_message, block_size);
    printf("The encrypted message buffer contains %s\n", buffered_message);
    mdecrypt_generic(mfd, buffered_message, block_size);
    printf("The original string was %s\n", buffered_message);
    mcrypt_generic_deinit(mfd);
    mcrypt_module_close(mfd);
    return 0;
}
You need to re-initialize the descriptor mfd for decryption; you cannot use the same descriptor for both encryption and decryption.
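A minimal sketch of that fix, spliced into the question's code (same variable names; calling mcrypt_generic_init() again with the same key and IV resets the CBC state so decryption starts where encryption did):
mcrypt_generic(mfd, buffered_message, block_size);
printf("The encrypted message buffer contains %s\n", buffered_message);

/* tear down the encryption state and re-initialize before decrypting */
mcrypt_generic_deinit(mfd);
i = mcrypt_generic_init(mfd, key, key_size, IV);
if(i < 0)
{
    mcrypt_perror(i);
    return 1;
}

mdecrypt_generic(mfd, buffered_message, block_size);
printf("The original string was %s\n", buffered_message);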
