When using the terminal utility "openssl" with the following command:
echo -n "Hello World!" | openssl sha1
This is the output produced:
(stdin)= 2ef7bde608ce5404e97d5f042f95f89f1c232871
I've tried producing the same output with the following C code:
#include <stdio.h>
#include <stdlib.h>
#include <openssl/sha.h>

int main(void) {
    const unsigned char source_string[1024] = "Hello World!";
    unsigned char dest_string[1024];

    SHA1(source_string, 16, dest_string);
    printf("String: %s\nHashed string: %s\n", source_string, dest_string);
    return 0;
}
However, it produces this weird, garbled output:
String: Hello World!
Hashed string: #�V�#��#�����T\�
How can I make it produce the same output as the openssl command shown above?
You should:
1. Pass the correct length of the string (it is 12 bytes long) to SHA1.
2. Print the result in hexadecimal, not as a string.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <openssl/sha.h>

int main(void) {
    const unsigned char source_string[1024] = "Hello World!";
    unsigned char dest_string[1024];   /* SHA-1 only needs 20 bytes of this */
    int i;

    /* hash exactly the bytes of the string, not a hard-coded 16 */
    SHA1(source_string, strlen((const char *)source_string), dest_string);

    printf("String: %s\nHashed string: ", source_string);
    /* print each of the 20 digest bytes as two hex digits */
    for (i = 0; i < 20; i++)
        printf("%02x", dest_string[i]);
    putchar('\n');
    return 0;
}
I didn't check by running this, so there may be other errors.
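For completeness: the program has to be linked against libcrypto (for example gcc sha1_demo.c -lcrypto, where sha1_demo.c is just a placeholder file name), and OpenSSL 3.x marks the low-level SHA1() call as deprecated, though it still works. A slightly tidier sketch of the same idea, sizing the digest buffer with SHA_DIGEST_LENGTH from <openssl/sha.h>:

#include <stdio.h>
#include <string.h>
#include <openssl/sha.h>

int main(void) {
    const char *msg = "Hello World!";
    unsigned char digest[SHA_DIGEST_LENGTH];   /* SHA-1 digests are always 20 bytes */

    SHA1((const unsigned char *)msg, strlen(msg), digest);

    for (int i = 0; i < SHA_DIGEST_LENGTH; i++)
        printf("%02x", digest[i]);
    putchar('\n');
    return 0;
}

The hex string it prints should match the 2ef7bde6... digest that the openssl sha1 command produced above.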
Related
I have the following C program:
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <ctype.h>

void strLower(char *string, char *stringLow);

int main(int argc, char *argv[])
{
    int len = strlen(argv[1]);
    char myString[len];
    strLower(argv[1], myString);

    char *myStringP = (char *)malloc(sizeof(char) * len);
    strLower(argv[1], myStringP);

    printf("String[]: %s\nString *: %s\n", myString, myStringP);
}

void strLower(char *string, char *stringLow)
{
    int len = strlen(string);
    for (int i = 0; i < len; i++)
    {
        stringLow[i] = tolower(string[i]);
    }
}
I compiled this with gcc test.c -o test, where test.c is the name of my file. After that, I ran my program with ./test helloworld and the output was the following:
./test
String[]: helloworld#=\U
String *: helloworld
I did this multiple times and the last characters of String[] were always different. Why does this happen?
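(For reference, the usual explanation: myString has room for exactly len characters but strLower never writes a terminating '\0', so printf reads past the end of the array; the malloc version only looks right because the bytes after it happen to be zero. A minimal corrected sketch, keeping the names from the question:)

#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <ctype.h>

/* copies string into stringLow in lower case and null-terminates it */
void strLower(const char *string, char *stringLow)
{
    size_t len = strlen(string);
    for (size_t i = 0; i < len; i++)
        stringLow[i] = (char)tolower((unsigned char)string[i]);
    stringLow[len] = '\0';              /* the missing terminator */
}

int main(int argc, char *argv[])
{
    if (argc < 2)
        return 1;

    size_t len = strlen(argv[1]);
    char myString[len + 1];             /* one extra byte for '\0' */
    strLower(argv[1], myString);

    char *myStringP = malloc(len + 1);
    strLower(argv[1], myStringP);

    printf("String[]: %s\nString *: %s\n", myString, myStringP);
    free(myStringP);
    return 0;
}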
I want to mprotect the data section. The following program will not run correctly. I understand the first argument of mprotect() should be page-aligned, but how do I get an aligned memory address in the data section?
#include <string.h>
#include <sys/mman.h>
#include <stdio.h>

char s[] = "Hello World!";

int main() {
    if (mprotect(s, strlen(s) + 1, PROT_EXEC) == -1) {
        perror("mprotect()");
        return 1;
    }
}
$ ./mprotect_prog
mprotect(): Invalid argument
EDIT: I use the following code to get the page size.
{
builtin printf %s '#define PAGESIZE '
getconf PAGESIZE
} > pagesize.h
Then the C code is changed to the following.
#include <string.h>
#include <sys/mman.h>
#include <stdio.h>
#include "pagesize.h"

char s[] __attribute__((aligned(PAGESIZE))) = "Hello World!";

int main() {
    if (mprotect(s, strlen(s) + 1, PROT_EXEC) == -1) {
        perror("mprotect()");
        return 1;
    }
}
Then, I get a segmentation fault. Can anybody reproduce this error? What is wrong with it?
$ ./mprotect_prog
Segmentation fault
EDIT2: I have to add the following line below the definition of s to make sure s occupies a whole page on its own. Then the program works.
char r[] __attribute__((aligned(PAGESIZE))) = "Hello World!";
The complete working version:
{
builtin printf %s '#define PAGESIZE '
getconf PAGESIZE
} > pagesize.h
#include <string.h>
#include <sys/mman.h>
#include <stdio.h>
#include "pagesize.h"

char s[] __attribute__((aligned(PAGESIZE))) = "Hello World!";
char r[] __attribute__((aligned(PAGESIZE))) = "Hello World!";

int main() {
    if (mprotect(s, strlen(s) + 1, PROT_EXEC) == -1) {
        perror("mprotect()");
        return 1;
    }
}
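(An alternative sketch that avoids the dummy r array: make s itself exactly one page long, so nothing else can share its page. This still assumes the generated pagesize.h from above; the unused bytes of the page are simply wasted.)

#include <string.h>
#include <sys/mman.h>
#include <stdio.h>
#include "pagesize.h"

/* page-aligned and page-sized, so s has a page entirely to itself */
char s[PAGESIZE] __attribute__((aligned(PAGESIZE))) = "Hello World!";

int main() {
    if (mprotect(s, strlen(s) + 1, PROT_EXEC) == -1) {
        perror("mprotect()");
        return 1;
    }
}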
I have, for example, "asd" and I want it to be randomized to DAS, DSA, SAD, and so on. How can I code this? I've tried a few solutions but they didn't really work.
#include <stdio.h>
#include <stdlib.h>
#include <conio.h>
#include <string.h>
#include <time.h>

int main()
{
    printf("type in the word\n");
    char haslo[128];
    scanf("%s", haslo);

    char set[128];
    char hasloa[128];
    strcpy(set, haslo);

    unsigned int Ind = 0;
    srand(time(NULL) + rand());

    int len = strlen(set);
    while (Ind < len)
    {
        hasloa[Ind++] = set[rand() % 62];
    }
    hasloa[len] = '\0';

    printf("%s", hasloa);
    return 0;
}
Change the 62 inside the while loop to len.
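A follow-up note: even after that change, rand() % len picks positions with replacement, so the output can repeat or drop letters (for example "asd" can come out as "aas"). If the goal is a true anagram of the input, the usual technique is a Fisher-Yates shuffle; here is a sketch reusing the variable name from the question:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>

int main(void)
{
    char haslo[128];
    printf("type in the word\n");
    if (scanf("%127s", haslo) != 1)
        return 1;

    srand((unsigned)time(NULL));

    /* Fisher-Yates: swap each position with a random position at or before it */
    size_t len = strlen(haslo);
    for (size_t i = len; i > 1; i--) {
        size_t j = (size_t)rand() % i;   /* 0 <= j < i */
        char tmp = haslo[i - 1];
        haslo[i - 1] = haslo[j];
        haslo[j] = tmp;
    }

    printf("%s\n", haslo);
    return 0;
}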
I was curious whether I could write C programs in the Mac Terminal. It seems I can, but when I start trying to use strings, I get errors when compiling.
#include <stdio.h>
#include <string.h>

int main(void) {
    string s = "chris";
    printf("hello %s \n", s);
}
When I compile this I get a message saying use of undeclared identifier 'string' for the line string s = "chris";.
I tried adding using namespace std; but it says that using is undefined. I have tried both #include <string> and #include <string.h>.
Any thoughts would be appreciated.
string is a C++ standard library class; it does not exist in C. Use const char * instead:
#include <stdio.h>

int main(int argc, const char **argv) {
    const char *s = "chris";
    printf("hello %s \n", s);
    return 0;
}
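A small follow-up: if the string needs to be modifiable, a plain char array also works in C:

#include <stdio.h>

int main(void) {
    char s[] = "chris";   /* a writable copy of the string literal */
    s[0] = 'C';
    printf("hello %s \n", s);
    return 0;
}

std::string only becomes available if the file is compiled as C++ (for example with clang++) together with #include <string>.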
My C program is pasted below. In bash, the program prints "char is " but the Ω is not printed. My locales are all en_US.utf8.
#include <stdio.h>
#include <wchar.h>
#include <stdlib.h>

int main() {
    int r;
    wchar_t myChar1 = L'Ω';
    r = wprintf(L"char is %c\n", myChar1);
}
This was quite interesting. Apparently the compiler translates the omega from UTF-8 to a wide character, but somehow libc messes it up.
First of all: the %c format specifier expects a char (even in the wprintf version), so you have to use %lc (and likewise %ls for wide strings).
Secondly, if you run your code as it is, the locale is set to "C" (it isn't automatically taken from the environment). You have to call setlocale() with an empty string to pick the locale up from the environment, so libc is happy again.
#include <stdio.h>
#include <wchar.h>
#include <stdlib.h>
#include <locale.h>

int main() {
    int r;
    wchar_t myChar1 = L'Ω';
    setlocale(LC_CTYPE, "");
    r = wprintf(L"char is %lc (%x)\n", myChar1, myChar1);
}
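With the locale taken from the environment (the question's en_US.utf8, for example), this should print something like char is Ω (3a9), 0x3A9 being the code point of Ω. Using setlocale(LC_ALL, "") instead of just LC_CTYPE also works and is the more common idiom.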
As an alternative to the answer that relies on fixing the locale in libc, you can convert the character to UTF-8 yourself:
#include <stdio.h>
#include <wchar.h>
#include <stdlib.h>

// NOTE: *NOT* thread safe, not re-entrant
const char* unicode_to_utf8(wchar_t c)
{
    static unsigned char b_static[5];
    unsigned char* b = b_static;

    if (c < (1 << 7))               // 7 bit Unicode encoded as plain ascii
    {
        *b++ = (unsigned char)(c);
    }
    else if (c < (1 << 11))         // 11 bit Unicode encoded in 2 UTF-8 bytes
    {
        *b++ = (unsigned char)((c >> 6) | 0xC0);
        *b++ = (unsigned char)((c & 0x3F) | 0x80);
    }
    else if (c < (1 << 16))         // 16 bit Unicode encoded in 3 UTF-8 bytes
    {
        *b++ = (unsigned char)((c >> 12) | 0xE0);
        *b++ = (unsigned char)(((c >> 6) & 0x3F) | 0x80);
        *b++ = (unsigned char)((c & 0x3F) | 0x80);
    }
    else if (c < (1 << 21))         // 21 bit Unicode encoded in 4 UTF-8 bytes
    {
        *b++ = (unsigned char)((c >> 18) | 0xF0);
        *b++ = (unsigned char)(((c >> 12) & 0x3F) | 0x80);
        *b++ = (unsigned char)(((c >> 6) & 0x3F) | 0x80);
        *b++ = (unsigned char)((c & 0x3F) | 0x80);
    }
    *b = '\0';
    return (const char *)b_static;  // cast so the return type matches
}

int main() {
    int r;
    wchar_t myChar1 = L'Ω';
    r = printf("char is %s\n", unicode_to_utf8(myChar1));
    return 0;
}
Use {glib,libiconv,ICU} to convert it to UTF-8 before outputting.
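A hedged sketch of what the iconv route can look like (it assumes glibc, where the encoding name "WCHAR_T" is accepted and iconv lives in libc; other platforms may need a name like "UTF-32LE" and -liconv at link time):

#include <stdio.h>
#include <wchar.h>
#include <iconv.h>

int main(void) {
    wchar_t in = L'Ω';
    char out[8] = {0};

    char *inbuf = (char *)&in;
    char *outbuf = out;
    size_t inleft = sizeof in;           /* one wide character */
    size_t outleft = sizeof out - 1;

    iconv_t cd = iconv_open("UTF-8", "WCHAR_T");   /* to, from */
    if (cd == (iconv_t)-1) { perror("iconv_open"); return 1; }
    if (iconv(cd, &inbuf, &inleft, &outbuf, &outleft) == (size_t)-1) {
        perror("iconv");
        return 1;
    }
    iconv_close(cd);

    printf("char is %s\n", out);
    return 0;
}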