My C program is pasted below. In bash, the program prints "char is ", but the Ω is not printed. My locales are all set to en_US.utf8.
#include <stdio.h>
#include <wchar.h>
#include <stdlib.h>
int main() {
int r;
wchar_t myChar1 = L'Ω';
r = wprintf(L"char is %c\n", myChar1);
}
This was quite interesting. Apparently the compiler translates the omega from UTF-8 to Unicode, but somehow libc messes it up.
First of all: the %c format specifier expects a char (even in the wprintf version), so you have to specify %lc for a wchar_t (and correspondingly %ls for wide strings).
Secondly, if you run your code like that, the locale is set to "C" (it isn't automatically taken from the environment). You have to call setlocale with an empty string so that the locale is taken from the environment, and then libc is happy again.
#include <stdio.h>
#include <wchar.h>
#include <stdlib.h>
#include <locale.h>
int main() {
int r;
wchar_t myChar1 = L'Ω';
setlocale(LC_CTYPE, "");
r = wprintf(L"char is %lc (%x)\n", myChar1, myChar1);
}
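With the locale taken from the environment, this prints: char is Ω (3a9).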
As an alternative to the answer that fixes things on the libc side, you can do the UTF-8 encoding yourself:
#include <stdio.h>
#include <wchar.h>
#include <stdlib.h>
// NOTE: *NOT* thread safe, not re-entrant
const char* unicode_to_utf8(wchar_t c)
{
static unsigned char b_static[5];
unsigned char* b = b_static;
if (c<(1<<7))// 7 bit Unicode encoded as plain ascii
{
*b++ = (unsigned char)(c);
}
else if (c<(1<<11))// 11 bit Unicode encoded in 2 UTF-8 bytes
{
*b++ = (unsigned char)((c>>6)|0xC0);
*b++ = (unsigned char)((c&0x3F)|0x80);
}
else if (c<(1<<16))// 16 bit Unicode encoded in 3 UTF-8 bytes
{
*b++ = (unsigned char)(((c>>12))|0xE0);
*b++ = (unsigned char)(((c>>6)&0x3F)|0x80);
*b++ = (unsigned char)((c&0x3F)|0x80);
}
else if (c<(1<<21))// 21 bit Unicode encoded in 4 UTF-8 bytes
{
*b++ = (unsigned char)(((c>>18))|0xF0);
*b++ = (unsigned char)(((c>>12)&0x3F)|0x80);
*b++ = (unsigned char)(((c>>6)&0x3F)|0x80);
*b++ = (unsigned char)((c&0x3F)|0x80);
}
*b = '\0';
return (const char*)b_static;
}
int main() {
int r;
wchar_t myChar1 = L'Ω';
r = printf("char is %s\n", unicode_to_utf8(myChar1));
return 0;
}
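Note that this assumes wchar_t values are Unicode code points (true with glibc on Linux, where __STDC_ISO_10646__ is defined) and that surrogate values are never passed in.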
Alternatively, use glib, libiconv, or ICU to convert it to UTF-8 before outputting.
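For example, a minimal sketch using iconv (assuming glibc, whose iconv accepts the non-portable "WCHAR_T" encoding name for the native wide-character representation):
#include <stdio.h>
#include <string.h>
#include <wchar.h>
#include <iconv.h>
int main() {
    wchar_t myChar1 = L'Ω';
    char in[sizeof myChar1];
    char out[8] = {0};                          /* plenty of room for one UTF-8 character */
    memcpy(in, &myChar1, sizeof myChar1);
    iconv_t cd = iconv_open("UTF-8", "WCHAR_T");
    if (cd == (iconv_t)-1) { perror("iconv_open"); return 1; }
    char *inp = in, *outp = out;
    size_t inleft = sizeof in, outleft = sizeof out - 1;
    if (iconv(cd, &inp, &inleft, &outp, &outleft) == (size_t)-1) { perror("iconv"); return 1; }
    iconv_close(cd);
    printf("char is %s\n", out);
    return 0;
}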
This is my code:
#include <stdio.h>
#include <string.h>
#define VERSION "2.16.0.0"
int main ()
{
//char buf[] ="2.16.0.0";
int i = 0;
int j ;
char letter[8];
//char a[] = VERSION;
for(i=0;i<8;i++)
{
letter[i] = VERSION[i];
}
char *array;
char* copy = letter ;
while ((array = strtok_r(copy, ".", &copy)))
printf("%s\n", array);
printf("%s", array);
}
I split the macro into 2 16 0 0.
Now I want to format it as 02 16 00 00. How do I do that?
I tried using sprintf() to format the array, but that didn't work out. Is there another way?
Your program can be simplified in several ways (see below), and I have to point out at least one significant error: the copy of the string in letter does not include the terminating '\0'.
As for the printing, I understand you would like to print the numerical fields with two digits each. One way to do that is to convert them to integers and format the output using printf's formatting options:
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#define VERSION "2.16.0.0"
int main ()
{
char *element;
char copy[] = VERSION;
element = strtok(copy, ".");
while (element != NULL)
{
printf("%02d ", atoi(element));
element = strtok(NULL, ".");
}
}
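If you want the reformatted version in a buffer rather than printed directly (the sprintf() route the question mentions), here is a minimal sketch along the same lines, with the buffer size and names chosen arbitrarily:
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#define VERSION "2.16.0.0"
int main ()
{
    char copy[] = VERSION;
    char padded[32] = "";   /* holds the reformatted version string */
    size_t used = 0;
    for (char *element = strtok(copy, "."); element != NULL; element = strtok(NULL, "."))
    {
        /* append each field as a zero-padded two-digit number, space-separated */
        used += snprintf(padded + used, sizeof padded - used, "%s%02d",
                         used ? " " : "", atoi(element));
    }
    printf("%s\n", padded);   /* prints "02 16 00 00" */
}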
When using the terminal utility "openssl" with the following command:
echo -n "Hello World!" | openssl sha1
This is the output produced:
(stdin)= 2ef7bde608ce5404e97d5f042f95f89f1c232871
I've tried producing the same output with following C code:
#include <stdio.h>
#include <stdlib.h>
#include <openssl/sha.h>
int main(void){
const unsigned char source_string[1024] = "Hello World!";
unsigned char dest_string[1024];
SHA1(source_string, 16, dest_string);
printf("String: %s\nHashed string: %s\n", source_string, dest_string);
return 0;
}
However, it produces this weird, garbled output:
String: Hello World!
Hashed string: #�V�#��#�����T\�
How can I make it produce the same output as the before shown openssl terminal command?
You should:
Pass the correct length of the string (it is 12 bytes long) to SHA1.
Print the result in hexadecimal, not as a string.
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <openssl/sha.h>
int main(void){
const unsigned char source_string[1024] = "Hello World!";
unsigned char dest_string[1024];
int i;
SHA1(source_string, strlen((const char*)source_string), dest_string);
printf("String: %s\nHashed string: ", source_string);
for (i = 0; i < 20; i++) printf("%02x", dest_string[i]);
putchar('\n');
return 0;
}
I didn't check by running this, so there may be other errors.
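Side note: in OpenSSL 3.0 and later the low-level SHA1() function is deprecated in favour of the EVP interface. A rough, untested sketch of the same program using the one-shot EVP_Digest() might look like this:
#include <stdio.h>
#include <string.h>
#include <openssl/evp.h>
int main(void){
    const char *msg = "Hello World!";
    unsigned char md[EVP_MAX_MD_SIZE];
    unsigned int md_len = 0;
    /* one-shot digest; EVP_sha1() selects SHA-1 */
    if (!EVP_Digest(msg, strlen(msg), md, &md_len, EVP_sha1(), NULL))
        return 1;
    for (unsigned int i = 0; i < md_len; i++) printf("%02x", md[i]);
    putchar('\n');
    return 0;
}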
I'm trying to figure out a way to convert a string of type wchar_t* to char*, but for some reason I'm having trouble making it work. I have successfully done the opposite (from char* to wchar_t*) with mbsrtowcs(), but this one defeats me. The RETURN VALUE section in man 3 wcsrtombs has this:
The wcsrtombs() function returns the number of bytes that make up the
converted part of multibyte sequence, not including the terminating
null byte. If a wide character was encountered which could not be
converted, (size_t) -1 is returned, and errno set to EILSEQ.
Consider this minimal example:
#include <string.h>
#include <time.h>
#include <stdlib.h>
#include <ctype.h>
#include <stdbool.h>
#include <locale.h>
#include <wctype.h>
#include <wchar.h>
char *convert_to_multibyte(const wchar_t* arg, long num_chars) {
size_t buffer_size = num_chars * sizeof(wchar_t);
char *mb = malloc(buffer_size); // will waste some memory though
mbstate_t state;
wcsrtombs(NULL, &arg, 0, &state); // this supposedly will initialize the mbstate_t struct
size_t result;
result = wcsrtombs(mb, &arg, buffer_size, &state);
if (result == (size_t)-1) {
free(mb);
return NULL;
}
mb[buffer_size-1] = '\0';
return mb;
}
int main(int argc, char* argv[]) {
setlocale(LC_ALL, "fi_FI.UTF-8");
wchar_t test[] = L"ÄÄÄÄÖÖÖÖ";
char *converted = convert_to_multibyte(test, wcslen(test));
// printf("%s\n", converted);
return 0;
}
With the test string L"ÄÄÄÄÖÖÖÖ", (size_t) -1 is returned, which implies an inconvertible wide char was encountered - this doesn't happen with a string that doesn't have any non-ASCII characters. What am I not understanding here?
#include <stdio.h>
#define stringify(s) tostring(s)
#define tostring(s) #s
#define MAX_VALUE 65536
#define NUM 64 * 1024
enum {
MIN_VALUE = 1024,
};
int main(int argc, char *argv[])
{
const char *max_str = stringify(MAX_VALUE);
const char *min_str = stringify(MIN_VALUE);
const char *num_str = stringify(NUM);
printf("max = %s, min = %s, num = %s\n", max_str, min_str, num_str);
return 0;
}
The output is "max = 65536, min = MIN_VALUE, num = 64 * 1024".
Experts, how can I modify my code to output this instead:
max = 65536, min = 1024, num = 65536
Thanks.
MIN_VALUE is a number. Why do you need to stringify it?
Just use:
printf("%d\n", MIN_VALUE);
I think you're better off using a function instead of a macro for this, the reason being that macros are expanded by the preprocessor before the code is even compiled, let alone run.
Consider this example:
#define stringify(V) #V
#include <stdio.h>
int main()
{
int x = 5;
const char *str = stringify(x);
printf("%s\n", str);
}
After the preprocessor has done its work, the code will look like this:
#include <stdio.h>
int main()
{
int x = 5;
const char *str = "x";
printf("%s\n", str);
}
That is because all the preprocessor operator # does is wrap the given parameter in quotes.
If you want int-to-string behaviour that works on constants, enums (cast to int), and integer variables, you can use snprintf:
#include <stdio.h>
#include <stdlib.h>
char *stringify(int x)
{
/* get the length of the required buffer */
int len = snprintf(0, 0, "%i", x);
/* allocate memory */
char *res = malloc(sizeof(char) * (len + 1));
/* handle allocation failure */
if(!res)
return 0;
/* convert the int to string */
snprintf(res, len + 1, "%i", x);
/* return the result */
return res;
}
int main()
{
int x = 5;
char *str = stringify(x);
printf("%s\n", str);
/* we free the memory allocated by malloc */
free(str);
}
This is one way you could do it in C. If you want to know more about the functions I used, have a look at:
http://www.manpagez.com/man/3/vsnprintf/
http://www.manpagez.com/man/3/malloc/
#define statements are handled by the preprocessor before the compiler gets to see the code, so it's basically text substitution (it's actually a little more intelligent with the use of parameters and such).
Since stringify(s) is #defined, the preprocessor faithfully does its job.
stringify(MAX_VALUE) expands to stringify(65536), since MAX_VALUE is #defined to 65536 and is therefore known at preprocessing time.
But enumerations are part of the C language itself and are not known during preprocessing,
so stringify(MIN_VALUE) stays as stringify(MIN_VALUE), and hence tostring(MIN_VALUE) produces "MIN_VALUE".
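To illustrate, after preprocessing (e.g. gcc -E) the two declarations end up roughly like this:
const char *max_str = "65536";     /* MAX_VALUE was substituted before # was applied */
const char *min_str = "MIN_VALUE"; /* the enum name means nothing to the preprocessor */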
As for doing integer arithmetic in order to print
num = 65536
the answer is "yes": there is a way to make the preprocessor perform integer arithmetic, which is to use it in a preprocessor conditional:
#if 1024*64 == 65536
printf("num=65536\n");
#endif
While reading the book "Let Us C" I read that a function showbits() exists which can show you the bits of a number. No special header file was mentioned for it. I searched for it on the internet and didn't find anything useful. Is there such a function? I want to use it to print the binary representation of decimal numbers. If not, please give me a replacement function. Thanks.
All integers are actually stored in binary in your computer. It's just that the value is turned into a string holding its decimal representation when you print it with printf and "%d". If you want to see what it looks like in some other base (e.g. base 2, binary), you either have to use the proper printf format specifier if one exists (e.g. "%x" for hex) or build that string yourself and print it.
Here is some code that can build the string representation of an integer in any base in [2,36].
#include <stdio.h>
#include <string.h>
char digits[]="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
void reverse(char* start, char* end)
{
for(end--;start<end;start++,end--)
{
char t=*start;
*start=*end;
*end=t;
}
}
int base_change(int n, int base,char* buffer)
{
int pos=0;
if (base>strlen(digits))
return -1;
while(n)
{
buffer[pos]=digits[n%base];
n/=base;
pos++;
}
buffer[pos]='\0';
reverse(buffer,buffer+pos);
return 0;
}
int main()
{
char buffer[32];
int conv=base_change(1024,2,buffer);
if (conv==0) printf("%s\n",buffer);
return 0;
}
You can also try this snippet which uses bit-shifting:
EDIT: (I've made it more portable)
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#define BITS_IN_BYTE 8
#define INTEGRAL_TYPE unsigned int
void showBits(INTEGRAL_TYPE x) {
int i;
static int intSizeInBits = sizeof(INTEGRAL_TYPE) * BITS_IN_BYTE;
static char symbol[2] = {'0','1'};
char * binary = (char*) malloc(intSizeInBits + 1);
memset(binary, 0, intSizeInBits + 1);
for (i=0; i< intSizeInBits; i++) {
binary[intSizeInBits-i-1] = symbol[(x>>i) & 0x01];
}
printf("%s\n", binary);
free(binary);
}
int main() {
showBits(8698513);
return 0;
}
HTH!
This is a very simple solution for printing the bits of an integer
int value = 14;
int n;
for (n=8*sizeof(int)-1;n>=0;n--) {
printf("%d",(value >>n)&1);
}
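For value = 14 on a platform with a 32-bit int, this prints 00000000000000000000000000001110.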
The book "Let Us C" doesn't define it as a built-in function. The showbits() function is defined in later part of the book as a complete user defined function as I personally went through it. This answer is to anyone who haven't reached that later part of the book and are stuck at the first appearance of the function in the book.
You need to look at _itoa(), shown below.
@downvoter: It works fine in C also. You just need to reformat your code in C style.
#include <stdlib.h>
#include <stdio.h>
int main()
{
char buffer[20];
int i = 3445;
_itoa( i, buffer, 2 );
printf("String of integer %d (radix 2): %s",i,buffer);
return 0;
}
You need to save your file as .c in MSVC++ for _itoa() to work.
This is the code for showbits (which you can put in a header file); it needs <stdio.h>:
void showbits(unsigned int x)
{
int i;
for(i=(sizeof(unsigned int)*8)-1; i>=0; i--)
(x&(1u<<i))?putchar('1'):putchar('0');
printf("\n");
}
No, there is no pre-built function, and you do not need to include any specific header file.
You will have to provide the implementation of showbits() yourself.
#include <stdio.h>
void showbits(int);
int main()
{
int j;
for(j=0;j<=11;j++)
{
printf("\nBinary value of decimal %d is :",j);
showbits(j);
}
return 0;
}
void showbits(int n){ /* prints the low 16 bits of n */
int i,k,andmask;
for(i = 15;i>=0;i--){
andmask = 1<<i;
k = n & andmask;
k==0 ? printf("0"):printf("1");
}
}
If you want to print out the bits of a float, you cannot shift a float directly; first copy its bytes into an unsigned integer (memcpy from <string.h>), for example:
float myFloat = 45.2f;
unsigned int floatBits;
memcpy(&floatBits, &myFloat, sizeof floatBits); /* reinterpret the float's object representation */
for (int n=8*sizeof(float)-1; n>=0; n--) {
printf("%u", (floatBits>>n)&1u);
}