I would like to convert a very long (arbitrary length, possibly 1000 characters long; university assignment) string into binary. How should I approach this problem? I have thought about it for a while, but I just can't seem to think of anything viable.
The string will be passed to me as const char *str. I want to read the number, which will be in Base 10, and convert it into binary.
Should I read a certain number of the least significant digits and store them in an unsigned long long int, and then work from there? Is there a better solution? I don't know how the method I suggested would pan out, and I wanted to know if there's a better/easier way to do it.
Thank you.
Assuming your input is too large for the biggest integer type, you have to convert it to an unlimited-size integer. For this purpose you can use gmplib. If you are not allowed to use external libraries, you can take a different approach:
Is your string divisible by two? (Look at the last digit.)
If yes, write 0 to the left side of your output.
Else, write 1 to the left side of your output.
Divide the string by 2 (digit by digit, carrying the remainder into the next digit).
Repeat while the string is not all zeros.
Here you go:
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h> // strlen, strcpy, memcpy

typedef struct char_queue {
    unsigned int len;
    unsigned int capacity;
    char* data;
} char_queue;

char_queue init_char_queue() {
    return (char_queue) {
        0,
        4096,
        malloc(4096)
    };
}

void enqueue(char_queue* queue, char val) {
    if (queue->len == queue->capacity) {
        // Grow the buffer and keep the existing content at its end,
        // because the queue is filled from the back towards the front.
        unsigned int new_capacity = queue->capacity + 4096;
        char* new_queue_data = malloc(new_capacity);
        memcpy(new_queue_data + new_capacity - queue->len,
               queue->data + queue->capacity - queue->len, queue->len);
        free(queue->data);
        queue->data = new_queue_data;
        queue->capacity = new_capacity;
    }
    queue->len++;
    queue->data[queue->capacity - queue->len] = val;
}

char* queue_get_arr(char_queue* queue) {
    char* output = malloc(queue->len);
    memcpy(output, &queue->data[queue->capacity - queue->len], queue->len);
    return output;
}

void free_char_queue(char_queue* queue) {
    if (queue->data) free(queue->data);
}

void convert_to_digit_arr(char* input, unsigned int len) {
    for (unsigned int i = 0; i < len; i++) {
        input[i] = input[i] - '0'; // '5' - '0' = 5
    }
}

bool is_null(char* input, unsigned int len) {
    for (unsigned int i = 0; i < len; i++) {
        if (input[i] != 0) return false;
    }
    return true;
}

bool divisible_by_two(char* digit_arr, unsigned int len) {
    return digit_arr[len - 1] % 2 == 0;
}

void divide_by_two(char* digit_arr, unsigned int len) {
    for (unsigned int i = 0; i < len; i++) {
        bool is_odd = digit_arr[i] % 2 == 1;
        digit_arr[i] /= 2;
        if (is_odd && i + 1 < len) { // carry into the next (right) digit
            digit_arr[i + 1] += 10;
        }
    }
}

int main(int argc, char** argv) {
    for (int i = 1; i < argc; i++) {
        unsigned int input_len = strlen(argv[i]);
        char* input = malloc(input_len + 1);
        strcpy(input, argv[i]);
        convert_to_digit_arr(input, input_len);

        char_queue queue = init_char_queue();
        enqueue(&queue, 0); // null terminator, so the queue content can be used as a string
        while (!is_null(input, input_len)) {
            enqueue(&queue, divisible_by_two(input, input_len) ? '0' : '1');
            divide_by_two(input, input_len);
        }
        free(input);

        char* output = queue_get_arr(&queue);
        printf("%s\n", output);
        free(output);
        free_char_queue(&queue);
    }
}
This is not the fastest approach, but it is very simple. Also feel free to optimize it.
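For comparison, the gmplib route mentioned at the top of this answer might look roughly like this (a minimal sketch, assuming GMP is installed and the program is linked with -lgmp):
#include <gmp.h>
#include <stdio.h>
#include <stdlib.h>

int main(int argc, char** argv) {
    for (int i = 1; i < argc; i++) {
        mpz_t n;
        if (mpz_init_set_str(n, argv[i], 10) != 0) { // parse the base-10 string
            fprintf(stderr, "not a decimal number: %s\n", argv[i]);
            mpz_clear(n);
            continue;
        }
        char* bin = malloc(mpz_sizeinbase(n, 2) + 2); // digits + sign + '\0'
        mpz_get_str(bin, 2, n);                       // write the base-2 digits
        printf("%s\n", bin);
        free(bin);
        mpz_clear(n);
    }
}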
How do I convert a really long string (as decimal characters) to binary?
Let us look at printing this.
print2(s)
If the decimal string is at least "2",
__ Divide the decimal string by 2 and notice its remainder.
__ Recursively call print2(s)
__ Print the remainder.
Else print the string.
Example code:
#include <stdio.h>
#include <string.h> // memmove, strlen, strcpy
unsigned decimal_string_divide(char *dividend, unsigned divisor) {
// Remove a potential leading '0'
if (*dividend == '0') {
memmove(dividend, dividend+1, strlen(dividend));
}
// "divide", like we learned in grade school.
unsigned remainder = 0;
while (*dividend) {
unsigned sum = remainder*10 + (*dividend - '0');
remainder = sum%divisor;
*dividend = sum/divisor + '0';
dividend++;
}
return remainder;
}
void decimal_string_print_binary(char *dividend) {
//printf("<%s>\n", dividend); fflush(stdout);
if (dividend[0]) {
// If at least 2 digits or at least "2"
if (dividend[1] || (dividend[0] >= '2')) {
unsigned bit = decimal_string_divide(dividend, 2);
decimal_string_print_binary(dividend);
printf("%c", bit + '0');
} else {
printf("%c", *dividend);
}
}
}
void decimal_string_print_2(const char *dividend) {
printf("%-25s", dividend);
size_t sz = strlen(dividend) + 1;
char buf[sz]; // Use a VLA or allocate memory
strcpy(buf, dividend);
decimal_string_print_binary(buf);
printf("\n");
}
Test
int main(void) {
decimal_string_print_2("0");
decimal_string_print_2("1");
decimal_string_print_2("42");
decimal_string_print_2("8675309");
decimal_string_print_2("18446744073709551615");
}
Output
0 0
1 1
42 101010
8675309 100001000101111111101101
18446744073709551615 1111111111111111111111111111111111111111111111111111111111111111
To instead convert the string from decimal form into a binary string, allocate a sufficient buffer (about log2(10) ≈ 3.33 times the string length, plus room for the terminator) and, instead of printing above, save to the buffer. Left for OP to do.
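For instance, a buffer-filling variant could reuse decimal_string_divide() from above (a sketch only; the helper name decimal_string_to_binary and the exact size handling are my assumptions, not part of the original answer):
// Writes the binary form of the decimal string into out (out_sz bytes).
// dividend is modified in place, just like in the printing version.
void decimal_string_to_binary(char *dividend, char *out, size_t out_sz) {
    if (out_sz == 0) return;
    size_t n = 0;
    // Collect remainders (bits), least significant first.
    while (dividend[0] && !(dividend[0] == '0' && dividend[1] == '\0')
            && n + 1 < out_sz) {
        out[n++] = (char)(decimal_string_divide(dividend, 2) + '0');
    }
    if (n == 0 && out_sz > 1) out[n++] = '0'; // the input was "0"
    out[n] = '\0';
    // Reverse in place so the most significant bit comes first.
    for (size_t i = 0, j = n; i + 1 < j; i++, j--) {
        char t = out[i]; out[i] = out[j - 1]; out[j - 1] = t;
    }
}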
I am suggesting a better approach, whereby any argument passed to a function that is not intended to be mutated is received as a const, and a local pointer is used to access the data of this const.
I.e.:
void to_binary(const char *str) {
const char *ptr = str; /* keep the const; a plain char * would discard the qualifier */
...
Then use ptr.
I know that in this case my argument is purely trivial and academic, but it is a good practice to get used to and may save you many headaches in the future.
Also, when dealing with binary data, use unsigned char to avoid unwanted sign conversions. You will need bit 7 if the data is not ASCII or the like.
Right now I am trying to convert an int to a char in C programming. After doing research, I found that I should be able to do it like this:
int value = 10;
char result = (char) value;
What I would like is for this to return 'A' (and for 0-9 to return '0'-'9') but this returns a new line character I think.
My whole function looks like this:
char int2char (int radix, int value) {
if (value < 0 || value >= radix) {
return '?';
}
char result = (char) value;
return result;
}
To convert an int to a char you do not have to do anything:
char x;
int y;
/* do something */
x = y;
If you only want to convert one int value to the printable (usually ASCII) digit character, like in your example:
const char digits[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
int inttochar(int val, int base)
{
return digits[val % base];
}
If you want to convert to a string (char *), then you need to use one of the standard functions like sprintf, itoa, ltoa, utoa, ultoa ... or write one yourself:
char *reverse(char *str);
const char digits[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
char *convert(int number, char *buff, int base)
{
char *result = (buff == NULL || base > strlen(digits) || base < 2) ? NULL : buff;
char sign = 0;
if (number < 0)
{
sign = '-';
}
if (result != NULL)
{
do
{
*buff++ = digits[abs(number % (base ))];
number /= base;
} while (number);
if(sign) *buff++ = sign;
if (!*result) *buff++ = '0';
*buff = 0;
reverse(result);
}
return result;
}
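The convert() above relies on a reverse() helper that is declared but not shown. A possible definition plus a usage example (my sketch, under the assumption that reverse() simply reverses the string in place and returns it):
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

// Assumed helper: reverse the string in place and return it.
char *reverse(char *str)
{
    for (size_t i = 0, j = strlen(str); i + 1 < j; i++, j--)
    {
        char tmp = str[i];
        str[i] = str[j - 1];
        str[j - 1] = tmp;
    }
    return str;
}

int main(void)
{
    char buf[64];
    puts(convert(-42, buf, 10)); // -42
    puts(convert(255, buf, 16)); // FF
    puts(convert(6, buf, 2));    // 110
    return 0;
}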
A portable way of doing this would be to define a
const char* foo = "0123456789ABC...";
where ... are the rest of the characters that you want to consider.
Then foo[value] will evaluate to a particular char. For example, foo[0] will be '0', and foo[10] will be 'A'.
If you assume a particular encoding (such as the common but by no means ubiquitous ASCII) then your code is not strictly portable.
Characters use an encoding (typically ASCII) to map numbers to a particular character. The codes for the characters '0' to '9' are consecutive, so for values less than 10 you add the value to the character constant '0'. For values 10 or more, you add the value minus 10 to the character constant 'A':
char result;
if (value >= 10) {
result = 'A' + value - 10;
} else {
result = '0' + value;
}
Converting Int to Char
I take it that OP wants more than just a 1-digit conversion, as radix was supplied.
To convert an int into a string (not just 1 char), there is the sprintf(buf, "%d", value) approach.
To do so to any radix, string management becomes an issue, as well as dealing with the corner case of INT_MIN.
The following C99 solution returns a char* whose lifetime is valid to the end of the block. It does so by providing a compound literal via the macro.
#include <stdio.h>
#include <stdlib.h>
#include <limits.h>
// Maximum buffer size needed
#define ITOA_BASE_N (sizeof(unsigned)*CHAR_BIT + 2)
char *itoa_base(char *s, int x, int base) {
s += ITOA_BASE_N - 1;
*s = '\0';
if (base >= 2 && base <= 36) {
int x0 = x;
do {
*(--s) = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"[abs(x % base)];
x /= base;
} while (x);
if (x0 < 0) {
*(--s) = '-';
}
}
return s;
}
#define TO_BASE(x,b) itoa_base((char [ITOA_BASE_N]){0} , (x), (b))
Sample usage and tests
void test(int x) {
printf("base10:% 11d base2:%35s base36:%7s ", x, TO_BASE(x, 2), TO_BASE(x, 36));
printf("%ld\n", strtol(TO_BASE(x, 36), NULL, 36));
}
int main(void) {
test(0);
test(-1);
test(42);
test(INT_MAX);
test(-INT_MAX);
test(INT_MIN);
}
Output
base10: 0 base2: 0 base36: 0 0
base10: -1 base2: -1 base36: -1 -1
base10: 42 base2: 101010 base36: 16 42
base10: 2147483647 base2: 1111111111111111111111111111111 base36: ZIK0ZJ 2147483647
base10:-2147483647 base2: -1111111111111111111111111111111 base36:-ZIK0ZJ -2147483647
base10:-2147483648 base2: -10000000000000000000000000000000 base36:-ZIK0ZK -2147483648
Ref How to use compound literals to fprintf() multiple formatted numbers with arbitrary bases?
Check out the ASCII table.
The values stored in a char are interpreted as the characters corresponding to that table. The value 10 is a newline.
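A quick demonstration of that point (assuming an ASCII execution character set):
#include <stdio.h>

int main(void) {
    int value = 10;
    printf("[%c]\n", (char)value);      // ASCII 10 is '\n', so a line break appears between the brackets
    printf("[%c]\n", '0' + value);      // ':' -- one code past '9' in the table
    printf("[%c]\n", 'A' + value - 10); // 'A'
    return 0;
}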
Characters in C are based on ASCII (or UTF-8, which is backwards-compatible with ASCII codes). This means that under the hood, 'A' is actually the number 65 (stored in binary rather than decimal). All a char is in C is an integer type with enough bytes to represent every ASCII character. If you want to convert an int to a char, you'll need to instruct the computer to interpret the bytes of an int as ASCII values, and it's been a while since I've done C, but I believe the compiler will complain since char holds fewer bytes than int. This means we need a function, as you've written. Thus,
if(value < 10) return '0'+value;
return 'A'+value-10;
will be what you want to return from your function. Keep your bounds checks with radix as you've done; imho that is good practice in C.
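Putting that together with the original bounds check, the whole function might look like this (a sketch; it assumes uppercase letters are wanted for values of 10 and above):
char int2char(int radix, int value) {
    if (value < 0 || value >= radix) {
        return '?';            // out of range for this radix
    }
    if (value < 10) {
        return '0' + value;    // '0'..'9'
    }
    return 'A' + value - 10;   // 'A', 'B', ... for 10, 11, ...
}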
1. Converting int to char by type casting
Source File charConvertByCasting.c
#include <stdio.h>
int main(){
int i = 66; // ~~Type Casting Syntax~~
printf("%c", (char) i); // (type_name) expression
return 0;
}
Executable charConvertByCasting.exe command line output:
C:\Users\boqsc\Desktop\tcc>tcc -run charconvert.c
B
Additional resources:
https://www.tutorialspoint.com/cprogramming/c_type_casting.htm
https://www.tutorialspoint.com/cprogramming/c_data_types.htm
2. Convert int to char by assignment
Source File charConvertByAssignment.c
#include <stdio.h>
int main(){
int i = 66;
char c = i;
printf("%c", c);
return 0;
}
Executable charConvertByAssignment.exe command line output:
C:\Users\boqsc\Desktop\tcc>tcc -run charconvert.c
B
You can do
char a;
a = '0' + 5;
You will get the character representation of that number.
Borrowing the idea from the existing answers, i.e. making use of array index.
Here is a "just works" simple demo for "integer to char[]" conversion in base 10, without any of <stdio.h>'s printf family interfaces.
Test:
$ cc -o testint2str testint2str.c && ./testint2str
Result: 234789
Code:
#include <stdio.h>
#include <string.h>
static char digits[] = "0123456789";
void int2str (char *buf, size_t sz, int num);
/*
Test:
cc -o testint2str testint2str.c && ./testint2str
*/
int
main ()
{
int num = 234789;
char buf[1024] = { 0 };
int2str (buf, sizeof buf, num);
printf ("Result: %s\n", buf);
}
void
int2str (char *buf, size_t sz, int num)
{
/*
Convert integer type to char*, in base-10 form.
*/
char *bufp = buf;
int i = 0;
// NOTE-1
void __reverse (char *__buf, int __start, int __end)
{
char __bufclone[__end - __start];
int i = 0;
int __nchars = sizeof __bufclone;
for (i = 0; i < __nchars; i++)
{
__bufclone[i] = __buf[__end - 1 - i];
}
memmove (__buf, __bufclone, __nchars);
}
while (num > 0)
{
bufp[i++] = digits[num % 10]; // NOTE-2
num /= 10;
}
__reverse (buf, 0, i);
// NOTE-3
bufp[i] = '\0';
}
NOTE-1: "Nested function" is a GNU C extension. Move it outside the enclosing function if not compiling with GCC.
NOTE-2: 10 can be replaced by any radix, like 16 for hexadecimal output.
NOTE-3: Make sure to insert the trailing null terminator after everything else is done.
I've just recently been required to work with C—I normally work with Python and a bit of Java—and I've been running into some issues.
I created a function that converts a base-10 unsigned int into a character array that represents the equivalent hex. Now I need to be able to set a variable with type uint32_t to this 'hex'; what can I do to make sure this char[] is treated as an actual hex value?
The code is below:
int DecToHex(long int conversion, char * regParams[])
{
int hold[8];
for (int index = 0; conversion > 0; index++)
{
hold[index] = conversion % 16;
conversion = conversion / 16;
}
int j = 0;
for (int i = 7; i > -1; i--)
{
if (hold[i] < 10 && hold[i] >= 0)
{
regParams[j] = '0' + hold[i];
}
else if (hold[i] > 9 && hold[i] < 16)
{
regParams[j] = '7' + hold[i];
}
else
{
j--;
}
j++;
}
return 0;
}
You should just use snprintf:
int x = 0xDA8F;
char buf[9];
snprintf(buf, sizeof(buf), "%X", x); // Use %x for lowercase hex digits
To convert a hex representation of a number to an int, use strtol (the third argument to it lets you specify the base), or, since you want to assign it to an unsigned data type, strtoul.
The code would look something like this:
char* HexStr = "8ADF4B";
uint32_t Num = strtoul(HexStr, NULL, 16);
printf("%X\n", Num); // Outputs 8ADF4B
Without resorting to the standard library's utoa, I'm looking for the source code of utoa so I may customise it for a specific project. I have an unsigned integer (32 bits) whose output should look like 0xFFFF_FFFF.
I am also looking for source code to convert an unsigned integer and a half word to a string in binary format.
Try this:
char *dec(unsigned x, char *s)
{
*--s = 0;
if (!x) *--s = '0';
for (; x; x/=10) *--s = '0'+x%10;
return s;
}
You call it with a pointer to the end of a caller-provided buffer, and the function returns the pointer to the beginning of the string. The buffer should have length at least 3*sizeof(int)+1 to be safe.
Of course this is easily adapted to other bases.
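An example call might look like this (the buffer size follows the rule of thumb above; the surrounding main() is my addition):
#include <stdio.h>

int main(void)
{
    char buf[3 * sizeof(unsigned) + 1];
    char *s = dec(12345u, buf + sizeof buf); // pass a pointer one past the end
    printf("%s\n", s);                       // prints 12345
    return 0;
}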
There are a lot of itoa source files easily found on Google. That should give you what you want, e.g. itoa from opensource.apple.com.
Or write it from scratch; it's not too hard.
That's not terribly hard. Keep dividing by 10 and use the remainder mod 10 as an index into "0123456789". You build this up from right to left, so you have to buffer the result and return it in reverse:
char * utoa(unsigned int n)
{
    char * res, buf[30]; // long enough for the largest number
    unsigned int i, counter = 0;
    if (n == 0)
        buf[counter++] = '0';
    for ( ; n; n /= 10)
        buf[counter++] = "0123456789"[n%10];
    res = malloc(counter + 1); // +1 for the terminating '\0'
    for (i = 0; i < counter; ++i)
        res[i] = buf[counter - i - 1];
    res[counter] = '\0';
    return res;
}
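Example usage (my addition; note the caller owns the returned buffer and must free() it):
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
    char *s = utoa(1234567890u);
    printf("%s\n", s); // 1234567890
    free(s);
    return 0;
}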
#include <stdint.h>
#include <string.h>
char * utox(uint32_t n) {
static char hexstr[sizeof(n)*2+1];
char * p = hexstr + sizeof(hexstr) -1;
int x;
memset(hexstr, '0', sizeof(hexstr));
*p-- = '\0';
while (n) {
x = n % 16;
if (x < 10)
*p-- = '0' + x;
else
*p-- = 'A' + x - 10;
n /= 16;
}
return hexstr;
}
This should do it; it zero-pads. Simply changing the type of n in the function parameters will make it work for any integer type/size.
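Example usage (my addition; note the function returns a pointer to a static buffer, so each call overwrites the previous result):
#include <stdio.h>

int main(void)
{
    printf("%s\n", utox(0xBEEFu));     // 0000BEEF (zero padded)
    printf("%s\n", utox(0xDEADBEEFu)); // DEADBEEF
    return 0;
}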
Most of the functions mentioned/suggested here use the modulus % operator, which is very expensive on embedded systems.
So the divide-by-repeated-subtraction idea is the only prominent option, I guess. Here it is:
/* for a byte, which is 3 digits at most; the output is zero-padded, e.g. "042" */
void itoa(unsigned char value, char *destination)
{
    destination[0] = '0';
    destination[1] = '0';
    destination[2] = '0';
    destination[3] = '\0'; /* you need at least a 4-char array; the last char is the NUL terminator */
    while (value >= 100)
    {
        destination[0]++;
        value -= 100;
    }
    while (value >= 10)
    {
        destination[1]++;
        value -= 10;
    }
    destination[2] += value;
}
There are many implementations of itoa. This is one of them, which takes into consideration bases from 2 to 36:
/**
* C++ version 0.4 char* style "itoa":
* Written by Lukás Chmela
* Released under GPLv3.
*/
char* itoa(int value, char* result, int base) {
// check that the base is valid
if (base < 2 || base > 36) { *result = '\0'; return result; }
char* ptr = result, *ptr1 = result, tmp_char;
int tmp_value;
do {
tmp_value = value;
value /= base;
*ptr++ = "zyxwvutsrqponmlkjihgfedcba9876543210123456789abcdefghijklmnopqrstuvwxyz" [35 + (tmp_value - value * base)];
} while ( value );
// Apply negative sign
if (tmp_value < 0) *ptr++ = '-';
*ptr-- = '\0';
while(ptr1 < ptr) {
tmp_char = *ptr;
*ptr--= *ptr1;
*ptr1++ = tmp_char;
}
return result;
}
More variations can be found here, which include speed comparisons of the various implementations.
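Example usage (my addition; note this version produces lowercase digits and prefixes the '-' for negative values in any base):
#include <stdio.h>

int main(void)
{
    char buf[40];
    printf("%s\n", itoa(-42, buf, 10)); // -42
    printf("%s\n", itoa(255, buf, 16)); // ff
    printf("%s\n", itoa(-1, buf, 2));   // -1
    return 0;
}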
I was wondering if my implementation of an "itoa" function is correct. Maybe you can help me get it a bit more "correct"; I'm pretty sure I'm missing something. (Maybe there is already a library doing the conversion the way I want it to, but... I couldn't find any.)
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
char * itoa(int i) {
char * res = malloc(8*sizeof(int));
sprintf(res, "%d", i);
return res;
}
int main(int argc, char *argv[]) {
...
// Yet another good itoa implementation
// returns: the length of the number string
int itoa(int value, char *sp, int radix)
{
char tmp[16];// be careful with the length of the buffer
char *tp = tmp;
int i;
unsigned v;
int sign = (radix == 10 && value < 0);
if (sign)
v = -value;
else
v = (unsigned)value;
while (v || tp == tmp)
{
i = v % radix;
v /= radix;
if (i < 10)
*tp++ = i+'0';
else
*tp++ = i + 'a' - 10;
}
int len = tp - tmp;
if (sign)
{
*sp++ = '-';
len++;
}
while (tp > tmp)
*sp++ = *--tp;
return len;
}
// Usage Example:
char int_str[15]; // be careful with the length of the buffer
int n = 56789;
int len = itoa(n,int_str,10);
The only actual error is that you don't check the return value of malloc for null.
The name itoa is kind of already taken for a function that's non-standard, but not that uncommon. It doesn't allocate memory, rather it writes to a buffer provided by the caller:
char *itoa(int value, char * str, int base);
If you don't want to rely on your platform having that, I would still advise following the pattern. String-handling functions which return newly allocated memory in C are generally more trouble than they're worth in the long run, because most of the time you end up doing further manipulation, and so you have to free lots of intermediate results. For example, compare:
void delete_temp_files() {
char filename[20];
strcpy(filename, "tmp_");
char *endptr = filename + strlen(filename);
for (int i = 0; i < 10; ++i) {
itoa(i, endptr, 10); // itoa doesn't allocate memory
unlink(filename);
}
}
vs.
void delete_temp_files() {
char filename[20];
strcpy(filename, "tmp_");
char *endptr = filename + strlen(filename);
for (int i = 0; i < 10; ++i) {
char *number = itoa(i, 10); // itoa allocates memory
strcpy(endptr, number);
free(number);
unlink(filename);
}
}
If you had reason to be especially concerned about performance (for instance if you're implementing a stdlib-style library including itoa), or if you were implementing bases that sprintf doesn't support, then you might consider not calling sprintf. But if you want a base 10 string, then your first instinct was right. There's absolutely nothing "incorrect" about the %d format specifier.
Here's a possible implementation of itoa, for base 10 only:
char *itobase10(char *buf, int value) {
sprintf(buf, "%d", value);
return buf;
}
Here's one which incorporates the snprintf-style approach to buffer lengths:
int itobase10n(char *buf, size_t sz, int value) {
return snprintf(buf, sz, "%d", value);
}
A good int to string or itoa() has these properties;
Works for all [INT_MIN...INT_MAX], base [2...36] without buffer overflow.
Does not assume int size.
Does not require 2's complement.
Does not require unsigned to have a greater positive range than int. In other words, does not use unsigned.
Allows use of '-' for negative numbers, even when base != 10.
Tailor the error handling as needed. (needs C99 or later):
char* itostr(char *dest, size_t size, int a, int base) {
// Max text needs occur with itostr(dest, size, INT_MIN, 2)
char buffer[sizeof a * CHAR_BIT + 1 + 1];
static const char digits[36] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
if (base < 2 || base > 36) {
fprintf(stderr, "Invalid base");
return NULL;
}
// Start filling from the end
char* p = &buffer[sizeof buffer - 1];
*p = '\0';
// Work with negative `int`
int an = a < 0 ? a : -a;
do {
*(--p) = digits[-(an % base)];
an /= base;
} while (an);
if (a < 0) {
*(--p) = '-';
}
size_t size_used = &buffer[sizeof(buffer)] - p;
if (size_used > size) {
fprintf(stderr, "Scant buffer %zu > %zu", size_used , size);
return NULL;
}
return memcpy(dest, p, size_used);
}
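Sample usage (my addition, not part of the original answer):
#include <limits.h>
#include <stdio.h>

int main(void)
{
    char buf[40]; // enough for INT_MIN in base 2, plus sign and terminator
    puts(itostr(buf, sizeof buf, INT_MIN, 2));
    puts(itostr(buf, sizeof buf, 255, 16)); // FF
    puts(itostr(buf, sizeof buf, -42, 10)); // -42
    return 0;
}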
I think you are allocating perhaps too much memory. malloc(8*sizeof(int)) will give you 32 bytes on most machines, which is probably excessive for a text representation of an int.
I found an interesting resource dealing with several different issues with itoa implementations.
You might want to look it up too:
itoa() implementations with performance tests
I'm not quite sure where you get 8*sizeof(int) as the maximum possible number of characters -- ceil(8 / (log(10) / log(2))) yields a multiplier of 3, i.e. about 3 characters per byte. Additionally, under C99 and some older POSIX platforms you can create an accurately-allocating version with snprintf():
char *
itoa(int i)
{
int n = snprintf(NULL, 0, "%d", i) + 1;
char *s = malloc(n);
if (s != NULL)
snprintf(s, n, "%d", i);
return s;
}
HTH
You should use a function in the printf family for this purpose. If you'll be writing the result to stdout or a file, use printf/fprintf. Otherwise, use snprintf with a buffer big enough to hold 3*sizeof(type)+2 bytes or more.
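A minimal sketch of that advice (the buffer size follows the rule of thumb just given):
#include <stdio.h>

int main(void)
{
    int value = -12345;
    char buf[3 * sizeof(int) + 2]; // "3*sizeof(type)+2 bytes or more"
    snprintf(buf, sizeof buf, "%d", value);
    printf("%s\n", buf); // -12345
    return 0;
}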
sprintf is quite slow; if performance matters, it is probably not the best solution.
If the base argument is a power of 2, the conversion can be done with a shift and a mask, and one can avoid reversing the string by recording the digits from the highest positions. For instance, something like this for base=16:
const int bits_per_digit = 4; /* log2(base), base = 16 */
int num_iter = sizeof(int) * 8 / bits_per_digit - 1; /* index of the highest digit */
const char digits[] = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
/* skip zeros in the highest positions (stop at 0 so that value == 0 still yields one digit) */
int i = num_iter;
for (; i > 0; i--)
{
int digit = (value >> (bits_per_digit*i)) & 15;
if ( digit > 0 ) break;
}
for (; i >= 0; i--)
{
int digit = (value >> (bits_per_digit*i)) & 15;
result[len++] = digits[digit];
}
For decimals there is a nice idea to use a static array big enough to record the numbers in the reversed order, see here
Integer-to-ASCII needs to convert data from a standard integer type
into an ASCII string.
All operations need to be performed using pointer arithmetic, not array indexing.
The number you wish to convert is passed in as a signed 32-bit integer.
You should be able to support bases 2 to 16 by specifying the integer value of the base you wish to convert to (base).
Copy the converted character string to the uint8_t* pointer passed in as a parameter (ptr).
The signed 32-bit number will have a maximum string size (Hint: Think base 2).
You must place a null terminator at the end of the converted c-string Function should return the length of the converted data (including a negative sign).
Example my_itoa(ptr, 1234, 10) should return an ASCII string length of 5 (including the null terminator).
This function needs to handle signed data.
You may not use any string functions or libraries.
.
uint8_t my_itoa(int32_t data, uint8_t *ptr, uint32_t base){
uint8_t cnt=0,sgnd=0;
uint8_t *tmp=calloc(32,sizeof(*tmp));
if(!tmp){exit(1);}
if(data<0){data=-data;sgnd=1;}
if(data==0){*(tmp+cnt)='0';cnt++;}
while(data!=0){
if(data%base<10){*(tmp+cnt)=(data%base)+48;} /* '0'..'9' */
else{*(tmp+cnt)=(data%base)+55;} /* 'A'..'F' */
data/=base;
cnt++;
}
if(sgnd){*(tmp+cnt)=45;++cnt;} /* '-' sign */
my_reverse(tmp, cnt);
my_memcopy(tmp,ptr,cnt);
*(ptr+cnt)='\0'; /* null terminator, included in the returned length */
free(tmp);
return ++cnt;
}
ASCII-to-Integer needs to convert data back from an ASCII represented string into an integer type.
All operations need to be performed using pointer arithmetic, not array indexing
The character string to convert is passed in as a uint8_t * pointer (ptr).
The number of digits in your character set is passed in as a uint8_t integer (digits).
You should be able to support bases 2 to 16.
The converted 32-bit signed integer should be returned.
This function needs to handle signed data.
You may not use any string functions or libraries.
.
int32_t my_atoi(uint8_t *ptr, uint8_t digits, uint32_t base){
int32_t sgnd=0, rslt=0;
for(int i=0; i<digits; i++){
if(*(ptr)=='-'){*ptr='0';sgnd=1;}
else if(*(ptr+i)>'9'){rslt+=(*(ptr+i)-'7');}
else{rslt+=(*(ptr+i)-'0');}
if(!*(ptr+i+1)){break;}
rslt*=base;
}
if(sgnd){rslt=-rslt;}
return rslt;
}
I don't know about good, but this is my implementation that I did while learning C
static int ft_getintlen(int value)
{
int l;
int neg;
l = 1;
neg = 1;
if (value < 0)
{
value *= -1;
neg = -1;
}
while (value > 9)
{
l++;
value /= 10;
}
if (neg == -1)
{
return (l + 1);
}
return (l);
}
static int ft_isneg(int n)
{
if (n < 0)
return (-1);
return (1);
}
static char *ft_strcpy(char *dest, const char *src)
{
unsigned int i;
i = 0;
while (src[i] != '\0')
{
dest[i] = src[i];
i++;
}
dest[i] = src[i];
return (dest);
}
char *ft_itoa(int n)
{
size_t len;
char *instr;
int neg;
if (n == -2147483648)
{
instr = (char *)malloc(12);
return (instr ? ft_strcpy(instr, "-2147483648") : NULL);
}
neg = ft_isneg(n);
len = ft_getintlen(n);
instr = (char *)malloc((sizeof(char) * len) + 1);
if (!instr)
return (NULL);
if (neg == -1)
n *= -1;
instr[len--] = 0;
if (n == 0)
instr[len--] = 48;
while (n)
{
instr[len--] = ((n % 10) + 48);
n /= 10;
}
if (neg == -1)
instr[len] = '-';
return (instr);
}
This should work:
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <math.h>
char * itoa_alloc(int x) {
int s = x<=0 ? 1 : 0; // either space for a '-' or for a "0"
size_t len = (size_t) floor( log10( abs(x ? x : 1) ) ) + 1;
char * str = malloc(len + s + 1);
sprintf(str, "%i", x);
return str;
}
If you don't want to have to use the math/floating point functions (and have to link in the math libraries) you should be able to find non-floating point versions of log10 by searching the Web and do:
size_t len = my_log10( abs(x) ) + 1;
That might give you 1 more byte than you needed, but you'd have enough.
There a couple of suggestions I might make. You can use a static buffer and strdup to avoid repeatedly allocating too much memory on subsequent calls. I would also add some error checking.
char *itoa(int i)
{
static char buffer[12];
if (snprintf(buffer, sizeof(buffer), "%d", i) < 0)
return NULL;
return strdup(buffer);
}
If this will be called in a multithreaded environment, remove "static" from the buffer declaration.
This is chux's code without safety checks and the ifs. Try it online:
char* itostr(char * const dest, size_t const sz, int a, int const base) {
bool posa = a >= 0;
char buffer[sizeof a * CHAR_BIT + 1];
static const char digits[36] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
char* p = &buffer[sizeof buffer - 1];
do {
*(p--) = digits[abs(a % base)];
a /= base;
} while (a);
*p = '-';
p += posa;
size_t s = &buffer[sizeof(buffer)] - p;
memcpy(dest, p, s);
dest[s] = '\0';
return dest;
}
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
    int i = 1234;
    char stmp[12]; // large enough for any 32-bit int, sign and terminator
#if _MSC_VER
    puts(_itoa(i, stmp, 10));
#else
    puts((sprintf(stmp, "%d", i), stmp));
#endif
    return 0;
}