Parsing a string into an array of integers in C

So now that I've figured out how to get what I want, I'm just hoping somebody can let me know a cleaner, less ridiculous way of achieving the same thing. I'm just learning C. Here was my approach.
#include <string.h>

int main()
{
    // String of positive and negative integer values
    // Numbers are never more than 2 digits
    char TEMPS[256] = "1 -22 -8 14 5";
    int N = 5;
    int ints[N];
    int i = 0;
    int mult;
    // Arbitrary number to identify that num is not yet in use
    int num = 999;
    int c = 0;
    mult = (TEMPS[c] == 45) ? -1 : 1;       // 45 is '-'
    while(strcmp(&TEMPS[c], "\0") != 0)
    {
        if(TEMPS[c] == 32)                  // 32 is ' '
        {
            ints[i] = mult * num;
            i++;
            num = 999;
            mult = (TEMPS[c + 1] == 45) ? -1 : 1;
        }
        else if((TEMPS[c] != 45) && (TEMPS[c] != 32))
        {
            if(num == 999)
            {
                num = TEMPS[c] - '0';
            }
            else
            {
                num = num * 10 + (TEMPS[c] - '0');
            }
        }
        c++;
    }
    ints[i] = mult * num;
}

I would use strtol; here's a good site explaining how it works, with examples:
http://www.tutorialspoint.com/c_standard_library/c_function_strtol.htm
long int strtol(const char *str, char **endptr, int base)
Parameters
str -- This is the string containing the representation of an integral number.
endptr -- This is the reference to an object of type char*, whose value is set by the function to the next character in str after the numerical value.
base -- This is the base, which must be between 2 and 36 inclusive, or be the special value 0.
Return Value
This function returns the converted integral number as a long int value; otherwise zero is returned.
I've included one example from the site.
#include <stdio.h>
#include <stdlib.h>

int main()
{
    char str[30] = "2030300 This is test";
    char *ptr;
    long ret;

    ret = strtol(str, &ptr, 10);
    printf("The number (long integer) is %ld\n", ret);
    printf("String part is |%s|", ptr);

    return(0);
}
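Applied to the string in the question, the endptr argument is what lets you walk through several numbers in one pass. A minimal sketch (assuming the same five-value input and a C99 compiler):

#include <stdio.h>
#include <stdlib.h>

int main(void)
{
    char TEMPS[256] = "1 -22 -8 14 5";
    int ints[5];
    int n = 0;
    char *p = TEMPS;
    char *end;

    /* strtol skips leading whitespace, handles the sign, and
       reports via `end` where the number stopped. */
    while (n < 5) {
        long val = strtol(p, &end, 10);
        if (end == p)          /* no digits found: stop */
            break;
        ints[n++] = (int)val;
        p = end;               /* continue after the parsed number */
    }

    for (int i = 0; i < n; i++)
        printf("%d\n", ints[i]);
    return 0;
}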


How can I convert a char array containing the user ID to uid_t? [duplicate]

I am trying to find out if there is an alternative way of converting string to integer in C.
I regularly use the following pattern in my code.
char s[] = "45";
int num = atoi(s);
So, is there a better way or another way?
There is strtol, which is better IMO. I have also taken a liking to strtonum, so use it if you have it (but remember it's not portable):

long long
strtonum(const char *nptr, long long minval, long long maxval,
         const char **errstr);
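On systems that provide it (BSD-derived C libraries), a call might look roughly like this (a sketch; the range limits here are arbitrary):

#include <stdio.h>
#include <stdlib.h>   /* strtonum on BSD; not available everywhere */

int main(void)
{
    const char *errstr;
    long long value = strtonum("45", 0, 1000, &errstr);

    if (errstr != NULL)
        printf("conversion failed: %s\n", errstr);   /* e.g. "too large", "invalid" */
    else
        printf("%lld\n", value);
    return 0;
}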
You might also be interested in strtoumax and strtoimax which are standard functions in C99. For example you could say:
uintmax_t num = strtoumax(s, NULL, 10);
if (num == UINTMAX_MAX && errno == ERANGE)
/* Could not convert. */
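A self-contained version of that check might look like this (a sketch; strtoumax is declared in <inttypes.h> and the errno check needs <errno.h>):

#include <errno.h>
#include <inttypes.h>
#include <stdio.h>

int main(void)
{
    const char *s = "18446744073709551616";   /* one past UINTMAX_MAX, assuming 64-bit uintmax_t */

    errno = 0;
    uintmax_t num = strtoumax(s, NULL, 10);
    if (num == UINTMAX_MAX && errno == ERANGE)
        puts("Could not convert.");
    else
        printf("%ju\n", num);
    return 0;
}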
Anyway, stay away from atoi:
The call atoi(str) shall be equivalent to:
(int) strtol(str, (char **)NULL, 10)
except that the handling of errors may differ. If the value cannot be
represented, the behavior is undefined.
Robust C89 strtol-based solution
With:
no undefined behavior (as could be had with the atoi family)
a stricter definition of integer than strtol (e.g. no leading whitespace nor trailing trash chars)
classification of the error case (e.g. to give useful error messages to users)
a "testsuite"
#include <assert.h>
#include <ctype.h>
#include <errno.h>
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
typedef enum {
    STR2INT_SUCCESS,
    STR2INT_OVERFLOW,
    STR2INT_UNDERFLOW,
    STR2INT_INCONVERTIBLE
} str2int_errno;

/* Convert string s to int out.
 *
 * @param[out] out The converted int. Cannot be NULL.
 *
 * @param[in] s Input string to be converted.
 *
 *     The format is the same as strtol,
 *     except that the following are inconvertible:
 *
 *     - empty string
 *     - leading whitespace
 *     - any trailing characters that are not part of the number
 *
 *     Cannot be NULL.
 *
 * @param[in] base Base to interpret string in. Same range as strtol (2 to 36).
 *
 * @return Indicates if the operation succeeded, or why it failed.
 */
str2int_errno str2int(int *out, char *s, int base) {
    char *end;
    if (s[0] == '\0' || isspace(s[0]))
        return STR2INT_INCONVERTIBLE;
    errno = 0;
    long l = strtol(s, &end, base);
    /* Both checks are needed because INT_MAX == LONG_MAX is possible. */
    if (l > INT_MAX || (errno == ERANGE && l == LONG_MAX))
        return STR2INT_OVERFLOW;
    if (l < INT_MIN || (errno == ERANGE && l == LONG_MIN))
        return STR2INT_UNDERFLOW;
    if (*end != '\0')
        return STR2INT_INCONVERTIBLE;
    *out = l;
    return STR2INT_SUCCESS;
}
int main(void) {
    int i;
    /* Lazy to calculate this size properly. */
    char s[256];

    /* Simple case. */
    assert(str2int(&i, "11", 10) == STR2INT_SUCCESS);
    assert(i == 11);

    /* Negative number. */
    assert(str2int(&i, "-11", 10) == STR2INT_SUCCESS);
    assert(i == -11);

    /* Different base. */
    assert(str2int(&i, "11", 16) == STR2INT_SUCCESS);
    assert(i == 17);

    /* 0 */
    assert(str2int(&i, "0", 10) == STR2INT_SUCCESS);
    assert(i == 0);

    /* INT_MAX. */
    sprintf(s, "%d", INT_MAX);
    assert(str2int(&i, s, 10) == STR2INT_SUCCESS);
    assert(i == INT_MAX);

    /* INT_MIN. */
    sprintf(s, "%d", INT_MIN);
    assert(str2int(&i, s, 10) == STR2INT_SUCCESS);
    assert(i == INT_MIN);

    /* Leading and trailing space. */
    assert(str2int(&i, " 1", 10) == STR2INT_INCONVERTIBLE);
    assert(str2int(&i, "1 ", 10) == STR2INT_INCONVERTIBLE);

    /* Trash characters. */
    assert(str2int(&i, "a10", 10) == STR2INT_INCONVERTIBLE);
    assert(str2int(&i, "10a", 10) == STR2INT_INCONVERTIBLE);

    /* int overflow.
     *
     * `if` needed to avoid undefined behaviour
     * on `INT_MAX + 1` if INT_MAX == LONG_MAX.
     */
    if (INT_MAX < LONG_MAX) {
        sprintf(s, "%ld", (long int)INT_MAX + 1L);
        assert(str2int(&i, s, 10) == STR2INT_OVERFLOW);
    }

    /* int underflow */
    if (LONG_MIN < INT_MIN) {
        sprintf(s, "%ld", (long int)INT_MIN - 1L);
        assert(str2int(&i, s, 10) == STR2INT_UNDERFLOW);
    }

    /* long overflow */
    sprintf(s, "%ld0", LONG_MAX);
    assert(str2int(&i, s, 10) == STR2INT_OVERFLOW);

    /* long underflow */
    sprintf(s, "%ld0", LONG_MIN);
    assert(str2int(&i, s, 10) == STR2INT_UNDERFLOW);

    return EXIT_SUCCESS;
}
GitHub upstream.
Based on: https://stackoverflow.com/a/6154614/895245
Don't use functions from the ato... group. These are broken and virtually useless. A moderately better solution would be to use sscanf, although it is not perfect either.
To convert a string to an integer, functions from the strto... group should be used. In your specific case, that would be the strtol function.
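For illustration, the sscanf route mentioned above with its return value checked might look like this (a sketch; it detects a failed conversion but, as noted, still does not catch overflow):

#include <stdio.h>

int main(void)
{
    const char s[] = "45";
    int num;

    /* sscanf returns the number of items successfully converted,
       so a failed conversion can at least be detected. */
    if (sscanf(s, "%d", &num) == 1)
        printf("parsed: %d\n", num);
    else
        printf("not a number\n");
    return 0;
}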
You can code atoi() for fun:
int my_getnbr(char *str)
{
    int result;
    int puiss;

    result = 0;
    puiss = 1;
    while (('-' == (*str)) || ((*str) == '+'))
    {
        if (*str == '-')
            puiss = puiss * -1;
        str++;
    }
    while ((*str >= '0') && (*str <= '9'))
    {
        result = (result * 10) + ((*str) - '0');
        str++;
    }
    return (result * puiss);
}
You can also make it recursive, which folds it into just a few lines.
#include <stdbool.h>

/* Note: `atoi` is a standard library name; rename this if it clashes. */
int atoi(const char* str){
    int num = 0;
    int i = 0;
    bool isNegative = false;

    if(str[i] == '-'){
        isNegative = true;
        i++;
    }
    while (str[i] && (str[i] >= '0' && str[i] <= '9')){
        num = num * 10 + (str[i] - '0');
        i++;
    }
    if(isNegative) num = -1 * num;
    return num;
}
Just wanted to share a solution for unsigned long as well.
#include <string.h>

/* Assumes str contains only decimal digits. */
unsigned long ToUInt(char* str)
{
    unsigned long mult = 1;
    unsigned long re = 0;
    int len = strlen(str);

    for(int i = len - 1; i >= 0; i--)
    {
        re = re + ((int)str[i] - 48) * mult;
        mult = mult * 10;
    }
    return re;
}
As already mentioned, the atoi family of functions should never be used in any C program, since they don't have any error handling.
The strtol family of functions is 100% equivalent, but with extended functionality: it has error handling and it also supports other bases than decimal, such as hex or binary. Therefore the correct answer is: use strtol (or another member of its family).
If you for some reason insist on rolling this function yourself, you should try to do something similar to strtol in case there are symbols present other than the optional sign and digits. It's quite common that we want to convert numbers that are part of a larger string, for example.
A naive version with error handling support might look like the example below. This code is for decimal base 10 numbers only, but otherwise behaves like strtol with an optional pointer set to point at the first invalid symbol encountered (if any). Also note that this code doesn't handle overflows.
#include <ctype.h>
long my_strtol (char* restrict src, char** endptr)
{
    long result = 0;
    long sign = 1;

    if(endptr != NULL)
    {
        /* if input is ok and endptr is provided,
           it will point at the beginning of the string */
        *endptr = src;
    }
    if(*src == '-')
    {
        sign = -1;
        src++;
    }
    for(; *src != '\0'; src++)
    {
        if(!isdigit((unsigned char)*src)) // error handling
        {
            if(endptr != NULL)
            {
                *endptr = src;
            }
            break;
        }
        result = result*10 + *src - '0';
    }
    return result * sign;
}
To handle overflows, one could for example count the digits and check that they never exceed 10, assuming a 32-bit long whose maximum is 2147483647 (10 digits). A tighter guard is sketched below.
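A minimal sketch of that tighter guard, checking before each multiply-and-add instead of counting characters (an alternative to the digit-counting idea above, not part of the original function):

#include <limits.h>

/* Returns 0 on success, -1 if appending `digit` (0-9) would overflow.
   `*result` is assumed to be non-negative, as in the loop above. */
int accumulate_digit(long *result, int digit)
{
    if (*result > (LONG_MAX - digit) / 10)
        return -1;              /* *result * 10 + digit would exceed LONG_MAX */
    *result = *result * 10 + digit;
    return 0;
}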
Ok, I had the same problem. I came up with this solution, and it worked best for me. I did try atoi() but it didn't work well for me, so here is my solution:
void splitInput(int arr[], int sizeArr, char num[])
{
    for(int i = 0; i < sizeArr; i++)
        // We are subtracting 48 because the digits in ASCII start at 48 ('0').
        arr[i] = (int)num[i] - 48;
}
You can always roll your own!
#include <stdio.h>
#include <string.h>
#include <math.h>
int my_atoi(const char* snum)
{
    int idx, strIdx = 0, accum = 0, numIsNeg = 0;
    const unsigned int NUMLEN = (int)strlen(snum);

    /* Check if negative number and flag it. */
    if(snum[0] == 0x2d)
        numIsNeg = 1;

    for(idx = NUMLEN - 1; idx >= 0; idx--)
    {
        /* Only process numbers from 0 through 9. */
        if(snum[strIdx] >= 0x30 && snum[strIdx] <= 0x39)
            accum += (snum[strIdx] - 0x30) * pow(10, idx);
        strIdx++;
    }

    /* Check flag to see if originally passed -ve number and convert result if so. */
    if(!numIsNeg)
        return accum;
    else
        return accum * -1;
}

int main()
{
    /* Tests... */
    printf("Returned number is: %d\n", my_atoi("34574"));
    printf("Returned number is: %d\n", my_atoi("-23"));
    return 0;
}
This will do what you want without clutter.
This function will help you
int strtoint_n(char* str, int n)
{
    int sign = 1;
    int place = 1;
    int ret = 0;
    int i;

    for (i = n - 1; i >= 0; i--, place *= 10)
    {
        int c = str[i];
        switch (c)
        {
            case '-':
                if (i == 0) sign = -1;
                else return -1;
                break;
            default:
                if (c >= '0' && c <= '9') ret += (c - '0') * place;
                else return -1;
        }
    }
    return sign * ret;
}

int strtoint(char* str)
{
    char* temp = str;
    int n = 0;

    while (*temp != '\0')
    {
        n++;
        temp++;
    }
    return strtoint_n(str, n);
}
Ref: http://amscata.blogspot.com/2013/09/strnumstr-version-2.html
I think we could also go this way:
#include <stdio.h>

int my_atoi(const char* snum)
{
    int nInt = 0;
    int index = 0;

    while(snum[index])
    {
        if(!nInt)
            nInt = ((int) snum[index]) - 48;
        else
            nInt = nInt * 10 + ((int) snum[index] - 48);
        index++;
    }
    return(nInt);
}

int main()
{
    printf("Returned number is: %d\n", my_atoi("676987"));
    return 0;
}
In C++, you can use a function such as this:
#include <sstream>
#include <stdexcept>
#include <string>

template <typename T>
T to(const std::string & s)
{
    std::istringstream stm(s);
    T result;
    stm >> result;
    // fail() catches a bad conversion, !eof() catches trailing characters
    if (stm.fail() || !stm.eof())
        throw std::invalid_argument("cannot convert: " + s);
    return result;
}
This can help you convert any string to any type such as float, int, double...
Yes, you can store the integer directly:
int num = 45;
If you must parse a string, atoi or strtol is going to win the "shortest amount of code" contest.

How do I fix my `itoa` implementation so it doesn't print reversed output?

I want to convert an integer into a string of numeric characters in C.
I've tried using itoa, but it's non-standard and not provided by my C library.
I tried to implement my own itoa, but it's not working properly:
#include <stdlib.h>
#include <stdio.h>
char *itoa(int val, char *buf, int base)
{
    size_t ctr = 0;
    for( ; val; val /= base )
    {
        buf[ctr++] = '0' + (val % base);
    }
    buf[ctr] = 0;
    return buf;
}

int main(void)
{
    unsigned char c = 201;
    char *buf = malloc(sizeof(c)*8+1);
    itoa(c, buf, 2);
    puts(buf);
    free(buf);
}
It gives reversed output.
For example, if c is 'Z' and base is 2, the output is this: 0101101
The output I want it to be is this: 1011010
How do I fix this issue?
Similar questions
I've already seen this question: Is there a printf converter to print in binary format?
I do not want a printf format specifier to print an integer as binary, I want to convert the binary to a string.
I've already seen this question: Print an int in binary representation using C
Although the answer does convert an integer into a string of binary digits, that's the only thing it can do.
Restrictions
I want itoa to be able to work with other bases, such as 10, 8, etc. and print correctly (i.e. 12345 translates to "12345" and not to "11000000111001").
I do not want to use printf or sprintf to do this.
I do not care about the length of the string as long as the result is correct.
I do not want to convert the integer into ASCII characters other than numeric ones, with the exception of bases greater than 10, in which case the characters may be alphanumeric.
The answer must fit this prototype exactly:
char *itoa(int val, char *buf, int base);
There may be a function called nitoa that has this prototype and returns the number of characters required to hold the result of itoa:
size_t nitoa(int val, int base);
How do I fix my itoa implementation so it doesn't print reversed output?
Rather than reversing the string, form it right-to-left (per comment #4 by user3386109).
I recommend that the helper function also receive a size.
#include <limits.h>
#include <stdio.h>
#include <string.h>

char* itostr(char *dest, size_t size, int a, int base) {
    // Maximum text length occurs with itostr(dest, size, INT_MIN, 2)
    char buffer[sizeof a * CHAR_BIT + 1 + 1];
    static const char digits[36] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";

    if (base < 2 || base > 36) {
        fprintf(stderr, "Invalid base");
        return NULL;
    }

    // Start filling from the end
    char* p = &buffer[sizeof buffer - 1];
    *p = '\0';

    // Work with negative `int`
    int an = a < 0 ? a : -a;

    do {
        *(--p) = digits[-(an % base)];
        an /= base;
    } while (an);

    if (a < 0) {
        *(--p) = '-';
    }

    size_t size_used = &buffer[sizeof(buffer)] - p;
    if (size_used > size) {
        fprintf(stderr, "Scant buffer %zu > %zu", size_used, size);
        return NULL;
    }
    return memcpy(dest, p, size_used);
}
Then to provide memory, use a compound literal.
// compound literal C99 or later
#define INT_STR_SIZE (sizeof(int)*CHAR_BIT + 2)
#define MY_ITOA(x, base) itostr((char [INT_STR_SIZE]){""}, INT_STR_SIZE, (x), (base))
Now you can call it multiple times.
int main(void) {
    printf("%s %s %s %s\n", MY_ITOA(INT_MIN,10), MY_ITOA(-1,10), MY_ITOA(0,10), MY_ITOA(INT_MAX,10));
    printf("%s %s\n", MY_ITOA(INT_MIN,2), MY_ITOA(INT_MIN,36));
    return (0);
}
Output
-2147483648 -1 0 2147483647
-10000000000000000000000000000000 -ZIK0ZK
Note: sizeof(c)*8+1 is one too small for INT_MIN, base 2.
This solution works for me:
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#define itoa lltoa
#define utoa ulltoa
#define ltoa lltoa
#define ultoa ulltoa
#define nitoa nlltoa
#define nutoa nulltoa
#define nltoa nlltoa
#define nultoa nulltoa
#define BASE_BIN 2
#define BASE_OCT 8
#define BASE_DEC 10
#define BASE_HEX 16
#define BASE_02Z 36
__extension__
char *ulltoa(unsigned long long val, char *buf, int base)
{
    int remainder;
    char c, *tmp = buf;

    if(base < BASE_BIN)
    {
        errno = EINVAL;
        return NULL;
    }
    do {
        remainder = val % base;
        if(remainder >= BASE_DEC) c = 'a' - BASE_DEC;
        else c = '0';
        *tmp++ = remainder + c;
        val /= base;
    } while(val);
    *tmp = 0;
    /* strrev is not part of standard C; supply your own if your
       library does not provide it. */
    return strrev(buf);
}

__extension__
size_t nulltoa(unsigned long long val, int base)
{
    size_t size = 0;

    if(base < BASE_BIN)
    {
        errno = EINVAL;
        return 0;
    }
    if(!val) size++;
    for( ; val; val /= base, size++ );
    return size;
}

__extension__
char *lltoa(long long val, char *buf, int base)
{
    if(val < 0 && base > BASE_BIN)
    {
        val = -val;
        *buf++ = '-';
    }
    return ulltoa(val, buf, base);
}

__extension__
size_t nlltoa(long long val, int base)
{
    size_t size = 0;

    if(val < 0 && base > BASE_BIN)
    {
        val = -val;
        size++;
    }
    return size + nulltoa(val, base);
}
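A possible way to call this pair, using nlltoa to size the buffer first (a sketch; it assumes the definitions above are in scope and that a strrev implementation is available):

#include <stdio.h>
#include <stdlib.h>

int main(void)
{
    long long v = -12345;
    size_t need = nlltoa(v, 10) + 1;    /* digits plus sign, plus '\0' */
    char *buf = malloc(need);

    if (buf != NULL)
    {
        lltoa(v, buf, 10);
        printf("%s\n", buf);            /* prints -12345 */
        free(buf);
    }
    return 0;
}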

Converting int to char in C

Right now I am trying to convert an int to a char in C programming. After doing research, I found that I should be able to do it like this:
int value = 10;
char result = (char) value;
What I would like is for this to return 'A' (and for 0-9 to return '0'-'9') but this returns a new line character I think.
My whole function looks like this:
char int2char (int radix, int value) {
    if (value < 0 || value >= radix) {
        return '?';
    }
    char result = (char) value;
    return result;
}
To convert an int to a char you do not have to do anything:
char x;
int y;

/* do something */

x = y;
If you only want to convert a single int value to the printable (usually ASCII) digit, as in your example:
const char digits[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";

int inttochar(int val, int base)
{
    return digits[val % base];
}
If you want to convert to a string (char *), then you need to use one of the standard functions like sprintf, itoa, ltoa, utoa, ultoa ... or write one yourself:
#include <stdlib.h>
#include <string.h>

char *reverse(char *str);   /* in-place string reversal, to be supplied */
const char digits[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";

char *convert(int number, char *buff, int base)
{
    char *result = (buff == NULL || base > strlen(digits) || base < 2) ? NULL : buff;
    char sign = 0;

    if (number < 0)
    {
        sign = '-';
    }
    if (result != NULL)
    {
        do
        {
            *buff++ = digits[abs(number % base)];
            number /= base;
        } while (number);
        if (sign) *buff++ = sign;
        if (!*result) *buff++ = '0';
        *buff = 0;
        reverse(result);
    }
    return result;
}
A portable way of doing this would be to define a
const char* foo = "0123456789ABC...";
where ... are the rest of the characters that you want to consider.
Then foo[value] will evaluate to a particular char. For example foo[0] will be '0', and foo[10] will be 'A'.
If you assume a particular encoding (such as the common but by no means ubiquitous ASCII) then your code is not strictly portable.
Characters use an encoding (typically ASCII) to map numbers to a particular character. The codes for the characters '0' to '9' are consecutive, so for values less than 10 you add the value to the character constant '0'. For values 10 or more, you add the value minus 10 to the character constant 'A':
char result;

if (value >= 10) {
    result = 'A' + value - 10;
} else {
    result = '0' + value;
}
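Folded into the question's int2char, keeping the original bounds check, that might look like this (a sketch):

char int2char (int radix, int value) {
    if (value < 0 || value >= radix) {
        return '?';
    }
    if (value >= 10) {
        return 'A' + value - 10;   /* 10..35 map to 'A'..'Z' */
    }
    return '0' + value;            /* 0..9 map to '0'..'9' */
}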
Converting Int to Char
I take it that OP wants more than just a 1-digit conversion, as a radix was supplied.
To convert an int into a string (not just 1 char), there is the sprintf(buf, "%d", value) approach.
To do so in any radix, string management becomes an issue, as well as dealing with the corner case of INT_MIN.
The following C99 solution returns a char* whose lifetime is valid to the end of the block. It does so by providing a compound literal via the macro.
#include <stdio.h>
#include <stdlib.h>
#include <limits.h>
// Maximum buffer size needed
#define ITOA_BASE_N (sizeof(unsigned)*CHAR_BIT + 2)
char *itoa_base(char *s, int x, int base) {
    s += ITOA_BASE_N - 1;
    *s = '\0';
    if (base >= 2 && base <= 36) {
        int x0 = x;
        do {
            *(--s) = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"[abs(x % base)];
            x /= base;
        } while (x);
        if (x0 < 0) {
            *(--s) = '-';
        }
    }
    return s;
}
#define TO_BASE(x,b) itoa_base((char [ITOA_BASE_N]){0} , (x), (b))
Sample usage and tests
void test(int x) {
    printf("base10:% 11d base2:%35s base36:%7s ", x, TO_BASE(x, 2), TO_BASE(x, 36));
    printf("%ld\n", strtol(TO_BASE(x, 36), NULL, 36));
}

int main(void) {
    test(0);
    test(-1);
    test(42);
    test(INT_MAX);
    test(-INT_MAX);
    test(INT_MIN);
}
Output
base10: 0 base2: 0 base36: 0 0
base10: -1 base2: -1 base36: -1 -1
base10: 42 base2: 101010 base36: 16 42
base10: 2147483647 base2: 1111111111111111111111111111111 base36: ZIK0ZJ 2147483647
base10:-2147483647 base2: -1111111111111111111111111111111 base36:-ZIK0ZJ -2147483647
base10:-2147483648 base2: -10000000000000000000000000000000 base36:-ZIK0ZK -2147483648
Ref How to use compound literals to fprintf() multiple formatted numbers with arbitrary bases?
Check out the ASCII table.
The values stored in a char are interpreted as the characters corresponding to that table. The value 10 is a newline.
Characters in C are based on ASCII (or UTF-8, which is backwards-compatible with ASCII codes). This means that under the hood, 'A' is actually the number 65 (stored in binary rather than decimal). A char in C is just an integer with enough bytes to represent every ASCII character. If you want to convert an int to a char digit, you need to map the value to the corresponding ASCII code yourself - and it's been a while since I've done C, but I believe the compiler will also complain since char holds fewer bytes than int. This means we need a function, as you've written. Thus,
if (value < 10) return '0' + value;
return 'A' + value - 10;
will be what you want to return from your function. Keep your bounds checks against radix as you've done; imho that is good practice in C.
1. Converting int to char by type casting
Source File charConvertByCasting.c
#include <stdio.h>
int main(){
    int i = 66;                 // ~~Type Casting Syntax~~
    printf("%c", (char) i);     // (type_name) expression
    return 0;
}
Executable charConvertByCasting.exe command line output:
C:\Users\boqsc\Desktop\tcc>tcc -run charconvert.c
B
Additional resources:
https://www.tutorialspoint.com/cprogramming/c_type_casting.htm
https://www.tutorialspoint.com/cprogramming/c_data_types.htm
2. Convert int to char by assignment
Source File charConvertByAssignment.c
#include <stdio.h>
int main(){
    int i = 66;
    char c = i;
    printf("%c", c);
    return 0;
}
Executable charConvertByAssignment.exe command line output:
C:\Users\boqsc\Desktop\tcc>tcc -run charconvert.c
B
You can do
char a;
a = '0' + 5;
You will get the character representation of that number.
Borrowing the idea from the existing answers, i.e. making use of array index.
Here is a "just works" simple demo for "integer to char[]" conversion in base 10, without any of <stdio.h>'s printf family interfaces.
Test:
$ cc -o testint2str testint2str.c && ./testint2str
Result: 234789
Code:
#include <stdio.h>
#include <string.h>

static char digits[] = "0123456789";

void int2str (char *buf, size_t sz, int num);

/*
  Test:
    cc -o testint2str testint2str.c && ./testint2str
*/
int
main ()
{
    int num = 234789;
    char buf[1024] = { 0 };
    int2str (buf, sizeof buf, num);
    printf ("Result: %s\n", buf);
}

void
int2str (char *buf, size_t sz, int num)
{
    /*
      Convert integer type to char*, in base-10 form.
    */
    char *bufp = buf;
    int i = 0;

    // NOTE-1
    void __reverse (char *__buf, int __start, int __end)
    {
        char __bufclone[__end - __start];
        int i = 0;
        int __nchars = sizeof __bufclone;
        for (i = 0; i < __nchars; i++)
        {
            __bufclone[i] = __buf[__end - 1 - i];
        }
        memmove (__buf, __bufclone, __nchars);
    }

    while (num > 0)
    {
        bufp[i++] = digits[num % 10];   // NOTE-2
        num /= 10;
    }

    __reverse (buf, 0, i);

    // NOTE-3
    bufp[i] = '\0';
}

// NOTE-1:
//   "Nested function" is a GNU C extension. Move it outside if not
//   compiling with GCC.
// NOTE-2:
//   10 can be replaced by any radix, like 16 for hexadecimal output.
// NOTE-3:
//   Make sure to append the trailing null terminator after everything
//   else is done.

Algorithm to parse an int from a string in one pass

I'm trying to write a function that parses an integer from a string representation.
My problem is that I don't know how to do this with one pass through the string. If I knew ahead of time that the input contained only characters in the range '0', '1', ..., '9' and that the string was of length n, I could of course calculate
character_1 * 10^(n-1) + character_2 * 10^(n-2) + .... + character_n * 10^0
but I want to deal with the general scenario as I've presented it.
I'm not looking for a library function, but an algorithm to achieve this in "pure C".
Here's the code I started from:
int parse_int (const char * c1, const char * c2, int * i)
{
    /*
      [c1, c2]: Range of characters in the string
      i: Integer whose string representation will be converted
      Returns the number of characters parsed.
      Exs. "2342kjsd32" returns 4, since the first 4 characters were parsed.
           "hhsd3b23" returns 0
    */
    int n = 0;
    *i = 0;
    while (c1 != c2)
    {
        char c = *c1;
        if (c >= '0' && c <= '9')
        {
        }
    }
    return n;
}
Just as some of the comments and answers suggested, maybe a bit clearer: You have to "shift" the result "left" by multiplying it by 10 in every iteration before the addition of the new digit.
Indeed, this should remind us of Horner's method. As you have recognized, the result can be written like a polynomial:
result = c1 * 10^(n-1) + c2 * 10^(n-2) + ... + cn * 10^0
And this equation can be rewritten as this:
result = cn + 10*(... + 10*(c2 + 10*c1))
This is the form the approach is based on. From the formula you can already see that you don't need to know, right from the start, the power of 10 the first digit will be multiplied by.
Here's an example:
#include <stdio.h>
int parse_int(const char * begin, const char * end, int * result) {
    int d = 0;
    for (*result = 0; begin != end; d++, begin++) {
        int digit = *begin - '0';
        if (digit >= 0 && digit < 10) {
            *result *= 10;
            *result += digit;
        }
        else break;
    }
    return d;
}

int main() {
    char arr[] = "2342kjsd32";
    int result;
    int ndigits = parse_int(arr, arr + sizeof(arr), &result);
    printf("%d digits parsed, got: %d\n", ndigits, result);
    return 0;
}
The same can be achieved using sscanf(), for everyone that is fine with using the C standard library (can also handle negative numbers):
#include <stdio.h>
int main() {
    char arr[] = "2342kjsd32";
    int result, ndigits;
    sscanf(arr, "%d%n", &result, &ndigits);
    printf("%d digits parsed, got: %d\n", ndigits, result);
    return 0;
}
The output is (both implementations):
$ gcc test.c && ./a.out
4 digits parsed, got: 2342
I think this is a good way to count the characters that come before the number starts:
int parse(char *str)
{
    int k = 0;

    while(*str)
    {
        if((*str >= '0') && (*str <= '9'))
            break;
        str++;
        k++;
    }
    return k;
}
Here's a working version:
#include <stdio.h>
int parse_int (const char * c1, const char * c2, int * i)
{
    /*
      [c1, c2]: Range of characters in the string
      i: Integer whose string representation will be converted
      Returns the number of characters parsed.
      Exs. "2342kjsd32" returns 4, since the first 4 characters were parsed.
           "hhsd3b23" returns 0
    */
    int n = 0;
    *i = 0;
    for (; c1 != c2; c1++)
    {
        char c = *c1;
        if (c >= '0' && c <= '9')
        {
            ++n;
            *i = *i * 10 + c - '0';
        }
        else
        {
            break;
        }
    }
    return n;
}

int main()
{
    int i;
    char const* c1 = "2342kjsd32";
    int n = parse_int(c1, c1 + 10, &i);
    printf("n: %d, i: %d\n", n, i);
    return 0;
}
Output:
n: 4, i: 2342

How to convert epoch decimal time to 64 bit binary and back to decimal in C

I am trying to create a program that will convert decimals to binary based on a specified size and then reverse the process, meaning from binary back to decimal. So far the code and its output look correct from my (beginner's) point of view. I can convert all numbers from decimal to binary and vice versa. I am stuck on the last part: converting the epoch time from a 64-bit binary number back to decimal. I cannot understand where I am going wrong, since the rest of the numbers seem to be recovered correctly. The sources where I found the routines I am using are Binary to Decimal and Decimal to Binary.
Update: modified the code to a shorter version:
I have modified the code to simply demonstrate the problem. The code works fine up to 32-bit binary conversion, but I do not know how to extend it to 64 bits. I noticed that because I was using int I hit the 32-bit limit, so I changed it to long long int to reach 64 bits.
I have provided a simple example that converts the decimal value 1 in 32-bit and 64-bit format to demonstrate the problem. The epoch time is the desired output, but I need to verify that the code works before I attempt that conversion.
Sample of code:
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
#include <inttypes.h>
#define MAX_CHARACTERS 65
typedef struct rec {
    char transmit[MAX_CHARACTERS];
    char receive[MAX_CHARACTERS];
} RECORD;

char *decimal_to_binary(int n , int num); /* Define function */
char *decimal_to_binary(int n , int num) {
    long long int c, d, count;
    char *pointer;

    count = 0;
    pointer = (char*) malloc( num + 1 );
    if ( pointer == NULL )
        exit(EXIT_FAILURE);
    for ( c = num - 1; c >= 0; c-- ) {
        d = n >> c;
        if ( d & 1 )
            *( pointer + count ) = 1 + '0';
        else
            *( pointer + count ) = 0 + '0';
        count++;
    }
    *( pointer + count ) = '\0';
    return pointer;
}

int binary_decimal(long long int n); /* Define function */
int binary_decimal(long long int n) { /* Function to convert binary to decimal.*/
    int decimal=0, i=0, rem;

    while (n!=0) {
        rem = n%10;
        n/=10;
        decimal += rem*pow(2,i);
        ++i;
    }
    return decimal;
}

int main(void) {
    RECORD *ptr_record;
    ptr_record = (RECORD *) malloc (sizeof(RECORD));
    if (ptr_record == NULL) {
        printf("Out of memmory!\nExit!\n");
        exit(0);
    }

    int LI_d = 1;
    char *LI_b = decimal_to_binary(LI_d,32);

    memset( (*ptr_record).transmit , '\0' , sizeof((*ptr_record).transmit) );
    strncat((*ptr_record).transmit , LI_b , strlen(LI_b) );
    printf("LI: %s\n",(*ptr_record).transmit);

    //transmit and receive
    memset( (*ptr_record).receive , '\0' , sizeof((*ptr_record).receive) );
    strncpy( (*ptr_record).receive , (*ptr_record).transmit , strlen((*ptr_record).transmit) );

    char *LI_rcv_b = strndup( (*ptr_record).receive , 64 );
    int LI_rcv_i = atoi (LI_rcv_b);
    int final_LI = binary_decimal(LI_rcv_i);
    printf("Final_LI: %i\n",final_LI);

    free( ptr_record );
    return 0;
}
Sample of output for 32 bit conversion:
LI: 00000000000000000000000000000001
Final_LI: 1
Sample of output for 64 bit conversion:
LI: 0000000000000000000000000000000100000000000000000000000000000001
Final_LI: -1
decimal_to_binary(int n, ...): better to use unsigned math
//char *decimal_to_binary(int n, int num) {
char *decimal_to_binary(unsigned long long n, int num) {
    // long long int c, d, count;
    unsigned long long int d;
    int c, count;
    char *pointer;

    count = 0;
    pointer = malloc(num + 1); // drop cast
    if (pointer == NULL)
        exit(EXIT_FAILURE);
    for (c = num - 1; c >= 0; c--) {
        d = n >> c;
        if (d & 1)
            *(pointer + count) = 1 + '0';
        else
            *(pointer + count) = 0 + '0';
        count++;
    }
    *(pointer + count) = '\0';
    return pointer;
}
Simplify binary_decimal(). Again use unsigned math, drop pow()
/* Function to convert binary (read as decimal digits) to decimal. */
unsigned long binary_decimal(unsigned long long int n) {
    unsigned long decimal = 0;
    unsigned long bit = 1;

    while (n != 0) {
        decimal += (n % 10) * bit;   /* n % 10 is the lowest remaining binary digit */
        bit *= 2;
        n /= 10;
    }
    return decimal;
}
main() has lots of issues
int main(void) {
    RECORD *ptr_record;
    ptr_record = malloc(sizeof(RECORD)); // drop cast
    if (ptr_record == NULL) {
        printf("Out of memory!\nExit!\n"); // spelling fix
        exit(0);
    }

    // use unsigned long long
    unsigned long long LI_d = 1;
    LI_d = (unsigned long long) -1;
    char *LI_b = decimal_to_binary(LI_d, 32);

    memset((*ptr_record).transmit, '\0', sizeof((*ptr_record).transmit));
    // strncat((*ptr_record).transmit, LI_b, strlen(LI_b));
    strncat((*ptr_record).transmit, LI_b, sizeof((*ptr_record).transmit) - 1);
    printf("LI: %s\n", (*ptr_record).transmit);

    //transmit and receive
    memset((*ptr_record).receive, '\0', sizeof((*ptr_record).receive));
    // strncpy((*ptr_record).receive, (*ptr_record).transmit, strlen((*ptr_record).transmit));
    strncpy((*ptr_record).receive, (*ptr_record).transmit, sizeof((*ptr_record).transmit) - 1);

    // char *LI_rcv_b = strndup((*ptr_record).receive, 64);
    char *LI_rcv_b = strndup((*ptr_record).receive, MAX_CHARACTERS);

    // At this point, approach is in error
    // Cannot take a 64-decimal digit string and convert to a typical long long.
    // int LI_rcv_i = atoi(LI_rcv_b);
    // int final_LI = binary_decimal(LI_rcv_i);
    // printf("Final_LI: %i\n", final_LI);

    // Suspect you want to convert a 64-binary digit string to a 64-bit integer
    // maybe by somehow using binary_decimal - suggest re-write of that function
    unsigned long long LI_rcv_i = strtoull(LI_rcv_b, NULL, 2);
    printf("Final_LI: %llu\n", LI_rcv_i);

    free(ptr_record);
    return 0;
}
Output
LI: 11111111111111111111111111111111
Final_LI: 4294967295
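To round-trip an actual epoch timestamp through the 64-bit string, a sketch building on the corrected functions above (it assumes time_t fits in unsigned long long):

#include <stdio.h>
#include <stdlib.h>
#include <time.h>

int main(void) {
    unsigned long long epoch = (unsigned long long) time(NULL);

    char *bits = decimal_to_binary(epoch, 64);          /* 64-character binary string */
    unsigned long long back = strtoull(bits, NULL, 2);  /* and back to a number */

    printf("epoch: %llu\nbits : %s\nback : %llu\n", epoch, bits, back);
    free(bits);
    return 0;
}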
