I am trying to read back the data from an array of longs that I have just written to a file, but I get different data.
Please see the code below:
#include <string.h>
#include "readfile.h"
int main()
{
long wr_data [6] ;
wr_data[0] = 11;
wr_data[1] = 1100;
wr_data[2] = 1122323;
wr_data[3] = 11333;
wr_data[4] = 11434243;
wr_data[5] = 1166587;
writeFile(wr_data);
readFile();
return(0);
}
int readFile()
{
FILE *file;
long *data;
/* Open file for both reading and writing */
file = fopen(fileName, "r");
if (file == NULL)
{
printf("Error Reading File\n");
return -1;
}
for (int i = 0; i < 5; i++)
{
fscanf(file, "%ld", &data[i] );
printf("data[%d]: %ld \n",i, data[i]);
}
fclose(file);
return 0;
}
int writeFile(long * data)
{
FILE *fp;
if (data != NULL)
{
if ((fp = fopen(fileName,"w")) == NULL)
return -1;
if (*data !=0 )
fwrite(data,sizeof(long),6,fp);
printf("Write data\n");
fclose(fp);
}
return 0;
}
The result I get is as follows:
Write data
data[0]: 140526045102081
data[1]: 47
data[2]: 197764
data[3]: 140526045102080
data[4]: 4096
I want to preserve the write function as it is, since it comes from existing code. I also tried the fread function, but without success:
fread(data, sizeof(long ), 6, file);
Thanks in advance for your help.
It's working here. I made the following changes to your code:
//needed for output
#include <stdio.h>
//needed for malloc
#include <stdlib.h>
...
char *fileName = "so";
...
//allocate memory to store the values
long *data = (long *)malloc(sizeof(long)*6);
...
//read the stored longs
fread(data, sizeof(long ), 6, file);
int i;
for(i=0; i<6; i++)
printf("%ld\n", data[i]);
What do you think?
Edit:
Well, the main change was the memory allocation. When you want to store values of any kind, your program needs to be granted a memory region by the operating system to store those values.
In this case we had two options: either create a statically allocated array with a fixed size, or allocate the needed memory dynamically with the malloc function or an equivalent.
Don't forget: if you want to store something, first make sure you have a place for it to be stored (i.e. allocated memory). If you don't, you will most likely get a "Segmentation Fault" error, aka SIGSEGV, which means that you tried to access memory that doesn't belong to you.
Also, the fscanf(file, "%ld", &data[i]); call will read file as text and try to parse long integers out of that text. Since you're storing the longs in binary form and not as text, this will not work: you're writing and reading different things.
You are writing the binary content of the array to the file and afterwards try to read it back as textual long values, which obviously cannot work. If you want to store the numbers as text, you must convert them to text before writing, or print them to the file using the fprintf(FILE *, const char *, ...) function.
The following code works as expected using a text file (you might want to change the filename). Otherwise you could just fwrite and fread the whole content, depending on your needs.
#include <stdio.h>
const char *filename = "yourfile";
int readFile()
{
FILE *file;
long data[6];
int i;
printf("Error Reading File\n");;
/* Open file for both reading and writing */
file = fopen(filename, "r");
if (file == NULL)
{
printf("Error Reading File\n");
return -1;
}
for (i = 0; i < 6; i++)
{
fscanf(file, "%ld", &data[i] );
printf("data[%d]: %ld \n",i, data[i]);
}
fclose(file);
return 0;
}
int writeFile(long * data)
{
FILE *fp;
int i;
if (data != NULL)
{
if ((fp = fopen(filename,"w")) == NULL)
return -1;
if (*data !=0 )
{
for(i = 0; i != 6; ++i)
fprintf(fp, "%ld ", data[i]);
}
printf("Write data\n");
fclose(fp);
}
return 0;
}
int main()
{
long wr_data [6] ;
wr_data[0] = 11;
wr_data[1] = 1100;
wr_data[2] = 1122323;
wr_data[3] = 11333;
wr_data[4] = 11434243;
wr_data[5] = 1166587;
writeFile(wr_data);
readFile();
return(0);
}
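If instead you want to keep your original fwrite-based writeFile untouched, the read side has to use fread rather than fscanf, and the file should be opened in binary mode. A minimal sketch of that variant (it assumes the fileName variable and the binary writeFile from your original code):
int readFileBinary()
{
    FILE *file;
    long data[6];
    int i;
    /* open in binary mode, since the file holds raw longs */
    file = fopen(fileName, "rb");
    if (file == NULL)
    {
        printf("Error Reading File\n");
        return -1;
    }
    /* read the six longs back in one call */
    if (fread(data, sizeof(long), 6, file) != 6)
    {
        printf("Error Reading File\n");
        fclose(file);
        return -1;
    }
    for (i = 0; i < 6; i++)
        printf("data[%d]: %ld \n", i, data[i]);
    fclose(file);
    return 0;
}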
I need help reading the numbers from a .txt file and putting them into an array, but only from the second line onwards. I'm stuck and don't know where to go from the code that I built.
Example of the .txt file:
10 20
45000000
48000000
56000000
#include <stdio.h>
#define MAX 50
int main (void){
FILE *file;
int primNum;
int secNum;
int listOfNumers[50];
int numberOfLines = MAX;
int i = 0;
file = fopen("file.txt", "rt");
if (file == NULL)
{
printf("Error\n");
return 1;
}
fscanf(file, "%d %d\n", &primNum, &secNum);
printf("\n1st Number: %d",primNum);
printf("\n2nd Number: %d",secNum);
printf("List of Numbers");
for(i=0;i<numberOfLines;i++){
//Count the number from the second line onwards
}
fclose(file);
return 0;
}
You just need a loop that keeps reading ints from the file and populating the listOfNumers array until reading an int fails.
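A minimal sketch of that loop, reusing the file handle, the listOfNumers array and the numberOfLines limit from your code (it stops when fscanf fails or the array limit is reached):
int count = 0;
// keep reading ints until fscanf stops matching or the array is full
while (count < numberOfLines && fscanf(file, "%d", &listOfNumers[count]) == 1) {
    count++;
}
printf("List of Numbers\n");
for (i = 0; i < count; i++) {
    printf("%d\n", listOfNumers[i]);
}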
Since you don't know how many ints there are in the file, you could also allocate the memory dynamically. Example:
#include <stdio.h>
#include <stdlib.h>
int main(void) {
FILE* file = fopen("file.txt", "rt");
if(file == NULL) {
perror("file.txt");
return 1;
}
int primNum;
int secNum;
if(fscanf(file, "%d %d", &primNum, &secNum) != 2) {
fprintf(stderr, "failed reading primNum and secNum\n");
return 1;
}
unsigned numberOfLines = 0;
// allocate space for one `int`
int* listOfNumers = malloc((numberOfLines + 1) * sizeof *listOfNumers);
// the above could just be:
// int* listOfNumers = malloc(sizeof *listOfNumers);
while(fscanf(file, "%d", listOfNumers + numberOfLines) == 1) {
++numberOfLines;
// increase the allocated space by the sizeof 1 int
int* np = realloc(listOfNumers, (numberOfLines + 1) * sizeof *np);
if(np == NULL) break; // if allocating more space failed, break out
listOfNumers = np; // save the new pointer
}
fclose(file);
puts("List of Numbers:");
for(unsigned i = 0; i < numberOfLines; ++i) {
printf("%d\n", listOfNumers[i]);
}
free(listOfNumers); // free the dynamically allocated space
}
There are a few ways to approach this; if you know the size of the first line, you should be able to use fseek to move the file position, then use getline to get each line of the file:
int fseek(FILE *stream, long offset, int whence);
The whence parameter can be:
SEEK_SET: the beginning
SEEK_CUR: the current position
SEEK_END: the end
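For example, a sketch that skips the first line before reading the rest (the 6-byte offset is hypothetical, matching a first line of exactly "10 20\n"):
// skip the first line, assumed here to be exactly 6 bytes including '\n'
if (fseek(file, 6L, SEEK_SET) != 0) {
    printf("Error\n");
    return 1;
}
// subsequent reads now start at the second line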
The other option would be to encapsulate the entire file read in a while loop:
char *line = NULL;
size_t linecap = 0;
ssize_t linelen;
int counter = 0;
while((linelen = getline(&line, &linecap, file)) != -1){
if (counter == 0) {
sscanf(line, "%d %d\n", &primNum, &secNum);
} else {
//Process your line
}
counter++; //This would give you the total line count
}
I'm currently trying to use a CMP decompressor: https://web.archive.org/web/20070113004119/http://rewiki.regengedanken.de:80/wiki/.CMP
It does in fact decompress the .cmp, but it does not write the result into a file.
So I tried it myself.
int main(int argc, char** argv)
{
int length, dstLength;
unsigned char* fileInMem; //compressed data
unsigned char* dstFile; //decompressed data
if (argc < 2) {
fprintf(stderr, "give filename.cmp as parameter\n");
return 1;
}
printf("%s", argv[1]);
fileInMem = loadFile(argv[1], &length); //compressed data read
if (fileInMem == NULL) {
return 1;
}
dstFile = parseCmp(fileInMem, length, &dstLength); //decompress and assign data to dstFile
if (dstFile) {
/* Now we can save the file from dstFile, dstLength bytes */
printf("%d bytes depacked\n", dstLength);
for (int i = 0; i < 16; i++) {
dataArray[i] = fileInMem[i];
}
FILE *writer = fopen(argv[2], "r+");
//fputs(fileInMem, writer);
//fputs(dstFile, writer);
fclose(writer);
free(dstFile);
}
free(fileInMem);
return 0;
}
As you can see, the decompressed data is a pointer to unsigned char (according to the website, a bitstream), and I tried fputs() from stdio.h, but the resulting file contains only 4 bytes when viewed in a hex editor.
If you need more information, please comment.
Thank you in advance.
Edit: This is what I was able to change thanks to your help, but when I open the file, it is still empty:
FILE* writer = fopen(argv[2], "wb");
fwrite(dstFile, 192, 192, writer);
192, because the first decompressed image is 192 bytes long.
This is a common issue.
First, you need to open the output file writer for writing in binary mode ("wb").
FILE *writer = fopen(argv[2], "wb");
Second, you can't use fputs to write arbitrary data to a file, since it expects a string. Use fwrite instead (assuming writer is the output file, dstFile the decompressed data and dstLength the number of bytes to write):
fwrite(dstFile, 1, dstLength, writer);
If you examine the resulting file with a hex editor, you will see it is identical to the decompressed data.
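Putting both fixes together, a minimal sketch (assuming argv[2] is the output path and that dstFile/dstLength come from parseCmp as in your code):
FILE *writer = fopen(argv[2], "wb");   /* binary mode for arbitrary bytes */
if (writer == NULL) {
    perror(argv[2]);
    return 1;
}
/* write dstLength bytes, using 1 as the element size */
size_t written = fwrite(dstFile, 1, dstLength, writer);
if (written != (size_t)dstLength)
    fprintf(stderr, "short write: %zu of %d bytes\n", written, dstLength);
fclose(writer);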
Test-update
I wrote some test code to see what is wrong; share your results so we can help you.
Add these functions to your code:
void printDataToScreen(unsigned char *dataptr, int datalen)
{
if (dataptr == NULL)
{
printf("[!] ERROR, NULL POINTER PROVIDED!\n");
return;
}
printf("> Dumping %d bytes of data into the terminal...\n", datalen);
for (int i = 0; i < datalen; i++)
{
if (i % 16 == 0)
printf("\n ");
printf("%02X ", dataptr[i]);
}
printf("\n\n");
}
void writeDataToFile(char *fileName, unsigned char *dataptr, int datalen)
{
FILE *file = fopen(fileName, "wb");
if (dataptr == NULL)
{
printf("[!] ERROR, NULL POINTER PROVIDED!\n");
return;
} else if (file == NULL)
{
printf("[!] ERROR WHILE OPENING FILE '%s'!\n", fileName);
return;
}
printf("> Writting %d bytes of data to '%s'...\n", datalen, fileName);
int writtenBytes = fwrite(dataptr, 1, datalen, file);
printf(" Done, %d bytes written!\n\n", writtenBytes);
fclose(file);
}
void runTest(char *fileName, unsigned char *dataptr, int datalen)
{
printf("Running tests... [0/2 done]\n");
printDataToScreen(dataptr, datalen);
printf("Running tests... [1/2 done]\n");
writeDataToFile(fileName, dataptr, datalen);
printf("Finished! [2/2 done]\n");
}
Call it like this:
runTest(argv[2], dstFile, dstLength);
Add the call at this place in your code (comment out this code, as well as the line where you close writer):
FILE *writer = fopen(argv[2], "r+");
//fputs(fileInMem, writer);
//fputs(dstFile, writer);
Please share your results.
I am writing a program to compare two binary files and print the first difference. I want to read 16 bytes of data from each file continuously and compare them. For that I am storing 16 bytes from each file into char buffer1 and buffer2. When I print the output, I see that buffer1 contains the data of both file1 and file2.
The code is as follows:
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
void printConversion(char *buf1, char *buf2) {
size_t len = strlen(buf1);
char *binary = malloc(len * 8 + 1);
binary[0] = '\0';
for (size_t i = 0; i < len; ++i) {
char ch = buf1[i];
for (int j = 7; j >= 0; --j) {
if (ch & (1 << j)) {
strcat(binary,"1");
} else {
strcat(binary,"0");
}
}
}
printf("File1: %s\t", binary);
free(binary);
printf("File2:");
for (int i = 0; i < sizeof(buf2); i++) {
printf("%x", buf2[i] - '0');
}
}
void fileRead(FILE *fp, char *buf, int count) {
fseek(fp, count, SEEK_SET);
fread(buf, 1, 16, fp);
}
int fileSize(FILE *fp) {
fseek(fp, 0, SEEK_END);
int size = ftell(fp) + 1;
return size;
}
int main(int argc, char *argv[]) {
printf("***Binary File Comparator***\n ");
int count = 0;
int index = 0;
char buffer1[16];
char buffer2[16];
char buffer3[16];
char buffer4[16];
// Invalid Number of Arguments
if (argc < 3 || argc > 3) {
printf("Invalid Number of Arguments\n");
}
FILE *fp1, *fp2;
fp1 = fopen(argv[1], "rb");
int size = fileSize(fp1);
int size1 = size;
fclose(fp1);
while (size > 1) {
fp1 = fopen(argv[1], "rb");
fileRead(fp1, buffer1, count);
fclose(fp1);
fp2 = fopen(argv[2], "rb");
fileRead(fp2, buffer2, count);
if (size1 < count) {
int lastSize = count - size1;
count = count + lastSize;
fclose(fp2);
} else {
count = count+16;
fclose(fp2);
}
printf("buffer1:%s\tbuffer2:%s\n", buffer1, buffer2);
size = size - 16;
int result = strcmp(buffer1, buffer2);
if (result != 0) {
for (int i = 0; i < sizeof(buffer1); i++) {
if (buffer1[i] != buffer2[i]) {
int count1 = (count - 16) + i;
index++;
if (index == 1) {
printf("Byte_Offset:%x\n", count1);
fp1 = fopen(argv[1], "rb");
fileRead(fp1, buffer3, count1);
fclose(fp1);
fp2 = fopen(argv[2], "rb");
fileRead(fp2, buffer4, count1);
fclose(fp2);
printConversion(buffer3, buffer4);
break;
}
} else {
continue;
}
}
}
}
}
The printf in the middle of the while loop is the one printing my buffer1 and buffer2.
The output is as follows:
buffer1:83867715933586928386771593358692 buffer2:8386771593358692
buffer1:49216227905963264921622790596326 buffer2:4921622790596326
buffer1:40267236116867294026723611686729 buffer2:4026723611686729
buffer1:82306223673529228230622367352922 buffer2:8230622367352922
buffer1:25869679356114222586967935611422 buffer2:2586967935611422
Can anybody help with what I am doing wrong? Please point out the error and what optimization changes could be made to the code. I am at the learning stage, so your feedback will be very helpful.
You are complicating the task by reading 16 bytes at a time. If the goal is to indicate the first difference, just read one byte at a time from both files with getc() this way:
int compare_files(FILE *fp1, FILE *fp2) {
unsigned long pos;
int c1, c2;
for (pos = 0;; pos++) {
c1 = getc(fp1);
c2 = getc(fp2);
if (c1 != c2 || c1 == EOF)
break;
}
if (c1 == c2) {
printf("files are identical and have %lu bytes\n", pos);
return 0; // files are identical
} else
if (c1 == EOF) {
printf("file1 is included in file2, the first %lu bytes are identical\n", pos);
return 1;
} else
if (c2 == EOF) {
printf("file2 is included in file1, the first %lu bytes are identical\n", pos);
return 2;
} else {
printf("file1 and file2 differ at position %lu: 0x%02X <> 0x%02X\n", pos, c1, c2);
return 3;
}
}
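For completeness, a minimal caller could look like this (a sketch; it assumes the compare_files function above and takes the two file names from the command line as in your program):
#include <stdio.h>

int main(int argc, char *argv[]) {
    if (argc != 3) {
        fprintf(stderr, "usage: %s file1 file2\n", argv[0]);
        return 1;
    }
    FILE *fp1 = fopen(argv[1], "rb");
    FILE *fp2 = fopen(argv[2], "rb");
    if (fp1 == NULL || fp2 == NULL) {
        perror("fopen");
        return 1;
    }
    int result = compare_files(fp1, fp2);
    fclose(fp1);
    fclose(fp2);
    return result;
}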
In terms of efficiency, reading one byte at a time does not pose a problem if the streams are buffered. For large files, you can get better performance by memory mapping the file contents if available on the target system and for the given input streams.
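As a sketch of the memory-mapping idea (POSIX only, not standard C; the file name is hypothetical), one file could be mapped like this and then compared against a second mapping with memcmp:
#include <stdio.h>
#include <fcntl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>

int main(void) {
    const char *path = "file1.bin";          /* hypothetical input file */
    int fd = open(path, O_RDONLY);
    if (fd < 0) { perror(path); return 1; }

    struct stat sb;
    if (fstat(fd, &sb) < 0) { perror("fstat"); return 1; }

    /* map the whole file read-only into memory */
    unsigned char *p = mmap(NULL, sb.st_size, PROT_READ, MAP_PRIVATE, fd, 0);
    if (p == MAP_FAILED) { perror("mmap"); return 1; }

    /* p[0] .. p[sb.st_size - 1] can now be scanned like an ordinary array */

    munmap(p, sb.st_size);
    close(fd);
    return 0;
}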
Not an actual answer, but a word on optimisation. You can increase the speed of the program by using a bigger buffer. Roughly, the larger the buffer, the faster the program runs; HOWEVER, the speed you gain from making it even larger shows strongly diminishing returns.
What I mentioned applies to any similar situation, such as copying files or filling a sound buffer. Loading the entire file into RAM first and operating on it will usually be faster than loading parts of it. Of course this is not possible with very large files, but it is still what you should aim for if you want speed.
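A rough sketch of that whole-file approach in plain standard C (assuming the file fits in memory; names are illustrative, not from the original code):
#include <stdio.h>
#include <stdlib.h>

/* Load an entire file into one heap buffer; returns NULL on failure. */
unsigned char *load_whole_file(const char *path, long *out_size)
{
    FILE *f = fopen(path, "rb");
    if (f == NULL)
        return NULL;
    fseek(f, 0L, SEEK_END);
    long size = ftell(f);
    rewind(f);
    if (size < 0) {
        fclose(f);
        return NULL;
    }
    unsigned char *buf = malloc(size);
    if (buf != NULL && fread(buf, 1, size, f) != (size_t)size) {
        free(buf);
        buf = NULL;
    }
    fclose(f);
    if (buf != NULL)
        *out_size = size;
    return buf;
}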
PS: I'm writing this here because I don't have the rep to comment.
EDIT: I came up with a solution, but since you did not state what you need to do with your buffer3 and buffer4, I packed it up inside a function.
If you are sure that you are only going to use 16 bytes as the buffer size, remove the nBufferSize parameter and replace the dynamic buffer allocation with a static one.
If you need the buffers after the call, add them as parameters and keep the nBufferSize param. Keep in mind that if you intend to use them outside the function, you should also allocate them outside the function, so things don't get messy.
/** Returns 0 if files are identical, 1 if they are different and -1 if there
is an error. */
int FileCmp(char* szFile1, char* szFile2, int nBufferSize)
{
FILE *f1, *f2;
f1 = fopen(szFile1, "rb");
f2 = fopen(szFile2, "rb");
// Some error checking?
if (f1 == NULL || f2 == NULL)
return -1;
// You can check here for file sizes before you start comparing them.
// ...
// Start the comparison.
/// Replace this part with static allocation. --------
char* lpBuffer1 = malloc(sizeof(char)*nBufferSize);
if (lpBuffer1 == NULL) // close the files and return error.
{
fclose(f1);
fclose(f2);
return -1;
}
char* lpBuffer2 = malloc(sizeof(char)*nBufferSize);
if (lpBuffer2 == NULL) // close the files, free buffer1 and return error.
{
free(lpBuffer1);
fclose(f1);
fclose(f2);
return -1;
}
/// --------------------------------------------------
while(1)
{
unsigned int uRead1 = fread(lpBuffer1, sizeof(char), nBufferSize, f1);
unsigned int uRead2 = fread(lpBuffer2, sizeof(char), nBufferSize, f2);
if (uRead1 != uRead2)
goto lFilesAreDifferent;
for(unsigned int i = 0; i < uRead1; i++)
if (lpBuffer1[i] != lpBuffer2[i])
goto lFilesAreDifferent;
if ((feof(f1) != 0) && (feof(f2) != 0))
break; // both files have nothing more to read and are identical.
goto lSkip;
lFilesAreDifferent:
free(lpBuffer1);
free(lpBuffer2);
fclose(f1);
fclose(f2);
return 1;
lSkip:;
}
// The files are the same. Close them, free the buffers and return 0.
free(lpBuffer1);
free(lpBuffer2);
fclose(f1);
fclose(f2);
return 0;
}
A simple Demo:
#define BUFFER_SIZE 16
int main(int nArgs, char** szArgs)
{
if (nArgs != 3)
{
printf("Invalid number of arguments.");
return 0;
}
int nResult = FileCmp(szArgs[1], szArgs[2], BUFFER_SIZE);
switch (nResult)
{
case 0: printf("Files [%s] and [%s] are identical.", szArgs[1], szArgs[2]); break;
case 1: printf("Files [%s] and [%s] are different.", szArgs[1], szArgs[2]); break;
case -1: printf("Error."); break;
}
return 0;
}
EDIT II: Personally, I have never used the C standard FILE library (it was either C++ fstream or the pure Win32 file API), so don't take my word here for granted, but fread is the fastest function I could find (faster than fgets or fgetc). If you want to go even faster than this, you should get into OS-dependent functions (like ReadFile() for Windows).
chqrlie's solution using getc is absolutely the right way to do this. I wanted to address some points brought up in comments, and find it's best to do that with code. In one comment, I recommended pseudo-code which could be confusing (namely, you can't write fwrite(file1...) || fwrite(file2 ...) because of the short circuit). But you can implement the idea of that with:
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
/*
* Compare two files, 16 bytes at a time. (Purely to demonstrate memcmp.
* Clearly, this should be implemented with getc.)
*/
FILE * xfopen(const char *, const char *);
size_t xfread(void *, FILE *, const char *);
int
main(int argc, char **argv)
{
FILE *fp[2];
size_t n[2];
char buf[2][16];
unsigned count = 0;
if(argc != 3) { return EXIT_FAILURE; }
fp[0] = xfopen(argv[1], "r");
fp[1] = xfopen(argv[2], "r");
do {
n[0] = xfread(buf[0], fp[0], argv[1]);
n[1] = xfread(buf[1], fp[1], argv[2]);
if( n[0] != n[1] || (n[0] && memcmp(buf[0], buf[1], n[0]))) {
fprintf(stderr, "files differ in block %u\n", count);
return 1;
}
count += 1;
} while(n[0]);
puts("files are identical");
return 0;
}
size_t
xfread(void *b, FILE *fp, const char *name)
{
size_t n = fread(b, 1, 16, fp);
if(n == 0 && ferror(fp)) {
fprintf(stderr, "Error reading %s\n", name);
exit(EXIT_FAILURE);
}
return n;
}
FILE *
xfopen(const char *path, const char *mode)
{
FILE *fp = strcmp(path, "-") ? fopen(path, mode) : stdin;
if( fp == NULL ) {
perror(path);
exit(EXIT_FAILURE);
}
return fp;
}
I want to sign a tar.gz file and verify the signature. To do this I put a signature (as chars) in my file. Then I have another executable that checks the signature and, if it is valid, removes it; to do that, I get the content of the file without my signature and copy it into another file that I create.
But here is the problem: when I do this, my original tar.gz file has a size of 141 and my newly created file has a size of 140. Then when I decompress it I get this error message:
gzip: stdin: unexpected end of file
tar: Child returned status 1
tar: Error is not recoverable: exiting now
I think that when I get the content I am losing a character. I tried to use clearerr and feof without success.
This is my code for getting the content of the signed file:
#define SIZE_SIGNATURE (423)
int get_size(char *file)
{
struct stat sb;
int size = 0;
if (stat(file, &sb) == -1) {
fprintf(stderr, "Cant access the %s file\n", file);
return (-1);
}
size = sb.st_size - SIZE_SIGNATURE;
size = size / 4;
if (size <= 0) {
fprintf(stderr, "The content of the file is invalid\n");
return (-1);
}
return (size);
}
int *get_content(char *file, int size)
{
FILE *fp = NULL;
int *content = NULL;
int i = 0;
content = malloc(sizeof(int) * size);
if (!content) {
fprintf(stderr, "Error, malloc fail\n");
return (NULL);
}
fp = fopen(file,"rb");
if (!fp) {
fprintf(stderr, "Cant open %s file\n", file);
return (NULL);
}
fread(content, sizeof(int), size, fp);
fclose(fp);
return (content);
}
And when I have created my new file, I put the content into it like this:
fp = fopen(new_name, "a");
if (!fp) {
fprintf(stderr, "A problem has occured during file creation.\n Cant delete signature\n");
free(new_name);
return;
}
fwrite(content, sizeof(int), size, fp);
Why do I do size = size / 4? (I don't know if it is the right thing to do.)
Here is a little piece of code to illustrate the issue; I simply write three ints as non-readable (binary) characters:
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
void ecriture1()
{
FILE* F;
int a = 5;
int b = 6;
int d = 42;
F = fopen("test.bin","wb");
fwrite(&a,sizeof(int),1,F);
fwrite(&d,sizeof(int),1,F);
fwrite(&b,sizeof(int),1,F);
fclose(F);
}
void lecture1()
{
int *tab;
int i = 0;
int size;
struct stat sb;
FILE* F;
stat("test.bin", &sb);
F = fopen("test.bin","rb");
size = sb.st_size;
tab = malloc(sizeof(int) * size);
while (i < size) {
fread(&tab[i], sizeof(int), 1, F);
i++;
}
for (i = 0; i < size; i++)
printf("[i] = %d,", tab[i]);
printf("\n");
fclose(F);
}
int main()
{
ecriture1();
lecture1();
return 0;
}
Here is the result when I don't use the / 4:
[i] = 5,[i] = 42,[i] = 6,[i] = 0,[i] = 0,[i] = 0,[i] = 0,[i] = 0,[i] = 0,[i] = 0,[i] = 0,[i] = 0,
And when I use the / 4:
[i] = 5,[i] = 42,[i] = 6,
EDIT:
In my program, when I remove the / 4 and decompress the new .tar.gz, I get this error message:
gzip: stdin: invalid compressed data--length error
tar: Child returned status 1
tar: Error is not recoverable: exiting now
So I think, unless I am mistaken, that I should keep the / 4, but I can't explain why.
You have a rounding error because you divide the size by 4 using integer division.
I think the reason why you do size / 4 is that you later do sizeof(int) * size, and if you have 4-byte integers that would make your size 4 times larger than you want. So I think you wrote two bugs that cancel each other out.
I would suggest you remove the / 4 as well as the sizeof(int) *, and thus always let size be the byte count.
As a bonus, this removes the rounding error that is probably the whole reason for your problem: 141 / 4 = 35.25, which is truncated to 35, and then 35 * sizeof(int) = 35 * 4 = 140.
Also, I would not recommend storing arbitrary binary data of arbitrary size in an array of int, because that only works cleanly when the size is evenly divisible by sizeof(int). I would probably go for a char * or just a void * rather than an int *.
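A sketch of that byte-oriented approach (it assumes, as your get_size suggests, that the signature is appended at the end of the file; the function name is made up):
#include <stdio.h>
#include <stdlib.h>
#include <sys/stat.h>

#define SIZE_SIGNATURE (423)

/* Copy everything except the trailing signature, byte for byte. */
int strip_signature(const char *in, const char *out)
{
    struct stat sb;
    if (stat(in, &sb) == -1 || sb.st_size <= SIZE_SIGNATURE)
        return -1;
    long size = sb.st_size - SIZE_SIGNATURE;   /* byte count, no / 4 */

    unsigned char *content = malloc(size);
    if (content == NULL)
        return -1;

    FILE *fp = fopen(in, "rb");
    if (fp == NULL || fread(content, 1, size, fp) != (size_t)size) {
        if (fp != NULL)
            fclose(fp);
        free(content);
        return -1;
    }
    fclose(fp);

    fp = fopen(out, "wb");
    if (fp == NULL) {
        free(content);
        return -1;
    }
    fwrite(content, 1, size, fp);
    fclose(fp);
    free(content);
    return 0;
}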
I have a large file containing floating point numbers and I want to read them.
52.881 49.779 21.641 37.230 23.417 7.506 120.190 1.240 79.167 82.397 126.502 47.377 112.583 124.590 103.339 5.821 24.566 38.916 42.576
This is just the beginning of the file. It has 10000000 numbers.
I got this code but I don't know how to print the numbers.
#include <stdio.h>
#include <stdlib.h>
#include <err.h>
#include <fcntl.h>
#include <sysexits.h>
#include <unistd.h>
int main()
{
int fd;
size_t bytes_read, bytes_expected = 1000000*sizeof(double);
double *data;
char *infile = "file.dat";
if ((fd = open(infile,O_RDONLY)) < 0)
err(EX_NOINPUT, "%s", infile);
if ((data = malloc(bytes_expected)) == NULL)
err(EX_OSERR, "data malloc");
bytes_read = read(fd, data, bytes_expected);
if (bytes_read != bytes_expected)
err(EX_DATAERR, "Read only %d of %d bytes",
bytes_read, bytes_expected);
/* print all */
free(data);
exit(EX_OK);
}
You are attempting to read a text file as if the data were binary, so you will read some bytes, but the double values stored in the array will not be the values that you wanted to read from the file. You can probably do something like this instead:
FILE *file;
double *array;
size_t count;
const char *infile = "file.dat";
file = fopen(infile, "r");
if (file == NULL)
return -1;
/* Count the values first. Note: "%*lf" suppresses assignment and is not
   counted in fscanf's return value, so read into a dummy variable instead. */
double dummy;
count = 0;
while (fscanf(file, "%lf", &dummy) == 1)
count += 1;
rewind(file);
array = malloc(count * sizeof(*array));
if (array == NULL) {
fprintf(stderr, "cannot allocate %zu bytes!\n", count * sizeof(*array));
fclose(file);
return -1;
}
// Read the values into the array
for (size_t i = 0; i < count; ++i) {
fscanf(file, "%lf", &array[i]);
}
// Print the array
for (size_t i = 0; i < count; ++i) {
fprintf(stdout, "%f\n", array[i]);
}
// Release memory
free(array);
Since you want a fast solution, you may have to sacrifice memory.
The fastest way to read a file is in binary form. Thus, I would obtain the file size with an efficient method, then allocate memory accordingly, with the idea of loading the entire file into memory. Once there, since memory access is faster than file access, the data can be quickly parsed by using sscanf(...).
We can also observe that each floating point number needs at least 3 characters to be stored in a text file: 1 char for the dot ('.'), 1 char for some digit, and 1 char for the space (' ') separating a value from its successor in the file.
Thus, the file size divided by 3 is an upper bound for the number of doubles in the array.
#include <stdio.h>
#include <stdlib.h> // for malloc and free
int main(void) {
char *filename = "file.dat";
FILE *F = fopen(filename, "rb");
if (F == NULL) return 1; // could not open the file
fseek(F, 0L, SEEK_END);
long int filesize = ftell(F);
rewind(F);
char *data = malloc(filesize+1);
fread(data, filesize, 1, F);
data[filesize] = '\0'; // End of string, just in case
fclose(F);
// The desired data will be stored in array:
double *array = malloc(sizeof(double) * filesize/3);
int ret;
int n; // number of chars consumed by one sscanf(...) reading
double *a = array;
char *p = data; // scanning pointer, so data can still be freed later
while (1) { // Infinite loop...
ret = sscanf(p, " %lg%n", a, &n);
if (ret != 1) break; // <<---- EXIT POINT: end of data or malformed input
a++;
p += n;
}
long int array_size = a - array; // number of doubles actually parsed
// ... use array[0 .. array_size-1] here ...
free(array);
free(data);
return 0;
}