In order to write the contents of a file into a buffer, I first need to know the size of the string to allocate. To do this, I use the following function:
long file_length(FILE *fp)
{
if (fp == NULL) return -1L;
fseek(fp, 0L, SEEK_END);
const long len = ftell(fp);
rewind(fp);
return len;
}
And I use it as follows to store the contents of my file:
char *file_content(const char *fname)
{
assert(access(fname, F_OK) != -1);
assert(access(fname, R_OK) != -1);
FILE *fp = fopen(fname, "r");
assert(fp != NULL);
const long flen = file_length(fp);
printf("Length of file: %ld\n", flen);
char *buff = malloc(flen + 1);
assert(buff != NULL);
fread(buff, sizeof(char), flen, fp);
buff[flen] = '\0';
fclose(fp);
return buff;
}
And then I test:
int main()
{
char *content = file_content("test.txt");
printf("Length of buffer: %lld\n", strlen(content));
free(content);
return 0;
}
Here's test.txt:
Hello, world!
This is a simple test.
Stackoverflow.
My program then displays this:
Length of file: 57
Length of buffer: 53
Since the file has 4 line feeds, I suspect the difference comes from how the line endings are interpreted by the two operations (fseek/ftell for the file position, fread for the actual read). Is that right? Or does it depend on the platform or on the open mode?
If so, I would like to know how to get consistent results, so that I can allocate the exact size of the string directly from my file_length function, without having to subtract the number of line feeds the file contains (if that is even possible), in order to be as efficient as possible.
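(For reference, a minimal sketch reusing file_length() from above: opening the stream in binary mode with "rb" avoids the text-mode newline translation that Windows performs, so the byte count reported by ftell() matches what fread() actually returns. The function name is just an example.)
char *file_content_binary(const char *fname)
{
FILE *fp = fopen(fname, "rb"); /* binary mode: no CRLF -> LF translation */
if (fp == NULL) return NULL;
const long flen = file_length(fp);
char *buff = malloc(flen + 1);
if (buff == NULL) { fclose(fp); return NULL; }
const size_t nread = fread(buff, 1, flen, fp); /* nread == flen in binary mode */
buff[nread] = '\0';
fclose(fp);
return buff;
}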
I am trying to read a file in C and print a loading indicator that shows how much of the file I have read so far (I am reading the file in chunks). My problem is that with larger files (>100 MB) the loading display breaks, and I know it is because of data-type limits in C, but I do not know how to fix it or how to approach it.
Before I start reading the file in chunks, I compute the file size, and then I use a simple formula to compute the percentage from the running total of bytes read and the total size.
FILE *file;
file = fopen("test.txt", "r");
size_t nread;
char * buf = (char*)malloc(sizeof(char) * CHUNK);
long total = 0;
fseek(file, 0L, SEEK_END);
total = ftell(file);
fseek(file, 0L, SEEK_SET);
if (file)
{
while ((nread = fread(buf, 1, CHUNK, file)) > 0)
{
printf(" loading: %d \n", ftell(file)*100/total);
}
}
fclose(file);
The problem is with the sizes in C: since the byte counts multiplied by 100 exceed the int/long int limits, the computation overflows. Is there a way to correctly display how much of the file I have parsed? I cannot store the result of "ftell(file)*100/total" in an int or long int.
The problem was with the data types. The fix was to use the unsigned long long int data type instead of int to compute the percentage.
FILE *file;
file = fopen("test.txt", "r");
size_t nread;
char * buf = (char*)malloc(sizeof(char) * CHUNK);
unsigned long long total = 0, keeptrack_sum = 0, percentage;
if (file)
{
fseek(file, 0L, SEEK_END);
total = ftell(file);
fseek(file, 0L, SEEK_SET);
while ((nread = fread(buf, 1, CHUNK, file)) > 0)
{
keeptrack_sum += nread;
percentage = keeptrack_sum * 100 / total;
printf(" loading: %llu \n", percentage);
}
fclose(file);
}
free(buf);
I am working with an ARM Cortex board, programming in C/C++. I am generating a CSV file on the board, which only contains numbers (16-bit microphone amplitudes).
I want to compress this file using a portable, small and fast library, so I chose QuickLZ. (I don't need the best compression.)
Here is the link: http://www.quicklz.com/download.html
I understand that the algorithm is supposed to read the bytes of the file and compress them, which is why src/dst are char*. I tried the code below, but I obtained a 0-byte zip file.
#include <stdio.h>
#include <stdlib.h>
#include "quicklz.h"
int main()
{
FILE *ifile = fopen("new_text_file.csv", "w");
FILE *ofile = fopen("new_zip_file.zip", "w");
char *src, *dst;
if (ifile != NULL) {
printf("OPEN SUCESS!\r\n");
}
else {
printf ("OPEN ERROR");
}
fprintf (ifile,"23483209489"); //Just for example
ifile = fopen("new.csv", "r");
qlz_state_compress *state_compress = (qlz_state_compress *)malloc(sizeof(qlz_state_compress));
size_t len, len2;
// allocate source buffer and read file
fseek(ifile, 0, SEEK_END);
len = ftell(ifile);
fseek(ifile, 0, SEEK_SET);
src = (char*) malloc(len);
fread(src, 1, len, ifile);
// allocate "uncompressed size" + 400 for the destination buffer
dst = (char*) malloc(len + 400);
// compress and write result
len2 = qlz_compress(src, dst, len, state_compress);
fwrite(dst, len2, 1, ofile);
fclose(ifile);
fclose(ofile);
}
Thanks all.
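(For reference, a sketch of how the file handling could be reworked, using the same qlz_* API and includes as above; the file names are just examples and error checks are omitted. The key points are closing the CSV after writing it, reopening the same file name in binary mode, and writing the compressed bytes to a fresh output file. Note that the output is raw QuickLZ data, not a ZIP archive, so it is named .qlz here.)
FILE *ifile = fopen("new_text_file.csv", "w");
fprintf(ifile, "23483209489"); // just for example
fclose(ifile); // flush and close before reading it back
ifile = fopen("new_text_file.csv", "rb"); // same name, binary mode
FILE *ofile = fopen("new_zip_file.qlz", "wb");
fseek(ifile, 0, SEEK_END);
size_t len = ftell(ifile);
fseek(ifile, 0, SEEK_SET);
char *src = malloc(len);
char *dst = malloc(len + 400); // "uncompressed size" + 400, as QuickLZ requires
qlz_state_compress *state_compress = malloc(sizeof(qlz_state_compress));
fread(src, 1, len, ifile);
size_t len2 = qlz_compress(src, dst, len, state_compress);
fwrite(dst, 1, len2, ofile);
fclose(ifile);
fclose(ofile);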
I'm trying a very simple thing: read a minimal text file and compress it with the compress() utility from zlib. I think I've done everything right, and I allocate filesize * 10 for the output, which should be more than enough, but I keep getting -5 (Z_BUF_ERROR) as the result of the operation.
Any help?
#include <stdio.h>
#include <stdlib.h>
#include "zlib.h"
#define FILE_TO_OPEN "text.txt"
static char* readcontent(const char *filename, int* size)
{
char* fcontent = NULL;
int fsize = 0;
FILE* fp = fopen(filename, "r");
if(fp) {
fseek(fp, 0, SEEK_END);
fsize = ftell(fp);
rewind(fp);
fcontent = (char*) malloc(sizeof(char) * fsize);
fread(fcontent, 1, fsize, fp);
fclose(fp);
}
*size = fsize;
return fcontent;
}
int main(int argc, char const *argv[])
{
int input_size;
char* content_of_file = readcontent(FILE_TO_OPEN, &input_size);
printf("%d\n", input_size);
uLongf compressed_data_size;
char* compressed_data = malloc(sizeof(char) * (input_size * 10));
int result = compress((Bytef*) compressed_data, (uLongf*)&compressed_data_size, (const Bytef*)content_of_file, (uLongf)input_size);
printf("%d\n", result);
return 0;
}
Use fopen(filename, "rb"). If you are on Windows that b is important to avoid corruption of binary data.
Use compressBound() in zlib instead of input_size * 10 and set compressed_data_size before calling compress(). (You do not need to and should not write your own compressBound().)
Try
uLongf compressed_data_size = compressBound(input_size);
compressBound should be available in zlib.
Also, you are probably better off using "rb" in fopen, as I mentioned in my comment before.
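To make the compressBound() suggestion concrete, here is a minimal sketch of the corrected call, reusing the question's variable names:
uLongf compressed_data_size = compressBound(input_size); // worst-case output size
Bytef *compressed_data = malloc(compressed_data_size);
int result = compress(compressed_data, &compressed_data_size,
(const Bytef *) content_of_file, (uLong) input_size);
// on Z_OK, compressed_data_size now holds the actual compressed length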
What is the simplest way (least error-prone, least lines of code, however you want to interpret it) to open a file in C and read its contents into a string (char*, char[], whatever)?
I tend to just load the entire file as a raw memory chunk and do the parsing on my own. That way I have the best control over what the standard library does on multiple platforms.
This is a stub I use for this. You may also want to check the error codes of fseek, ftell and fread (omitted for clarity).
char * buffer = 0;
long length;
FILE * f = fopen (filename, "rb");
if (f)
{
fseek (f, 0, SEEK_END);
length = ftell (f);
fseek (f, 0, SEEK_SET);
buffer = malloc (length);
if (buffer)
{
fread (buffer, 1, length, f);
}
fclose (f);
}
if (buffer)
{
// start to process your data / extract strings here...
}
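If the buffer is meant to be used as a C string rather than a raw chunk, one small tweak to the same stub is to allocate one extra byte and terminate at the number of bytes actually read:
buffer = malloc (length + 1);
if (buffer)
{
size_t n = fread (buffer, 1, length, f);
buffer[n] = '\0';
}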
Another, unfortunately highly OS-dependent, solution is memory mapping the file. The benefits generally include read performance and reduced memory use, as the application's view and the operating system's file cache can actually share the physical memory.
POSIX code would look like this:
int fd = open("filename", O_RDONLY); // <fcntl.h>
off_t len = lseek(fd, 0, SEEK_END); // <unistd.h>
void *data = mmap(0, len, PROT_READ, MAP_PRIVATE, fd, 0); // <sys/mman.h>
Windows, on the other hand, is a little more tricky, and unfortunately I don't have a compiler in front of me to test, but the functionality is provided by CreateFileMapping() and MapViewOfFile().
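In the same untested spirit, a sketch of the Windows side (read-only mapping of the whole file, error handling omitted):
#include <windows.h>
HANDLE file = CreateFileA("filename", GENERIC_READ, FILE_SHARE_READ, NULL,
OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
HANDLE mapping = CreateFileMappingA(file, NULL, PAGE_READONLY, 0, 0, NULL);
void *data = MapViewOfFile(mapping, FILE_MAP_READ, 0, 0, 0); // length 0 maps the whole file
// ... use data ...
UnmapViewOfFile(data);
CloseHandle(mapping);
CloseHandle(file);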
If "read its contents into a string" means that the file does not contain characters with code 0, you can also use getdelim() function, that either accepts a block of memory and reallocates it if necessary, or just allocates the entire buffer for you, and reads the file into it until it encounters a specified delimiter or end of file. Just pass '\0' as the delimiter to read the entire file.
This function is available in the GNU C Library, http://www.gnu.org/software/libc/manual/html_mono/libc.html#index-getdelim-994
The sample code might look as simple as
char* buffer = NULL;
size_t len = 0;
ssize_t bytes_read = getdelim( &buffer, &len, '\0', fp);
if ( bytes_read != -1) {
/* Success, now the entire file is in the buffer */
}
If you are reading special files like stdin or a pipe, you are not going to be able to use fstat to get the file size beforehand. Also, if you are reading a binary file, fgets is going to lose the string size information because of embedded '\0' characters. The best way to read such a file is then to use read and realloc:
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <errno.h>
#include <string.h>
int main () {
char buf[4096];
ssize_t n;
char *str = NULL;
size_t len = 0;
while ((n = read(STDIN_FILENO, buf, sizeof buf)) != 0) {
if (n < 0) {
if (errno == EAGAIN)
continue;
perror("read");
break;
}
str = realloc(str, len + n + 1);
memcpy(str + len, buf, n);
len += n;
str[len] = '\0';
}
printf("%.*s\n", len, str);
return 0;
}
Note: This is a modification of the accepted answer above.
Here's a way to do it, complete with error checking.
I've added a size check that quits if the file is bigger than 1 GiB. I did this because the program puts the whole file into one string, which may use too much RAM and crash the machine. If you don't care about that, you can simply remove the check.
#include <stdio.h>
#include <stdlib.h>
#define FILE_OK 0
#define FILE_NOT_EXIST 1
#define FILE_TOO_LARGE 2
#define FILE_READ_ERROR 3
char * c_read_file(const char * f_name, int * err, size_t * f_size) {
char * buffer;
size_t length;
FILE * f = fopen(f_name, "rb");
size_t read_length;
if (f) {
fseek(f, 0, SEEK_END);
length = ftell(f);
fseek(f, 0, SEEK_SET);
// 1 GiB; best not to load a whole large file in one string
if (length > 1073741824) {
*err = FILE_TOO_LARGE;
fclose(f);
return NULL;
}
buffer = (char *)malloc(length + 1);
if (buffer == NULL) {
*err = FILE_READ_ERROR;
fclose(f);
return NULL;
}
if (length) {
read_length = fread(buffer, 1, length, f);
if (length != read_length) {
free(buffer);
*err = FILE_READ_ERROR;
fclose(f);
return NULL;
}
}
fclose(f);
*err = FILE_OK;
buffer[length] = '\0';
*f_size = length;
}
else {
*err = FILE_NOT_EXIST;
return NULL;
}
return buffer;
}
And to check for errors:
int err;
size_t f_size;
char * f_data;
f_data = c_read_file("test.txt", &err, &f_size);
if (err) {
// process error
}
else {
// process data
free(f_data);
}
What is the simplest way (least error-prone, least lines of code, however you want to interpret it) to open a file in C and read its contents into a string ...?
Sadly, even after years, answers are error prone and many lack proper string formation and error checking.
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h> // for SIZE_MAX
// Read the file into allocated memory.
// Return NULL on error.
char* readfile(FILE *f) {
// f invalid? fseek() fail?
if (f == NULL || fseek(f, 0, SEEK_END)) {
return NULL;
}
long length = ftell(f);
rewind(f);
// Did ftell() fail? Is the length too long?
if (length == -1 || (unsigned long) length >= SIZE_MAX) {
return NULL;
}
// Convert from long to size_t
size_t ulength = (size_t) length;
char *buffer = malloc(ulength + 1);
// Allocation failed? Read incomplete?
if (buffer == NULL || fread(buffer, 1, ulength, f) != ulength) {
free(buffer);
return NULL;
}
buffer[ulength] = '\0'; // Now buffer points to a string
return buffer;
}
Note that if the text file contains null characters, the allocated data will contain all the file data, yet the string will appear to be short. Better code would also return the length information so the caller can handle that.
char* readfile(FILE *f, size_t *ulength_ptr) {
...
if (ulength_ptr) *ulength_ptr = ulength;
...
}
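A hedged usage sketch of that variant (the file name is only an example; the caller still owns and closes the stream):
size_t length = 0;
FILE *f = fopen("test.txt", "rb");
char *text = readfile(f, &length);
if (f) fclose(f);
if (text) {
// length is the true byte count, even if the data contains embedded '\0'
free(text);
}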
If the file is text, and you want to get the text line by line, the easiest way is to use fgets().
char buffer[100];
FILE *fp = fopen("filename", "r"); // do not use "rb"
if (fp) {
while (fgets(buffer, sizeof(buffer), fp)) {
... do something
}
fclose(fp);
}
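If the goal is still a single string, here is a sketch of growing a buffer from the same fgets() loop (needs <stdlib.h> and <string.h>; minimal error handling, just an illustration):
char line[100];
char *all = NULL;
size_t used = 0;
FILE *fp = fopen("filename", "r");
if (fp) {
while (fgets(line, sizeof(line), fp)) {
size_t n = strlen(line);
char *tmp = realloc(all, used + n + 1);
if (tmp == NULL) break;
all = tmp;
memcpy(all + used, line, n + 1); // copies the trailing '\0' too
used += n;
}
fclose(fp);
}
// all (if non-NULL) now holds the whole file as one string of length used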
If you're using GLib, then you can use g_file_get_contents:
gchar *contents;
GError *err = NULL;
g_file_get_contents ("foo.txt", &contents, NULL, &err);
g_assert ((contents == NULL && err != NULL) || (contents != NULL && err == NULL));
if (err != NULL)
{
// Report error to user, and free error
g_assert (contents == NULL);
fprintf (stderr, "Unable to read file: %s\n", err->message);
g_error_free (err);
}
else
{
// Use file contents
g_assert (contents != NULL);
}
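One follow-up: the contents buffer is allocated by GLib, so release it with g_free() once you are done with it:
g_free (contents);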
Just modified from the accepted answer above.
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>
char *readFile(char *filename) {
FILE *f = fopen(filename, "rt");
assert(f);
fseek(f, 0, SEEK_END);
long length = ftell(f);
fseek(f, 0, SEEK_SET);
char *buffer = (char *) malloc(length + 1);
assert(buffer);
size_t n = fread(buffer, 1, length, f);
buffer[n] = '\0'; // terminate at the number of bytes actually read
fclose(f);
return buffer;
}
int main() {
char *content = readFile("../hello.txt");
printf("%s", content);
}
// Assumes the file exists and will seg. fault otherwise.
const GLchar *load_shader_source(char *filename) {
FILE *file = fopen(filename, "r"); // open
fseek(file, 0L, SEEK_END); // find the end
size_t size = ftell(file); // get the size in bytes
GLchar *shaderSource = calloc(size + 1, 1); // allocate enough bytes plus a null terminator (zeroed)
rewind(file); // go back to file beginning
fread(shaderSource, 1, size, file); // read each char into our block
fclose(file); // close the stream
return shaderSource;
}
This is a pretty crude solution because nothing is checked against null.
I will add my own version, based on the answers here, just for reference. My code takes sizeof(char) into consideration and adds a few comments.
// Open the file in read mode.
FILE *file = fopen(file_name, "r");
// Check if there was an error.
if (file == NULL) {
fprintf(stderr, "Error: Can't open file '%s'.", file_name);
exit(EXIT_FAILURE);
}
// Get the file length
fseek(file, 0, SEEK_END);
long length = ftell(file);
fseek(file, 0, SEEK_SET);
// Create the string for the file contents.
char *buffer = malloc(sizeof(char) * (length + 1));
if (buffer == NULL) {
fprintf(stderr, "Error: Out of memory.");
exit(EXIT_FAILURE);
}
buffer[length] = '\0';
// Set the contents of the string.
fread(buffer, sizeof(char), length, file);
// Close the file.
fclose(file);
// Do something with the data.
// ...
// Free the allocated string space.
free(buffer);
Easy and neat (assuming the file contents are less than 10,000 characters):
void read_whole_file(char fileName[1000], char buffer[10000])
{
FILE * file = fopen(fileName, "r");
if(file == NULL)
{
puts("File not found");
exit(1);
}
char c;
int idx=0;
while (fscanf(file , "%c" ,&c) == 1)
{
buffer[idx] = c;
idx++;
}
buffer[idx] = 0;
fclose(file);
}