I have the following data in a file:
Name
Surname
#include <stdio.h>
#define FILENAME "file.txt"
#define MAXSIZE 128
int main(void)
{
setvbuf(stdout, NULL, _IONBF, 0);
FILE *file = fopen(FILENAME, "r");
if (!file) {
perror(FILENAME);
return 1;
}
int ch;
size_t i = 0;
char array[3][MAXSIZE];
for(int a=0; a < 3; a++)
{
while (i < MAXSIZE - 1 && ((ch = getc(file)) != EOF))
{
if (ch == '\n')
break;
array[a][i++] = ch;
}
/* null-terminate the array to create a string */
array[a][i] = '\0';
}
fclose(file);
for(int a=0; a < 3; a++)
{
for(int i=0; i < 10; i++)
{
printf("%c", array[a][i]);
}
}
}
When I run this program it gives me output containing garbage characters.
How can I modify it, so it will not output garbage?
As I noted in a comment:
Your printing loop for (int i = 0; i < 10; i++) { printf("%c", array[a][i]); } needs to stop when array[a][i] == '\0' is reached. Add if (array[a][i] == '\0') break; before the printf().
You also need to reset i to 0 before the while loop (inside the body of the for loop), so that each row starts writing at index 0. If you declared i inside the first for loop, you would not have this problem.
Note that you have two different variables called i (one is size_t i = 0; before the loops; the other is for (int i = 0; …) while printing) and one hides the other. That can lead to confusion.
Those changes might lead to this code:
#include <stdio.h>
#define FILENAME "file.txt"
#define MAXSIZE 128
int main(void)
{
setvbuf(stdout, NULL, _IONBF, 0);
FILE *file = fopen(FILENAME, "r");
if (!file) {
perror(FILENAME);
return 1;
}
char array[3][MAXSIZE];
for (int a = 0; a < 3; a++)
{
int ch;
size_t i = 0;
while (i < MAXSIZE - 1 && ((ch = getc(file)) != EOF))
{
if (ch == '\n')
break;
array[a][i++] = ch;
}
array[a][i] = '\0';
}
fclose(file);
for (int a = 0; a < 3; a++)
{
for (int i = 0; i < 10; i++)
{
if (array[a][i] == '\0')
break;
printf("%c", array[a][i]);
}
}
}
There's also no obvious reason not to print the data using:
for (int a = 0; a < 3; a++)
puts(array[a]);
or
for (int a = 0; a < 3; a++)
printf("%s\n", array[a]);
Related
I am trying to read a text file formatted as a matrix and store it into a 2-d array in C. But I keep getting segmentation faults. I am not sure if I used fgetc correctly and how to fix it.
Command:
./exec number filename
Text file:
00000
01100
00100
00100
00000
int main(int argc, char *argv[]) {
int n = atoi(argv[1]);
char *mapping = malloc(strlen(argv[2]) + 1);
strcpy(mapping,argv[2]);
FILE *file = fopen(mapping, "r");
int c;
int matrix[5][5] = {0};
for(int i =0; i < 5; i++)
{
for(int j=0; j<5; j++)
{
c = fgetc(file);
if(c == '1')
{
matrix[i][j] = 1;
}
else if(c == '0')
{
matrix[i][j] = 0;
}
else if(c == EOF)
{
return -1;
}
}
}
}
Edit: I used fscanf as below, but it is giving me this output, which is incorrect:
01100100100000000000000000000000
int main(int argc, char *argv[]) {
int n = atoi(argv[1]);
char *mapping = malloc(strlen(argv[2]) + 1);
strcpy(mapping,argv[2]);
FILE *file = fopen(mapping, "r");
char c;
int matrix[5][5];
for(int i =0; i < 5; i++) {
for (int j = 0; j < 5; j++) {
fscanf(file, "%d", &c);
}
}
fclose(file);
for(int i =0; i < 5; i++) {
for (int j = 0; j < 5; j++) {
printf("%d", matrix[i][j]);
}
}
return 0;
}
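For reference, fscanf(file, "%d", &c) with c declared as a char is undefined behavior (%d needs an int *), and the loop above never stores anything into matrix, so the uninitialized matrix is what gets printed. A minimal character-based sketch, assuming the 5x5 layout shown above and an already-opened file (read_matrix is a hypothetical helper, not part of the original code):

#include <stdio.h>

/* Read a 5x5 grid of '0'/'1' characters, skipping line endings. */
int read_matrix(FILE *file, int matrix[5][5]) {
    for (int i = 0; i < 5; i++) {
        for (int j = 0; j < 5; j++) {
            int c = fgetc(file);
            while (c == '\n' || c == '\r')   /* skip end-of-line characters */
                c = fgetc(file);
            if (c == EOF)
                return -1;                   /* file ended too early */
            matrix[i][j] = c - '0';          /* '0' -> 0, '1' -> 1 */
        }
    }
    return 0;
}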
The sort command of Linux must sort the lines of a text file and write the output to another file. But my code gives a runtime error. Please rectify the pointer mistakes so that the output is produced.
In which line exactly should I make changes? Because there is no output at all.
I'm pasting the whole code:
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
void sortfile(char **arr, int linecount) {
int i, j;
char t[500];
for (i = 1; i < linecount; i++) {
for (j = 1; j < linecount; j++) {
if (strcmp(arr[j - 1], arr[j]) > 0) {
strcpy(t, arr[j - 1]);
strcpy(arr[j - 1], arr[j]);
strcpy(arr[j], t);
}
}
}
}
int main() {
FILE *fileIN, *fileOUT;
fileIN = fopen("test1.txt", "r");
unsigned long int linecount = 0;
int c;
if (fileIN == NULL) {
fclose(fileIN);
return 0;
}
while ((c = fgetc(fileIN)) != EOF) {
if (c == '\n')
linecount++;
}
printf("line count=%d", linecount);
char *arr[linecount];
char singleline[500];
int i = 0;
while (fgets(singleline, 500, fileIN) != NULL) {
arr[i] = (char*)malloc(500);
strcpy(arr[i], singleline);
i++;
}
sortfile(arr, linecount);
for (i = 0; i < linecount; i++) {
printf("%s\n", arr[i]);
}
fileOUT = fopen("out.txt", "w");
if (!fileOUT) {
exit(-1);
}
for (i = 0; i < linecount; i++) {
fprintf(fileOUT, "%s", arr[i]);
}
fclose(fileIN);
fclose(fileOUT);
}
The problem in your code is that you do not rewind the input stream after reading through it the first time to count the newlines. You should add rewind(fileIN); before the next loop.
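In the posted main(), that call would go right after the newline-counting loop and before the lines are read back, roughly:

/* ... after the while loop that counts newlines ... */
rewind(fileIN);   /* go back to the start of the file before re-reading the lines */
char *arr[linecount];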
Note however that there are other problems in this code:
The number of newline characters may be less than the number of successful calls to fgets(): lines longer than 499 bytes will be silently split into multiple chunks, so fgets() reads more items than there are newlines. Also, the last line might not end with a newline. Just count the number of successful calls to fgets().
You allocate 500 bytes for each line, which is potentially very wasteful. Use strdup() to allocate only the necessary size.
Swapping the lines in the sort routine should be done by swapping the pointers, not copying the contents.
Allocating arr with malloc() is safer and more portable than defining it as a variable-length array with char *arr[linecount];.
Here is a modified version:
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
void sortfile(char **arr, int linecount) {
for (;;) {
int swapped = 0;
for (int j = 1; j < linecount; j++) {
if (strcmp(arr[j - 1], arr[j]) > 0) {
char *t = arr[j - 1];
arr[j - 1] = arr[j];
arr[j] = t;
swapped = 1;
}
}
if (swapped == 0)
break;
}
}
int main() {
FILE *fileIN, *fileOUT;
char singleline[500];
int i, linecount;
fileIN = fopen("test1.txt", "r");
if (fileIN == NULL) {
fprintf(stderr, "cannot open %s\n", "test1.txt");
return 1;
}
linecount = 0;
while (fgets(singleline, 500, fileIN)) {
linecount++;
}
printf("line count=%d\n", linecount);
char **arr = malloc(sizeof(*arr) * linecount);
if (arr == NULL) {
fprintf(stderr, "memory allocation failure\n");
return 1;
}
rewind(fileIN);
for (i = 0; i < linecount && fgets(singleline, 500, fileIN) != NULL; i++) {
arr[i] = strdup(singleline);
if (arr[i] == NULL) {
fprintf(stderr, "memory allocation failure\n");
return 1;
}
}
fclose(fileIN);
if (i != linecount) {
fprintf(stderr, "line count mismatch: i=%d, lilnecount=%d\n",
i, linecount);
linecount = i;
}
sortfile(arr, linecount);
for (i = 0; i < linecount; i++) {
printf("%s", arr[i]);
}
fileOUT = fopen("out.txt", "w");
if (!fileOUT) {
fprintf(stderr, "cannot open %s\n", "out.txt");
return 1;
}
for (i = 0; i < linecount; i++) {
fprintf(fileOUT, "%s", arr[i]);
}
fclose(fileOUT);
for (i = 0; i < linecount; i++) {
free(arr[i]);
}
free(arr);
return 0;
}
To get a different sort order, you would change the comparison function. Instead of strcmp() you could use this:
#include <ctype.h>
int my_strcmp(const char *s1, const char *s2) {
/* compare strings lexicographically but swap lower and uppercase letters */
unsigned char c, d;
while ((c = *s1++) == (d = *s2++)) {
if (c == '\0')
return 0; /* strings are equal */
}
/* transpose case of c */
if (islower(c)) {
c = toupper(c);
} else {
c = tolower(c);
}
/* transpose case of d */
if (islower(d)) {
d = toupper(d);
} else {
d = tolower(d);
}
/* on ASCII systems, we should still have c != d */
/* return comparison result */
if (c <= d)
return -1;
} else {
return 1;
}
}
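To use it, you would call my_strcmp(arr[j - 1], arr[j]) instead of strcmp(arr[j - 1], arr[j]) in the comparison inside sortfile().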
Currently I am making a project that uses char arrays that have null elements. I want to be able to get the length of the array, in the sense of the number of elements that aren't null. This seemed reasonably trivial and I made this function:
int getWordLen(char word[]) {
int count = 0;
for (int i = 0; i < 512; i++) {
if (word[i] != '\0') {
count++;
}
}
printf("%d ", count);
return count;
}
However, every char array returns a length of 188. Any help would be appreciated.
This is the function I was calling it from:
void redact(Words * redactWords, char fileName[]) {
FILE * file = fopen(fileName, "r");
FILE * outputFile = fopen("outputFile.txt", "w+");
char word[512];
int i = 0;
char c;
while (c != EOF) {
c = getc(file);
if ((c > 96) && (c < 123)) {
word[i] = c;
i++;
continue;
}
else if ((c > 64) && (c < 91)) {
word[i] = c + 32;
i++;
continue;
}
i = 0;
if (isWordRedactWord(redactWords, word)) {
//write stars to file
char starStr[512];
for (int i = 0; i < getWordLen(word); i++) {
starStr[i] = '*';
}
fputs(starStr, outputFile);
}
else {
//write word to file
fputs(word, outputFile);
}
strcpy(word, emptyWord(word));
}
fclose(file);
fclose(outputFile);
}
In the initial while, I would test the character you just read rather than an uninitialized c: declare c as an int and use while ((c = getc(file)) != EOF).
Also, I believe you are using a lot more resources than necessary with that for loop inside the while:
char starStr[512];
for (int i = 0; i < getWordLen(word); i++) {
starStr[i] = '*';
I suggest you put it outside the while loop and see what happens.
If it is always giving you a length of 188, it is counting something constant, which may be related to that outer loop.
Hope you can solve it!
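For completeness, a minimal sketch of getWordLen() that stops at the first terminator; it assumes the caller null-terminates word after the last stored character (for example with word[i] = '\0' before using it):

/* Count characters up to the first '\0'; word must be null-terminated. */
int getWordLen(char word[]) {
    int count = 0;
    while (count < 512 && word[count] != '\0')
        count++;
    return count;
}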
I have created a program that reads a series of strings from a .txt file; after running it, a new .txt file is created where the strings should be in alphabetical order. The problem is that I can't write more than 10 words, the program just stops/crashes, WHY? Does it depend on the type of compiler? I am currently using Code::Blocks. How can I optimize the code to run more smoothly?
#include<stdio.h>
#include<string.h>
#include<stdlib.h>
void arrange(int n, char *x[])
{
char *temp;
int i,str;
for(str = 0; str < n-1; ++str)
{
for(i = str+1; i < n; ++i)
{
if(strcmp(x[str],x[i]) > 0)
{
temp = x[str];
x[str] = x[i];
x[i] = temp;
}
}
}
return;
}
int number_of_lines = 0;
void countOfLinesFromFile(char *filename){
FILE* myfile = fopen(filename, "r");
int ch;
do
{
ch = fgetc(myfile);
if(ch == '\n')
number_of_lines++;
}
while (ch != EOF);
if(ch != '\n' && number_of_lines != 0)
number_of_lines++;
fclose(myfile);
return number_of_lines;
}
int main()
{
int i , ts=0;
char *x[10];
char *fileName = "WORDS.txt";
countOfLinesFromFile(fileName);
printf("%d",number_of_lines);
FILE * fp;
fp = fopen ("WORDS.txt", "r");
for(i = 0; i < number_of_lines; i++)
{
x[i] = (char*) malloc (1200*sizeof(char));
fscanf(fp, "%s", x[i]);
}
FILE *fPointer;
fPointer=fopen("Alphabetical.txt","w+");
arrange(i,x);
for(i = 0; i < number_of_lines; i++)
{
fprintf(fPointer,"%s\n",x[i]);
}
fclose(fPointer);
fclose(fp);
return 0;
}
char *x[10];
The buffer size is too small
These two lines define how much information you can store
char *x[10]; // 10 strings
x[i] = (char*) malloc (1200*sizeof(char)); // 1200 characters each
As it is written now, you can only hold a maximum of 10 strings with each string being no longer than 1200 characters.
The crash is caused when number_of_lines >= 11 in the following for loop:
for(i = 0; i < number_of_lines; i++)
{
x[i] = (char*) malloc (1200*sizeof(char));
fscanf(fp, "%s", x[i]);
}
When i reaches 10 you write to x[10], which is past the end of x (its valid indices are 0 through 9).
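One way to remove the hard-coded limit of 10, sketched under the assumption that number_of_lines has already been set by countOfLinesFromFile() as in the posted main(), is to size the pointer array from the line count (error handling kept minimal):

/* Allocate one pointer per line instead of the fixed char *x[10]. */
char **x = malloc(number_of_lines * sizeof *x);
if (x == NULL)
    return 1;
for (i = 0; i < number_of_lines; i++)
{
    x[i] = malloc(1200);
    if (x[i] == NULL || fscanf(fp, "%1199s", x[i]) != 1)
        break;
}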
I wrote this code, which reads every char of my text and puts it into my char array. My problem is that the end of the file is not detected, so after the end of the text fscanf() keeps returning the last char every time until my array is filled. How can I prevent that? I am programming in C.
My Code:
int main() {
char array[50][50];
char buff;
FILE *cola = fopen("C:/Users/danie/Desktop/cola.txt", "r");
for (int i = 0; i < 50; i++) {
for (int k = 0; k < 50; k++) {
fscanf(cola, "%c", &buff);
array[i][k] = buff;
}
}
fclose(cola);
for (int i = 0; i < 50; i++) {
for (int k = 0; k < 50; k++) {
printf("%c", array[i][k]);
}
}
return 0;
}
Thank you for your help.
fscanf() returns the number of successful conversions. You should test the return value and also handle newline characters specifically:
#include <stdio.h>
int main(void) {
char array[50][50];
char buff;
FILE *cola = fopen("C:/Users/danie/Desktop/cola.txt", "r");
if (cola == NULL) {
return 1;
}
for (int i = 0; i < 50; i++) {
for (int k = 0; k < 50; k++) {
if (fscanf(cola, "%c", &buff) != 1 || buff == '\n') {
array[i][k] = '\0';
break;
}
array[i][k] = buff;
}
}
fclose(cola);
for (int i = 0; i < 50; i++) {
for (int k = 0; k < 50 && array[i][k] != '\0'; k++) {
printf("%c", array[i][k]);
}
printf("\n");
}
return 0;
}
The code can be simplified if you use getc() instead of fscanf() to read bytes from the file:
#include <stdio.h>
int main(void) {
char array[50][51];
int c, i, k, n;
FILE *cola = fopen("C:/Users/danie/Desktop/cola.txt", "r");
if (cola == NULL) {
return 1;
}
for (n = 0; n < 50; n++) {
for (k = 0; k < 50; k++) {
if ((c = getc(cola)) == EOF || c == '\n') {
break;
}
array[n][k] = c;
}
array[n][k] = '\0';
if (c == EOF && k == 0)
break;
}
fclose(cola);
for (i = 0; i < n; i++) {
puts(array[i]);
}
return 0;
}
Replace:
for (int i = 0; i < 50; i++) {
for (int k = 0; k < 50; k++) {
fscanf(cola, "%c", &buff);
array[i][k] = buff;
}
}
with:
for (int i = 0; i < 50; i++) {
for (int k = 0; k < 50; k++) {
int c = getc(cola);
if (c == EOF)
break;
array[i][k] = c;
}
}
Since buff is then unused, don't define it. Note that the return type of getc() is an int, not just a char. Always check the I/O function for success/failure. In your original code, you don't even check whether the I/O operation succeeds, which makes detecting EOF impossible.
Note that this code makes a number of assumptions that may or may not be justifiable. For example, you assume each line in the file consists of 49 characters plus a newline; you also assume you'll never need to print the information as a 'string' (your existing code does not; it prints character by character, so it is 'safe').
You might want to describe the input as:
Read up to 50 lines with up to 49 characters plus a newline in each line, storing the result in the variable array with each line being a null-terminated string.
This is more resilient to common problems (short lines, long lines, not enough lines). The code for that might be:
enum { LINE_LEN = 50, NUM_LINES = 50 };
char array[NUM_LINES][LINE_LEN];
int i;
for (i = 0; i < LINE_LEN; i++)
{
int c;
int k;
for (k = 0; k < LINE_LEN; k++)
{
c = getc(cola);
if (c == EOF || c == '\n')
break;
if (k == LINE_LEN - 1)
{
/* Too long - gobble excess */
while ((c = getc(cola)) != EOF && c != '\n')
;
break;
}
array[i][k] = c;
}
array[i][k] = '\0';
if (c == EOF)
break;
}
int num_lines = i; // You have num_lines lines of data in your array
I found one version of the Coca Cola™ ASCII art image at https://www.ascii-code.com/ascii-art/logos/coca-cola.php which looks similar to what you have in your images, but there are many other sources and variants:
__ ___ __ .ama ,
,d888a ,d88888888888ba. ,88"I) d
a88']8i a88".8"8) `"8888:88 " _a8'
.d8P' PP .d8P'.8 d) "8:88:baad8P'
,d8P' ,ama, .aa, .ama.g ,mmm d8P' 8 .8' 88):888P'
,d88' d8[ "8..a8"88 ,8I"88[ I88' d88 ]IaI" d8[
a88' dP "bm8mP8'(8'.8I 8[ d88' `" .88
,88I ]8' .d'.8 88' ,8' I[ ,88P ,ama ,ama, d8[ .ama.g
[88' I8, .d' ]8, ,88B ,d8 aI (88',88"8) d8[ "8. 88 ,8I"88[
]88 `888P' `8888" "88P"8m" I88 88[ 8[ dP "bm8m88[.8I 8[
]88, _,,aaaaaa,_ I88 8" 8 ]P' .d' 88 88' ,8' I[
`888a,. ,aadd88888888888bma. )88, ,]I I8, .d' )88a8B ,d8 aI
"888888PP"' `8""""""8 "888PP' `888P' `88P"88P"8m"
This file's longest line is the first at 67 characters plus newline; the shortest is 61 characters plus newline. The file only has 13 lines and 845 characters (LF line endings) in total. Thus, your program is ill-equipped to deal with this particular data file. It looks for 2,500 characters, and won't get them.
My complete test code was rigged to read from standard input, rather than a fixed file name.
#include <stdio.h>
int main(void)
{
FILE *cola = stdin;
enum { LINE_LEN = 80, NUM_LINES = 50 };
char array[NUM_LINES][LINE_LEN];
int i; // Need value of i after loop
for (i = 0; i < NUM_LINES; i++)
{
int c; // Need value of c after loop
int k;
for (k = 0; k < LINE_LEN; k++)
{
c = getc(cola);
if (c == EOF || c == '\n')
break;
if (k == LINE_LEN - 1)
{
/* Too long - gobble excess */
while ((c = getc(cola)) != EOF && c != '\n')
;
break;
}
array[i][k] = c;
}
array[i][k] = '\0';
if (c == EOF)
break;
}
int num_lines = i; // You have num_lines lines of data in your array
for (i = 0; i < num_lines; i++)
puts(array[i]);
return 0;
}
I tested it on the data file shown, with an empty line at the end, and with a couple of lines containing more than 79 characters after the blank line. It handled all those special cases correctly. Note that handling user input is hard; handling perverse user input is harder. The code is less compact. You could change the rules and then change the code to match. I'm not sure this is the most minimal way to code this; it does work, however. It might be better to have a function to handle the inner input loop; the outer loop could test the return value from that function. This would cut down on the special case handling.
#include <assert.h>
#include <limits.h>
#include <stdio.h>
static int read_line(FILE *fp, size_t buflen, char *buffer)
{
assert(buflen < INT_MAX);
int c; // Need value of c after loop
size_t k; // Need value of k after loop
for (k = 0; k < buflen; k++)
{
if ((c = getc(fp)) == EOF || c == '\n')
break;
if (k == buflen - 1)
{
/* Too long - gobble excess */
while ((c = getc(fp)) != EOF && c != '\n')
;
break;
}
buffer[k] = c;
}
buffer[k] = '\0';
return (k == 0 && c == EOF) ? EOF : (int)k;
}
int main(void)
{
enum { LINE_LEN = 80, NUM_LINES = 50 };
char array[NUM_LINES][LINE_LEN];
int i;
for (i = 0; i < NUM_LINES; i++)
{
if (read_line(stdin, LINE_LEN, array[i]) == EOF)
break;
}
int num_lines = i;
for (i = 0; i < num_lines; i++)
puts(array[i]);
return 0;
}
This produces the same output from the same input as the previous version.
#include <stdio.h>
#include <stdlib.h>

int main() {
//char array[50][50];
char buff;
int t;
FILE *cola = fopen("C:/Users/danie/Desktop/cola.txt", "r");
if (cola == NULL)
{
printf("Cannot open file \n");
exit(0);
}
while (1) {
t = fgetc(cola);
if (t == EOF)
break;
buff = t;
printf("%c", buff);
}
fclose(cola);
return 0;
}
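This last version simply echoes the file to standard output one byte at a time, stopping when fgetc() returns EOF; it does not store anything into the commented-out 2-D array.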