I am trying to read through the given file and then tokenize it. The only problem I'm having is with fgets. The file open receives no errors. I have seen this elsewhere on the site; however, no matter how I set this up — including declaring fileLine with a fixed size like `char fileLine[200]` — I get a segmentation fault. Thanks in advance for any help.
#include <stdio.h> #include <stdlib.h> #include <string.h> #include <fcntl.h> #include <unistd.h> int main(int argc, char *argv[]){ char *fileName = "0"; char *tokenize, *savePtr; struct Record *database= malloc(sizeof(database[0])); int recordNum =0; char *fileLine = malloc(sizeof(char *));//have replaced with fileline[200] still didnt work FILE *fd = open(fileName,O_RDWR); if(fd< 0){ perror("ERROR OPENING FILE"); } while(fgets(fileLine,200,fd) !=NULL){ printf("%s\n", fileLine); tokenize = strtok_r(fileLine,",",&savePtr); while(tokenize != NULL){ //TOKENIZING into a struct } }
Allocate the full buffer size: `malloc(200 * sizeof(char))`. Note that `sizeof(char) == 1` always, so this is equivalent to `malloc(200)`.